AnalysisBasedWarnings.cpp 92 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560
  1. //=- AnalysisBasedWarnings.cpp - Sema warnings based on libAnalysis -*- C++ -*-=//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file defines analysis_warnings::[Policy,Executor].
  10. // Together they are used by Sema to issue warnings based on inexpensive
  11. // static analysis algorithms in libAnalysis.
  12. //
  13. //===----------------------------------------------------------------------===//
  14. #include "clang/Sema/AnalysisBasedWarnings.h"
  15. #include "clang/AST/DeclCXX.h"
  16. #include "clang/AST/DeclObjC.h"
  17. #include "clang/AST/EvaluatedExprVisitor.h"
  18. #include "clang/AST/Expr.h"
  19. #include "clang/AST/ExprCXX.h"
  20. #include "clang/AST/ExprObjC.h"
  21. #include "clang/AST/OperationKinds.h"
  22. #include "clang/AST/ParentMap.h"
  23. #include "clang/AST/RecursiveASTVisitor.h"
  24. #include "clang/AST/StmtCXX.h"
  25. #include "clang/AST/StmtObjC.h"
  26. #include "clang/AST/StmtVisitor.h"
  27. #include "clang/Analysis/Analyses/CFGReachabilityAnalysis.h"
  28. #include "clang/Analysis/Analyses/CalledOnceCheck.h"
  29. #include "clang/Analysis/Analyses/Consumed.h"
  30. #include "clang/Analysis/Analyses/ReachableCode.h"
  31. #include "clang/Analysis/Analyses/ThreadSafety.h"
  32. #include "clang/Analysis/Analyses/UninitializedValues.h"
  33. #include "clang/Analysis/Analyses/UnsafeBufferUsage.h"
  34. #include "clang/Analysis/AnalysisDeclContext.h"
  35. #include "clang/Analysis/CFG.h"
  36. #include "clang/Analysis/CFGStmtMap.h"
  37. #include "clang/Basic/SourceLocation.h"
  38. #include "clang/Basic/SourceManager.h"
  39. #include "clang/Lex/Preprocessor.h"
  40. #include "clang/Sema/ScopeInfo.h"
  41. #include "clang/Sema/SemaInternal.h"
  42. #include "llvm/ADT/ArrayRef.h"
  43. #include "llvm/ADT/BitVector.h"
  44. #include "llvm/ADT/MapVector.h"
  45. #include "llvm/ADT/SmallString.h"
  46. #include "llvm/ADT/SmallVector.h"
  47. #include "llvm/ADT/StringRef.h"
  48. #include "llvm/Support/Casting.h"
  49. #include <algorithm>
  50. #include <deque>
  51. #include <iterator>
  52. #include <optional>
  53. using namespace clang;
  54. //===----------------------------------------------------------------------===//
  55. // Unreachable code analysis.
  56. //===----------------------------------------------------------------------===//
  57. namespace {
  58. class UnreachableCodeHandler : public reachable_code::Callback {
  59. Sema &S;
  60. SourceRange PreviousSilenceableCondVal;
  61. public:
  62. UnreachableCodeHandler(Sema &s) : S(s) {}
  63. void HandleUnreachable(reachable_code::UnreachableKind UK,
  64. SourceLocation L,
  65. SourceRange SilenceableCondVal,
  66. SourceRange R1,
  67. SourceRange R2) override {
  68. // Avoid reporting multiple unreachable code diagnostics that are
  69. // triggered by the same conditional value.
  70. if (PreviousSilenceableCondVal.isValid() &&
  71. SilenceableCondVal.isValid() &&
  72. PreviousSilenceableCondVal == SilenceableCondVal)
  73. return;
  74. PreviousSilenceableCondVal = SilenceableCondVal;
  75. unsigned diag = diag::warn_unreachable;
  76. switch (UK) {
  77. case reachable_code::UK_Break:
  78. diag = diag::warn_unreachable_break;
  79. break;
  80. case reachable_code::UK_Return:
  81. diag = diag::warn_unreachable_return;
  82. break;
  83. case reachable_code::UK_Loop_Increment:
  84. diag = diag::warn_unreachable_loop_increment;
  85. break;
  86. case reachable_code::UK_Other:
  87. break;
  88. }
  89. S.Diag(L, diag) << R1 << R2;
  90. SourceLocation Open = SilenceableCondVal.getBegin();
  91. if (Open.isValid()) {
  92. SourceLocation Close = SilenceableCondVal.getEnd();
  93. Close = S.getLocForEndOfToken(Close);
  94. if (Close.isValid()) {
  95. S.Diag(Open, diag::note_unreachable_silence)
  96. << FixItHint::CreateInsertion(Open, "/* DISABLES CODE */ (")
  97. << FixItHint::CreateInsertion(Close, ")");
  98. }
  99. }
  100. }
  101. };
  102. } // anonymous namespace
  103. /// CheckUnreachable - Check for unreachable code.
  104. static void CheckUnreachable(Sema &S, AnalysisDeclContext &AC) {
  105. // As a heuristic prune all diagnostics not in the main file. Currently
  106. // the majority of warnings in headers are false positives. These
  107. // are largely caused by configuration state, e.g. preprocessor
  108. // defined code, etc.
  109. //
  110. // Note that this is also a performance optimization. Analyzing
  111. // headers many times can be expensive.
  112. if (!S.getSourceManager().isInMainFile(AC.getDecl()->getBeginLoc()))
  113. return;
  114. UnreachableCodeHandler UC(S);
  115. reachable_code::FindUnreachableCode(AC, S.getPreprocessor(), UC);
  116. }
  117. namespace {
  118. /// Warn on logical operator errors in CFGBuilder
  119. class LogicalErrorHandler : public CFGCallback {
  120. Sema &S;
  121. public:
  122. LogicalErrorHandler(Sema &S) : S(S) {}
  123. static bool HasMacroID(const Expr *E) {
  124. if (E->getExprLoc().isMacroID())
  125. return true;
  126. // Recurse to children.
  127. for (const Stmt *SubStmt : E->children())
  128. if (const Expr *SubExpr = dyn_cast_or_null<Expr>(SubStmt))
  129. if (HasMacroID(SubExpr))
  130. return true;
  131. return false;
  132. }
  133. void compareAlwaysTrue(const BinaryOperator *B, bool isAlwaysTrue) override {
  134. if (HasMacroID(B))
  135. return;
  136. SourceRange DiagRange = B->getSourceRange();
  137. S.Diag(B->getExprLoc(), diag::warn_tautological_overlap_comparison)
  138. << DiagRange << isAlwaysTrue;
  139. }
  140. void compareBitwiseEquality(const BinaryOperator *B,
  141. bool isAlwaysTrue) override {
  142. if (HasMacroID(B))
  143. return;
  144. SourceRange DiagRange = B->getSourceRange();
  145. S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_always)
  146. << DiagRange << isAlwaysTrue;
  147. }
  148. void compareBitwiseOr(const BinaryOperator *B) override {
  149. if (HasMacroID(B))
  150. return;
  151. SourceRange DiagRange = B->getSourceRange();
  152. S.Diag(B->getExprLoc(), diag::warn_comparison_bitwise_or) << DiagRange;
  153. }
  154. static bool hasActiveDiagnostics(DiagnosticsEngine &Diags,
  155. SourceLocation Loc) {
  156. return !Diags.isIgnored(diag::warn_tautological_overlap_comparison, Loc) ||
  157. !Diags.isIgnored(diag::warn_comparison_bitwise_or, Loc);
  158. }
  159. };
  160. } // anonymous namespace
  161. //===----------------------------------------------------------------------===//
  162. // Check for infinite self-recursion in functions
  163. //===----------------------------------------------------------------------===//
  164. // Returns true if the function is called anywhere within the CFGBlock.
  165. // For member functions, the additional condition of being call from the
  166. // this pointer is required.
  167. static bool hasRecursiveCallInPath(const FunctionDecl *FD, CFGBlock &Block) {
  168. // Process all the Stmt's in this block to find any calls to FD.
  169. for (const auto &B : Block) {
  170. if (B.getKind() != CFGElement::Statement)
  171. continue;
  172. const CallExpr *CE = dyn_cast<CallExpr>(B.getAs<CFGStmt>()->getStmt());
  173. if (!CE || !CE->getCalleeDecl() ||
  174. CE->getCalleeDecl()->getCanonicalDecl() != FD)
  175. continue;
  176. // Skip function calls which are qualified with a templated class.
  177. if (const DeclRefExpr *DRE =
  178. dyn_cast<DeclRefExpr>(CE->getCallee()->IgnoreParenImpCasts())) {
  179. if (NestedNameSpecifier *NNS = DRE->getQualifier()) {
  180. if (NNS->getKind() == NestedNameSpecifier::TypeSpec &&
  181. isa<TemplateSpecializationType>(NNS->getAsType())) {
  182. continue;
  183. }
  184. }
  185. }
  186. const CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(CE);
  187. if (!MCE || isa<CXXThisExpr>(MCE->getImplicitObjectArgument()) ||
  188. !MCE->getMethodDecl()->isVirtual())
  189. return true;
  190. }
  191. return false;
  192. }
  193. // Returns true if every path from the entry block passes through a call to FD.
  194. static bool checkForRecursiveFunctionCall(const FunctionDecl *FD, CFG *cfg) {
  195. llvm::SmallPtrSet<CFGBlock *, 16> Visited;
  196. llvm::SmallVector<CFGBlock *, 16> WorkList;
  197. // Keep track of whether we found at least one recursive path.
  198. bool foundRecursion = false;
  199. const unsigned ExitID = cfg->getExit().getBlockID();
  200. // Seed the work list with the entry block.
  201. WorkList.push_back(&cfg->getEntry());
  202. while (!WorkList.empty()) {
  203. CFGBlock *Block = WorkList.pop_back_val();
  204. for (auto I = Block->succ_begin(), E = Block->succ_end(); I != E; ++I) {
  205. if (CFGBlock *SuccBlock = *I) {
  206. if (!Visited.insert(SuccBlock).second)
  207. continue;
  208. // Found a path to the exit node without a recursive call.
  209. if (ExitID == SuccBlock->getBlockID())
  210. return false;
  211. // If the successor block contains a recursive call, end analysis there.
  212. if (hasRecursiveCallInPath(FD, *SuccBlock)) {
  213. foundRecursion = true;
  214. continue;
  215. }
  216. WorkList.push_back(SuccBlock);
  217. }
  218. }
  219. }
  220. return foundRecursion;
  221. }
  222. static void checkRecursiveFunction(Sema &S, const FunctionDecl *FD,
  223. const Stmt *Body, AnalysisDeclContext &AC) {
  224. FD = FD->getCanonicalDecl();
  225. // Only run on non-templated functions and non-templated members of
  226. // templated classes.
  227. if (FD->getTemplatedKind() != FunctionDecl::TK_NonTemplate &&
  228. FD->getTemplatedKind() != FunctionDecl::TK_MemberSpecialization)
  229. return;
  230. CFG *cfg = AC.getCFG();
  231. if (!cfg) return;
  232. // If the exit block is unreachable, skip processing the function.
  233. if (cfg->getExit().pred_empty())
  234. return;
  235. // Emit diagnostic if a recursive function call is detected for all paths.
  236. if (checkForRecursiveFunctionCall(FD, cfg))
  237. S.Diag(Body->getBeginLoc(), diag::warn_infinite_recursive_function);
  238. }
  239. //===----------------------------------------------------------------------===//
  240. // Check for throw in a non-throwing function.
  241. //===----------------------------------------------------------------------===//
/// Determine whether an exception thrown by E, unwinding from ThrowBlock,
/// can reach ExitBlock.
///
/// Performs a worklist search over the unwind (successor) edges starting at
/// ThrowBlock: reaching the CFG exit means the exception escapes; reaching a
/// catch handler that can catch E's type terminates that unwind path.
static bool throwEscapes(Sema &S, const CXXThrowExpr *E, CFGBlock &ThrowBlock,
                         CFG *Body) {
  SmallVector<CFGBlock *, 16> Stack;
  // Queued marks blocks already pushed so each is visited at most once.
  llvm::BitVector Queued(Body->getNumBlockIDs());
  Stack.push_back(&ThrowBlock);
  Queued[ThrowBlock.getBlockID()] = true;
  while (!Stack.empty()) {
    CFGBlock &UnwindBlock = *Stack.back();
    Stack.pop_back();
    for (auto &Succ : UnwindBlock.succs()) {
      if (!Succ.isReachable() || Queued[Succ->getBlockID()])
        continue;
      // Unwinding into the exit block means the exception leaves the
      // function body.
      if (Succ->getBlockID() == Body->getExit().getBlockID())
        return true;
      if (auto *Catch =
              dyn_cast_or_null<CXXCatchStmt>(Succ->getLabel())) {
        QualType Caught = Catch->getCaughtType();
        if (Caught.isNull() || // catch (...) catches everything
            !E->getSubExpr() || // throw; is considered caught by any handler
            S.handlerCanCatch(Caught, E->getSubExpr()->getType()))
          // Exception doesn't escape via this path: a matching handler was
          // found, so stop scanning this block's successors.
          break;
      } else {
        // Not a handler; keep unwinding through this successor.
        Stack.push_back(Succ);
        Queued[Succ->getBlockID()] = true;
      }
    }
  }
  return false;
}
  274. static void visitReachableThrows(
  275. CFG *BodyCFG,
  276. llvm::function_ref<void(const CXXThrowExpr *, CFGBlock &)> Visit) {
  277. llvm::BitVector Reachable(BodyCFG->getNumBlockIDs());
  278. clang::reachable_code::ScanReachableFromBlock(&BodyCFG->getEntry(), Reachable);
  279. for (CFGBlock *B : *BodyCFG) {
  280. if (!Reachable[B->getBlockID()])
  281. continue;
  282. for (CFGElement &E : *B) {
  283. std::optional<CFGStmt> S = E.getAs<CFGStmt>();
  284. if (!S)
  285. continue;
  286. if (auto *Throw = dyn_cast<CXXThrowExpr>(S->getStmt()))
  287. Visit(Throw, *B);
  288. }
  289. }
  290. }
  291. static void EmitDiagForCXXThrowInNonThrowingFunc(Sema &S, SourceLocation OpLoc,
  292. const FunctionDecl *FD) {
  293. if (!S.getSourceManager().isInSystemHeader(OpLoc) &&
  294. FD->getTypeSourceInfo()) {
  295. S.Diag(OpLoc, diag::warn_throw_in_noexcept_func) << FD;
  296. if (S.getLangOpts().CPlusPlus11 &&
  297. (isa<CXXDestructorDecl>(FD) ||
  298. FD->getDeclName().getCXXOverloadedOperator() == OO_Delete ||
  299. FD->getDeclName().getCXXOverloadedOperator() == OO_Array_Delete)) {
  300. if (const auto *Ty = FD->getTypeSourceInfo()->getType()->
  301. getAs<FunctionProtoType>())
  302. S.Diag(FD->getLocation(), diag::note_throw_in_dtor)
  303. << !isa<CXXDestructorDecl>(FD) << !Ty->hasExceptionSpec()
  304. << FD->getExceptionSpecSourceRange();
  305. } else
  306. S.Diag(FD->getLocation(), diag::note_throw_in_function)
  307. << FD->getExceptionSpecSourceRange();
  308. }
  309. }
  310. static void checkThrowInNonThrowingFunc(Sema &S, const FunctionDecl *FD,
  311. AnalysisDeclContext &AC) {
  312. CFG *BodyCFG = AC.getCFG();
  313. if (!BodyCFG)
  314. return;
  315. if (BodyCFG->getExit().pred_empty())
  316. return;
  317. visitReachableThrows(BodyCFG, [&](const CXXThrowExpr *Throw, CFGBlock &Block) {
  318. if (throwEscapes(S, Throw, Block, BodyCFG))
  319. EmitDiagForCXXThrowInNonThrowingFunc(S, Throw->getThrowLoc(), FD);
  320. });
  321. }
  322. static bool isNoexcept(const FunctionDecl *FD) {
  323. const auto *FPT = FD->getType()->castAs<FunctionProtoType>();
  324. if (FPT->isNothrow() || FD->hasAttr<NoThrowAttr>())
  325. return true;
  326. return false;
  327. }
  328. //===----------------------------------------------------------------------===//
  329. // Check for missing return value.
  330. //===----------------------------------------------------------------------===//
/// Classifies how control can leave the end of a statement or function body;
/// computed by CheckFallThrough below.
enum ControlFlowKind {
  UnknownFallThrough,      // No CFG was available; nothing can be concluded.
  NeverFallThrough,        // Never falls off the end, but may return.
  MaybeFallThrough,        // Might or might not fall off the end.
  AlwaysFallThrough,       // Always falls off the end.
  NeverFallThroughOrReturn // Neither falls off the end nor returns.
};
/// CheckFallThrough - Check that we don't fall off the end of a
/// Statement that should return a value.
///
/// \returns AlwaysFallThrough iff we always fall off the end of the statement,
/// MaybeFallThrough iff we might or might not fall off the end,
/// NeverFallThroughOrReturn iff we never fall off the end of the statement or
/// return.  We assume NeverFallThrough iff we never fall off the end of the
/// statement but we may return.  We assume that functions not marked noreturn
/// will return.
static ControlFlowKind CheckFallThrough(AnalysisDeclContext &AC) {
  CFG *cfg = AC.getCFG();
  if (!cfg) return UnknownFallThrough;

  // The CFG leaves in dead things, and we don't want the dead code paths to
  // confuse us, so we mark all live things first.
  llvm::BitVector live(cfg->getNumBlockIDs());
  unsigned count = reachable_code::ScanReachableFromBlock(&cfg->getEntry(),
                                                          live);

  bool AddEHEdges = AC.getAddEHEdges();
  if (!AddEHEdges && count != cfg->getNumBlockIDs())
    // When there are things remaining dead, and we didn't add EH edges
    // from CallExprs to the catch clauses, we have to go back and
    // mark them as live.
    for (const auto *B : *cfg) {
      if (!live[B->getBlockID()]) {
        if (B->pred_begin() == B->pred_end()) {
          const Stmt *Term = B->getTerminatorStmt();
          if (Term && isa<CXXTryStmt>(Term))
            // When not adding EH edges from calls, catch clauses
            // can otherwise seem dead.  Avoid noting them as dead.
            count += reachable_code::ScanReachableFromBlock(B, live);
          continue;
        }
      }
    }

  // Now we know what is live, we check the live predecessors of the exit
  // block and look for fall through paths, being careful to ignore normal
  // returns, and exceptional paths.
  bool HasLiveReturn = false;
  bool HasFakeEdge = false;
  bool HasPlainEdge = false;
  bool HasAbnormalEdge = false;

  // Ignore default cases that aren't likely to be reachable because all
  // enums in a switch(X) have explicit case statements.
  CFGBlock::FilterOptions FO;
  FO.IgnoreDefaultsWithCoveredEnums = 1;

  for (CFGBlock::filtered_pred_iterator I =
           cfg->getExit().filtered_pred_start_end(FO);
       I.hasMore(); ++I) {
    const CFGBlock &B = **I;
    if (!live[B.getBlockID()])
      continue;

    // Skip blocks which contain an element marked as no-return.  They don't
    // represent actually viable edges into the exit block, so mark them as
    // abnormal.
    if (B.hasNoReturnElement()) {
      HasAbnormalEdge = true;
      continue;
    }

    // Destructors can appear after the 'return' in the CFG.  This is
    // normal.  We need to look past the destructors for the return
    // statement (if it exists).
    CFGBlock::const_reverse_iterator ri = B.rbegin(), re = B.rend();

    for ( ; ri != re ; ++ri)
      if (ri->getAs<CFGStmt>())
        break;

    // No more CFGElements in the block?
    if (ri == re) {
      const Stmt *Term = B.getTerminatorStmt();
      if (Term && (isa<CXXTryStmt>(Term) || isa<ObjCAtTryStmt>(Term))) {
        HasAbnormalEdge = true;
        continue;
      }
      // A labeled empty statement, or the entry block...
      HasPlainEdge = true;
      continue;
    }

    CFGStmt CS = ri->castAs<CFGStmt>();
    const Stmt *S = CS.getStmt();
    if (isa<ReturnStmt>(S) || isa<CoreturnStmt>(S)) {
      // A normal (or coroutine) return: not a fall-through.
      HasLiveReturn = true;
      continue;
    }
    if (isa<ObjCAtThrowStmt>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<CXXThrowExpr>(S)) {
      HasFakeEdge = true;
      continue;
    }
    if (isa<MSAsmStmt>(S)) {
      // TODO: Verify this is correct.
      HasFakeEdge = true;
      HasLiveReturn = true;
      continue;
    }
    if (isa<CXXTryStmt>(S)) {
      HasAbnormalEdge = true;
      continue;
    }
    // An edge into the exit block that is not a direct successor edge is
    // treated as abnormal rather than a plain fall-through.
    if (!llvm::is_contained(B.succs(), &cfg->getExit())) {
      HasAbnormalEdge = true;
      continue;
    }
    HasPlainEdge = true;
  }
  if (!HasPlainEdge) {
    if (HasLiveReturn)
      return NeverFallThrough;
    return NeverFallThroughOrReturn;
  }
  if (HasAbnormalEdge || HasFakeEdge || HasLiveReturn)
    return MaybeFallThrough;
  // This says AlwaysFallThrough for calls to functions that are not marked
  // noreturn, that don't return.  If people would like this warning to be
  // more accurate, such functions should be marked as noreturn.
  return AlwaysFallThrough;
}
  456. namespace {
  457. struct CheckFallThroughDiagnostics {
  458. unsigned diag_MaybeFallThrough_HasNoReturn;
  459. unsigned diag_MaybeFallThrough_ReturnsNonVoid;
  460. unsigned diag_AlwaysFallThrough_HasNoReturn;
  461. unsigned diag_AlwaysFallThrough_ReturnsNonVoid;
  462. unsigned diag_NeverFallThroughOrReturn;
  463. enum { Function, Block, Lambda, Coroutine } funMode;
  464. SourceLocation FuncLoc;
  465. static CheckFallThroughDiagnostics MakeForFunction(const Decl *Func) {
  466. CheckFallThroughDiagnostics D;
  467. D.FuncLoc = Func->getLocation();
  468. D.diag_MaybeFallThrough_HasNoReturn =
  469. diag::warn_falloff_noreturn_function;
  470. D.diag_MaybeFallThrough_ReturnsNonVoid =
  471. diag::warn_maybe_falloff_nonvoid_function;
  472. D.diag_AlwaysFallThrough_HasNoReturn =
  473. diag::warn_falloff_noreturn_function;
  474. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  475. diag::warn_falloff_nonvoid_function;
  476. // Don't suggest that virtual functions be marked "noreturn", since they
  477. // might be overridden by non-noreturn functions.
  478. bool isVirtualMethod = false;
  479. if (const CXXMethodDecl *Method = dyn_cast<CXXMethodDecl>(Func))
  480. isVirtualMethod = Method->isVirtual();
  481. // Don't suggest that template instantiations be marked "noreturn"
  482. bool isTemplateInstantiation = false;
  483. if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(Func))
  484. isTemplateInstantiation = Function->isTemplateInstantiation();
  485. if (!isVirtualMethod && !isTemplateInstantiation)
  486. D.diag_NeverFallThroughOrReturn =
  487. diag::warn_suggest_noreturn_function;
  488. else
  489. D.diag_NeverFallThroughOrReturn = 0;
  490. D.funMode = Function;
  491. return D;
  492. }
  493. static CheckFallThroughDiagnostics MakeForCoroutine(const Decl *Func) {
  494. CheckFallThroughDiagnostics D;
  495. D.FuncLoc = Func->getLocation();
  496. D.diag_MaybeFallThrough_HasNoReturn = 0;
  497. D.diag_MaybeFallThrough_ReturnsNonVoid =
  498. diag::warn_maybe_falloff_nonvoid_coroutine;
  499. D.diag_AlwaysFallThrough_HasNoReturn = 0;
  500. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  501. diag::warn_falloff_nonvoid_coroutine;
  502. D.funMode = Coroutine;
  503. return D;
  504. }
  505. static CheckFallThroughDiagnostics MakeForBlock() {
  506. CheckFallThroughDiagnostics D;
  507. D.diag_MaybeFallThrough_HasNoReturn =
  508. diag::err_noreturn_block_has_return_expr;
  509. D.diag_MaybeFallThrough_ReturnsNonVoid =
  510. diag::err_maybe_falloff_nonvoid_block;
  511. D.diag_AlwaysFallThrough_HasNoReturn =
  512. diag::err_noreturn_block_has_return_expr;
  513. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  514. diag::err_falloff_nonvoid_block;
  515. D.diag_NeverFallThroughOrReturn = 0;
  516. D.funMode = Block;
  517. return D;
  518. }
  519. static CheckFallThroughDiagnostics MakeForLambda() {
  520. CheckFallThroughDiagnostics D;
  521. D.diag_MaybeFallThrough_HasNoReturn =
  522. diag::err_noreturn_lambda_has_return_expr;
  523. D.diag_MaybeFallThrough_ReturnsNonVoid =
  524. diag::warn_maybe_falloff_nonvoid_lambda;
  525. D.diag_AlwaysFallThrough_HasNoReturn =
  526. diag::err_noreturn_lambda_has_return_expr;
  527. D.diag_AlwaysFallThrough_ReturnsNonVoid =
  528. diag::warn_falloff_nonvoid_lambda;
  529. D.diag_NeverFallThroughOrReturn = 0;
  530. D.funMode = Lambda;
  531. return D;
  532. }
  533. bool checkDiagnostics(DiagnosticsEngine &D, bool ReturnsVoid,
  534. bool HasNoReturn) const {
  535. if (funMode == Function) {
  536. return (ReturnsVoid ||
  537. D.isIgnored(diag::warn_maybe_falloff_nonvoid_function,
  538. FuncLoc)) &&
  539. (!HasNoReturn ||
  540. D.isIgnored(diag::warn_noreturn_function_has_return_expr,
  541. FuncLoc)) &&
  542. (!ReturnsVoid ||
  543. D.isIgnored(diag::warn_suggest_noreturn_block, FuncLoc));
  544. }
  545. if (funMode == Coroutine) {
  546. return (ReturnsVoid ||
  547. D.isIgnored(diag::warn_maybe_falloff_nonvoid_function, FuncLoc) ||
  548. D.isIgnored(diag::warn_maybe_falloff_nonvoid_coroutine,
  549. FuncLoc)) &&
  550. (!HasNoReturn);
  551. }
  552. // For blocks / lambdas.
  553. return ReturnsVoid && !HasNoReturn;
  554. }
  555. };
  556. } // anonymous namespace
  557. /// CheckFallThroughForBody - Check that we don't fall off the end of a
  558. /// function that should return a value. Check that we don't fall off the end
  559. /// of a noreturn function. We assume that functions and blocks not marked
  560. /// noreturn will return.
static void CheckFallThroughForBody(Sema &S, const Decl *D, const Stmt *Body,
                                    QualType BlockType,
                                    const CheckFallThroughDiagnostics &CD,
                                    AnalysisDeclContext &AC,
                                    sema::FunctionScopeInfo *FSI) {
  bool ReturnsVoid = false;
  bool HasNoReturn = false;
  bool IsCoroutine = FSI->isCoroutine();

  // Compute ReturnsVoid / HasNoReturn for each kind of declaration this
  // check runs on: a function (possibly a coroutine), an ObjC method, or a
  // block.
  if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
    if (const auto *CBody = dyn_cast<CoroutineBodyStmt>(Body))
      // A coroutine body with a fall-through handler is treated as if it
      // returned void for the purpose of this check.
      ReturnsVoid = CBody->getFallthroughHandler() != nullptr;
    else
      ReturnsVoid = FD->getReturnType()->isVoidType();
    HasNoReturn = FD->isNoReturn();
  }
  else if (const auto *MD = dyn_cast<ObjCMethodDecl>(D)) {
    ReturnsVoid = MD->getReturnType()->isVoidType();
    HasNoReturn = MD->hasAttr<NoReturnAttr>();
  }
  else if (isa<BlockDecl>(D)) {
    // For blocks the return/noreturn information lives on the block's
    // function type, reached through BlockType.
    if (const FunctionType *FT =
            BlockType->getPointeeType()->getAs<FunctionType>()) {
      if (FT->getReturnType()->isVoidType())
        ReturnsVoid = true;
      if (FT->getNoReturnAttr())
        HasNoReturn = true;
    }
  }

  DiagnosticsEngine &Diags = S.getDiagnostics();

  // Short circuit for compilation speed.
  if (CD.checkDiagnostics(Diags, ReturnsVoid, HasNoReturn))
    return;

  SourceLocation LBrace = Body->getBeginLoc(), RBrace = Body->getEndLoc();
  // For a coroutine, the promise type is attached to the diagnostic.
  auto EmitDiag = [&](SourceLocation Loc, unsigned DiagID) {
    if (IsCoroutine)
      S.Diag(Loc, DiagID) << FSI->CoroutinePromise->getType();
    else
      S.Diag(Loc, DiagID);
  };

  // cpu_dispatch functions permit empty function bodies for ICC compatibility.
  if (D->getAsFunction() && D->getAsFunction()->isCPUDispatchMultiVersion())
    return;

  // Either in a function body compound statement, or a function-try-block.
  switch (CheckFallThrough(AC)) {
  case UnknownFallThrough:
    break;

  case MaybeFallThrough:
    if (HasNoReturn)
      EmitDiag(RBrace, CD.diag_MaybeFallThrough_HasNoReturn);
    else if (!ReturnsVoid)
      EmitDiag(RBrace, CD.diag_MaybeFallThrough_ReturnsNonVoid);
    break;
  case AlwaysFallThrough:
    if (HasNoReturn)
      EmitDiag(RBrace, CD.diag_AlwaysFallThrough_HasNoReturn);
    else if (!ReturnsVoid)
      EmitDiag(RBrace, CD.diag_AlwaysFallThrough_ReturnsNonVoid);
    break;
  case NeverFallThroughOrReturn:
    // Only emitted when this mode provides the diagnostic (nonzero id) and
    // the declaration returns void without already being noreturn.
    if (ReturnsVoid && !HasNoReturn && CD.diag_NeverFallThroughOrReturn) {
      if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 0 << FD;
      } else if (const ObjCMethodDecl *MD = dyn_cast<ObjCMethodDecl>(D)) {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn) << 1 << MD;
      } else {
        S.Diag(LBrace, CD.diag_NeverFallThroughOrReturn);
      }
    }
    break;
  case NeverFallThrough:
    break;
  }
}
  634. //===----------------------------------------------------------------------===//
  635. // -Wuninitialized
  636. //===----------------------------------------------------------------------===//
  637. namespace {
  638. /// ContainsReference - A visitor class to search for references to
  639. /// a particular declaration (the needle) within any evaluated component of an
  640. /// expression (recursively).
  641. class ContainsReference : public ConstEvaluatedExprVisitor<ContainsReference> {
  642. bool FoundReference;
  643. const DeclRefExpr *Needle;
  644. public:
  645. typedef ConstEvaluatedExprVisitor<ContainsReference> Inherited;
  646. ContainsReference(ASTContext &Context, const DeclRefExpr *Needle)
  647. : Inherited(Context), FoundReference(false), Needle(Needle) {}
  648. void VisitExpr(const Expr *E) {
  649. // Stop evaluating if we already have a reference.
  650. if (FoundReference)
  651. return;
  652. Inherited::VisitExpr(E);
  653. }
  654. void VisitDeclRefExpr(const DeclRefExpr *E) {
  655. if (E == Needle)
  656. FoundReference = true;
  657. else
  658. Inherited::VisitDeclRefExpr(E);
  659. }
  660. bool doesContainReference() const { return FoundReference; }
  661. };
  662. } // anonymous namespace
  663. static bool SuggestInitializationFixit(Sema &S, const VarDecl *VD) {
  664. QualType VariableTy = VD->getType().getCanonicalType();
  665. if (VariableTy->isBlockPointerType() &&
  666. !VD->hasAttr<BlocksAttr>()) {
  667. S.Diag(VD->getLocation(), diag::note_block_var_fixit_add_initialization)
  668. << VD->getDeclName()
  669. << FixItHint::CreateInsertion(VD->getLocation(), "__block ");
  670. return true;
  671. }
  672. // Don't issue a fixit if there is already an initializer.
  673. if (VD->getInit())
  674. return false;
  675. // Don't suggest a fixit inside macros.
  676. if (VD->getEndLoc().isMacroID())
  677. return false;
  678. SourceLocation Loc = S.getLocForEndOfToken(VD->getEndLoc());
  679. // Suggest possible initialization (if any).
  680. std::string Init = S.getFixItZeroInitializerForType(VariableTy, Loc);
  681. if (Init.empty())
  682. return false;
  683. S.Diag(Loc, diag::note_var_fixit_add_initialization) << VD->getDeclName()
  684. << FixItHint::CreateInsertion(Loc, Init);
  685. return true;
  686. }
  687. /// Create a fixit to remove an if-like statement, on the assumption that its
  688. /// condition is CondVal.
  689. static void CreateIfFixit(Sema &S, const Stmt *If, const Stmt *Then,
  690. const Stmt *Else, bool CondVal,
  691. FixItHint &Fixit1, FixItHint &Fixit2) {
  692. if (CondVal) {
  693. // If condition is always true, remove all but the 'then'.
  694. Fixit1 = FixItHint::CreateRemoval(
  695. CharSourceRange::getCharRange(If->getBeginLoc(), Then->getBeginLoc()));
  696. if (Else) {
  697. SourceLocation ElseKwLoc = S.getLocForEndOfToken(Then->getEndLoc());
  698. Fixit2 =
  699. FixItHint::CreateRemoval(SourceRange(ElseKwLoc, Else->getEndLoc()));
  700. }
  701. } else {
  702. // If condition is always false, remove all but the 'else'.
  703. if (Else)
  704. Fixit1 = FixItHint::CreateRemoval(CharSourceRange::getCharRange(
  705. If->getBeginLoc(), Else->getBeginLoc()));
  706. else
  707. Fixit1 = FixItHint::CreateRemoval(If->getSourceRange());
  708. }
  709. }
  710. /// DiagUninitUse -- Helper function to produce a diagnostic for an
  711. /// uninitialized use of a variable.
static void DiagUninitUse(Sema &S, const VarDecl *VD, const UninitUse &Use,
                          bool IsCapturedByBlock) {
  bool Diagnosed = false;

  switch (Use.getKind()) {
  case UninitUse::Always:
    // Uninitialized on every path reaching the use: warn unconditionally.
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
    return;

  case UninitUse::AfterDecl:
  case UninitUse::AfterCall:
    // Point at the declaration (selector 4 = after decl, 5 = after call)
    // and attach a note at the use site.
    S.Diag(VD->getLocation(), diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << (Use.getKind() == UninitUse::AfterDecl ? 4 : 5)
        << const_cast<DeclContext *>(VD->getLexicalDeclContext())
        << VD->getSourceRange();
    S.Diag(Use.getUser()->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << Use.getUser()->getSourceRange();
    return;

  case UninitUse::Maybe:
  case UninitUse::Sometimes:
    // Carry on to report sometimes-uninitialized branches, if possible,
    // or a 'may be used uninitialized' diagnostic otherwise.
    break;
  }

  // Diagnose each branch which leads to a sometimes-uninitialized use.
  for (UninitUse::branch_iterator I = Use.branch_begin(), E = Use.branch_end();
       I != E; ++I) {
    assert(Use.getKind() == UninitUse::Sometimes);

    const Expr *User = Use.getUser();
    const Stmt *Term = I->Terminator;

    // Information used when building the diagnostic.
    unsigned DiagKind;
    StringRef Str;
    SourceRange Range;

    // FixIts to suppress the diagnostic by removing the dead condition.
    // For all binary terminators, branch 0 is taken if the condition is true,
    // and branch 1 is taken if the condition is false.
    int RemoveDiagKind = -1;
    const char *FixitStr =
        S.getLangOpts().CPlusPlus ? (I->Output ? "true" : "false")
                                  : (I->Output ? "1" : "0");
    FixItHint Fixit1, Fixit2;

    switch (Term ? Term->getStmtClass() : Stmt::DeclStmtClass) {
    default:
      // Don't know how to report this. Just fall back to 'may be used
      // uninitialized'. FIXME: Can this happen?
      continue;

    // "condition is true / condition is false".
    case Stmt::IfStmtClass: {
      const IfStmt *IS = cast<IfStmt>(Term);
      DiagKind = 0;
      Str = "if";
      Range = IS->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, IS, IS->getThen(), IS->getElse(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::ConditionalOperatorClass: {
      const ConditionalOperator *CO = cast<ConditionalOperator>(Term);
      DiagKind = 0;
      Str = "?:";
      Range = CO->getCond()->getSourceRange();
      RemoveDiagKind = 0;
      CreateIfFixit(S, CO, CO->getTrueExpr(), CO->getFalseExpr(),
                    I->Output, Fixit1, Fixit2);
      break;
    }
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(Term);
      // Only && and || terminators can be reported this way.
      if (!BO->isLogicalOp())
        continue;
      DiagKind = 0;
      Str = BO->getOpcodeStr();
      Range = BO->getLHS()->getSourceRange();
      RemoveDiagKind = 0;
      if ((BO->getOpcode() == BO_LAnd && I->Output) ||
          (BO->getOpcode() == BO_LOr && !I->Output))
        // true && y -> y, false || y -> y.
        Fixit1 = FixItHint::CreateRemoval(
            SourceRange(BO->getBeginLoc(), BO->getOperatorLoc()));
      else
        // false && y -> false, true || y -> true.
        Fixit1 = FixItHint::CreateReplacement(BO->getSourceRange(), FixitStr);
      break;
    }

    // "loop is entered / loop is exited".
    case Stmt::WhileStmtClass:
      DiagKind = 1;
      Str = "while";
      Range = cast<WhileStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::ForStmtClass:
      DiagKind = 1;
      Str = "for";
      Range = cast<ForStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      if (I->Output)
        Fixit1 = FixItHint::CreateRemoval(Range);
      else
        Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;
    case Stmt::CXXForRangeStmtClass:
      if (I->Output == 1) {
        // The use occurs if a range-based for loop's body never executes.
        // That may be impossible, and there's no syntactic fix for this,
        // so treat it as a 'may be uninitialized' case.
        continue;
      }
      DiagKind = 1;
      Str = "for";
      Range = cast<CXXForRangeStmt>(Term)->getRangeInit()->getSourceRange();
      break;

    // "condition is true / loop is exited".
    case Stmt::DoStmtClass:
      DiagKind = 2;
      Str = "do";
      Range = cast<DoStmt>(Term)->getCond()->getSourceRange();
      RemoveDiagKind = 1;
      Fixit1 = FixItHint::CreateReplacement(Range, FixitStr);
      break;

    // "switch case is taken".
    case Stmt::CaseStmtClass:
      DiagKind = 3;
      Str = "case";
      Range = cast<CaseStmt>(Term)->getLHS()->getSourceRange();
      break;
    case Stmt::DefaultStmtClass:
      DiagKind = 3;
      Str = "default";
      Range = cast<DefaultStmt>(Term)->getDefaultLoc();
      break;
    }

    S.Diag(Range.getBegin(), diag::warn_sometimes_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock << DiagKind
        << Str << I->Output << Range;
    S.Diag(User->getBeginLoc(), diag::note_uninit_var_use)
        << IsCapturedByBlock << User->getSourceRange();
    if (RemoveDiagKind != -1)
      S.Diag(Fixit1.RemoveRange.getBegin(), diag::note_uninit_fixit_remove_cond)
          << RemoveDiagKind << Str << I->Output << Fixit1 << Fixit2;

    Diagnosed = true;
  }

  // No branch-specific diagnostic could be produced; fall back to the
  // generic 'may be used uninitialized' warning.
  if (!Diagnosed)
    S.Diag(Use.getUser()->getBeginLoc(), diag::warn_maybe_uninit_var)
        << VD->getDeclName() << IsCapturedByBlock
        << Use.getUser()->getSourceRange();
}
  863. /// Diagnose uninitialized const reference usages.
  864. static bool DiagnoseUninitializedConstRefUse(Sema &S, const VarDecl *VD,
  865. const UninitUse &Use) {
  866. S.Diag(Use.getUser()->getBeginLoc(), diag::warn_uninit_const_reference)
  867. << VD->getDeclName() << Use.getUser()->getSourceRange();
  868. return true;
  869. }
  870. /// DiagnoseUninitializedUse -- Helper function for diagnosing uses of an
  871. /// uninitialized variable. This manages the different forms of diagnostic
  872. /// emitted for particular types of uses. Returns true if the use was diagnosed
  873. /// as a warning. If a particular use is one we omit warnings for, returns
  874. /// false.
  875. static bool DiagnoseUninitializedUse(Sema &S, const VarDecl *VD,
  876. const UninitUse &Use,
  877. bool alwaysReportSelfInit = false) {
  878. if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Use.getUser())) {
  879. // Inspect the initializer of the variable declaration which is
  880. // being referenced prior to its initialization. We emit
  881. // specialized diagnostics for self-initialization, and we
  882. // specifically avoid warning about self references which take the
  883. // form of:
  884. //
  885. // int x = x;
  886. //
  887. // This is used to indicate to GCC that 'x' is intentionally left
  888. // uninitialized. Proven code paths which access 'x' in
  889. // an uninitialized state after this will still warn.
  890. if (const Expr *Initializer = VD->getInit()) {
  891. if (!alwaysReportSelfInit && DRE == Initializer->IgnoreParenImpCasts())
  892. return false;
  893. ContainsReference CR(S.Context, DRE);
  894. CR.Visit(Initializer);
  895. if (CR.doesContainReference()) {
  896. S.Diag(DRE->getBeginLoc(), diag::warn_uninit_self_reference_in_init)
  897. << VD->getDeclName() << VD->getLocation() << DRE->getSourceRange();
  898. return true;
  899. }
  900. }
  901. DiagUninitUse(S, VD, Use, false);
  902. } else {
  903. const BlockExpr *BE = cast<BlockExpr>(Use.getUser());
  904. if (VD->getType()->isBlockPointerType() && !VD->hasAttr<BlocksAttr>())
  905. S.Diag(BE->getBeginLoc(),
  906. diag::warn_uninit_byref_blockvar_captured_by_block)
  907. << VD->getDeclName()
  908. << VD->getType().getQualifiers().hasObjCLifetime();
  909. else
  910. DiagUninitUse(S, VD, Use, true);
  911. }
  912. // Report where the variable was declared when the use wasn't within
  913. // the initializer of that declaration & we didn't already suggest
  914. // an initialization fixit.
  915. if (!SuggestInitializationFixit(S, VD))
  916. S.Diag(VD->getBeginLoc(), diag::note_var_declared_here)
  917. << VD->getDeclName();
  918. return true;
  919. }
  920. namespace {
// Collects [[fallthrough]]-annotated statements from a function body and
// checks, per switch-label CFG block, whether fall-through into the label
// is annotated. Annotations left unmatched at the end are invalidly placed.
class FallthroughMapper : public RecursiveASTVisitor<FallthroughMapper> {
public:
  FallthroughMapper(Sema &S)
      : FoundSwitchStatements(false),
        S(S) {
  }

  bool foundSwitchStatements() const { return FoundSwitchStatements; }

  // Remove a statement from the pending set; it must previously have been
  // recorded by VisitAttributedStmt.
  void markFallthroughVisited(const AttributedStmt *Stmt) {
    bool Found = FallthroughStmts.erase(Stmt);
    assert(Found);
    (void)Found;
  }

  typedef llvm::SmallPtrSet<const AttributedStmt*, 8> AttrStmts;

  // Annotations that were never matched to a switch-label fall-through.
  const AttrStmts &getFallthroughStmts() const {
    return FallthroughStmts;
  }

  // Seed ReachableBlocks with the entry block and all case-labelled blocks,
  // then complete it with a BFS over successor edges.
  void fillReachableBlocks(CFG *Cfg) {
    assert(ReachableBlocks.empty() && "ReachableBlocks already filled");
    std::deque<const CFGBlock *> BlockQueue;

    ReachableBlocks.insert(&Cfg->getEntry());
    BlockQueue.push_back(&Cfg->getEntry());
    // Mark all case blocks reachable to avoid problems with switching on
    // constants, covered enums, etc.
    // These blocks can contain fall-through annotations, and we don't want to
    // issue a warn_fallthrough_attr_unreachable for them.
    for (const auto *B : *Cfg) {
      const Stmt *L = B->getLabel();
      if (L && isa<SwitchCase>(L) && ReachableBlocks.insert(B).second)
        BlockQueue.push_back(B);
    }
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      for (const CFGBlock *B : P->succs()) {
        if (B && ReachableBlocks.insert(B).second)
          BlockQueue.push_back(B);
      }
    }
  }

  // Returns true if the case-labelled block B can be reached by falling
  // through from an unannotated predecessor. AnnotatedCnt is set to the
  // number of annotated fall-through edges found along the way.
  bool checkFallThroughIntoBlock(const CFGBlock &B, int &AnnotatedCnt,
                                 bool IsTemplateInstantiation) {
    assert(!ReachableBlocks.empty() && "ReachableBlocks empty");

    int UnannotatedCnt = 0;
    AnnotatedCnt = 0;
    // Walk backwards through B's predecessors.
    std::deque<const CFGBlock*> BlockQueue(B.pred_begin(), B.pred_end());
    while (!BlockQueue.empty()) {
      const CFGBlock *P = BlockQueue.front();
      BlockQueue.pop_front();
      if (!P) continue;

      const Stmt *Term = P->getTerminatorStmt();
      if (Term && isa<SwitchStmt>(Term))
        continue; // Switch statement, good.

      const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(P->getLabel());
      if (SW && SW->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Previous case label has no statements, good.

      const LabelStmt *L = dyn_cast_or_null<LabelStmt>(P->getLabel());
      if (L && L->getSubStmt() == B.getLabel() && P->begin() == P->end())
        continue; // Case label is preceded with a normal label, good.

      if (!ReachableBlocks.count(P)) {
        // Unreachable predecessor: look (last-to-first) for a fall-through
        // annotation among its statements.
        for (const CFGElement &Elem : llvm::reverse(*P)) {
          if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>()) {
            if (const AttributedStmt *AS = asFallThroughAttr(CS->getStmt())) {
              // Don't issue a warning for an unreachable fallthrough
              // attribute in template instantiations as it may not be
              // unreachable in all instantiations of the template.
              if (!IsTemplateInstantiation)
                S.Diag(AS->getBeginLoc(),
                       diag::warn_unreachable_fallthrough_attr);
              markFallthroughVisited(AS);
              ++AnnotatedCnt;
              break;
            }
            // Don't care about other unreachable statements.
          }
        }
        // If there are no unreachable statements, this may be a special
        // case in CFG:
        // case X: {
        //    A a;  // A has a destructor.
        //    break;
        // }
        // // <<<< This place is represented by a 'hanging' CFG block.
        // case Y:
        continue;
      }

      const Stmt *LastStmt = getLastStmt(*P);
      if (const AttributedStmt *AS = asFallThroughAttr(LastStmt)) {
        markFallthroughVisited(AS);
        ++AnnotatedCnt;
        continue; // Fallthrough annotation, good.
      }

      if (!LastStmt) { // This block contains no executable statements.
        // Traverse its predecessors.
        std::copy(P->pred_begin(), P->pred_end(),
                  std::back_inserter(BlockQueue));
        continue;
      }

      ++UnannotatedCnt;
    }
    return !!UnannotatedCnt;
  }

  // RecursiveASTVisitor setup.
  bool shouldWalkTypesOfTypeLocs() const { return false; }

  // Record every statement carrying a FallThroughAttr for later matching.
  bool VisitAttributedStmt(AttributedStmt *S) {
    if (asFallThroughAttr(S))
      FallthroughStmts.insert(S);
    return true;
  }

  bool VisitSwitchStmt(SwitchStmt *S) {
    FoundSwitchStatements = true;
    return true;
  }

  // We don't want to traverse local type declarations. We analyze their
  // methods separately.
  bool TraverseDecl(Decl *D) { return true; }

  // We analyze lambda bodies separately. Skip them here.
  bool TraverseLambdaExpr(LambdaExpr *LE) {
    // Traverse the captures, but not the body.
    for (const auto C : zip(LE->captures(), LE->capture_inits()))
      TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
    return true;
  }

private:
  // Returns S as an AttributedStmt if it carries a FallThroughAttr,
  // null otherwise.
  static const AttributedStmt *asFallThroughAttr(const Stmt *S) {
    if (const AttributedStmt *AS = dyn_cast_or_null<AttributedStmt>(S)) {
      if (hasSpecificAttr<FallThroughAttr>(AS->getAttrs()))
        return AS;
    }
    return nullptr;
  }

  // Returns the block's terminator if present, otherwise its last CFGStmt.
  static const Stmt *getLastStmt(const CFGBlock &B) {
    if (const Stmt *Term = B.getTerminatorStmt())
      return Term;
    for (const CFGElement &Elem : llvm::reverse(B))
      if (std::optional<CFGStmt> CS = Elem.getAs<CFGStmt>())
        return CS->getStmt();
    // Workaround to detect a statement thrown out by CFGBuilder:
    //   case X: {} case Y:
    //   case X: ; case Y:
    if (const SwitchCase *SW = dyn_cast_or_null<SwitchCase>(B.getLabel()))
      if (!isa<SwitchCase>(SW->getSubStmt()))
        return SW->getSubStmt();
    return nullptr;
  }

  bool FoundSwitchStatements;
  AttrStmts FallthroughStmts;
  Sema &S;
  llvm::SmallPtrSet<const CFGBlock *, 16> ReachableBlocks;
};
  1070. } // anonymous namespace
  1071. static StringRef getFallthroughAttrSpelling(Preprocessor &PP,
  1072. SourceLocation Loc) {
  1073. TokenValue FallthroughTokens[] = {
  1074. tok::l_square, tok::l_square,
  1075. PP.getIdentifierInfo("fallthrough"),
  1076. tok::r_square, tok::r_square
  1077. };
  1078. TokenValue ClangFallthroughTokens[] = {
  1079. tok::l_square, tok::l_square, PP.getIdentifierInfo("clang"),
  1080. tok::coloncolon, PP.getIdentifierInfo("fallthrough"),
  1081. tok::r_square, tok::r_square
  1082. };
  1083. bool PreferClangAttr = !PP.getLangOpts().CPlusPlus17 && !PP.getLangOpts().C2x;
  1084. StringRef MacroName;
  1085. if (PreferClangAttr)
  1086. MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
  1087. if (MacroName.empty())
  1088. MacroName = PP.getLastMacroWithSpelling(Loc, FallthroughTokens);
  1089. if (MacroName.empty() && !PreferClangAttr)
  1090. MacroName = PP.getLastMacroWithSpelling(Loc, ClangFallthroughTokens);
  1091. if (MacroName.empty()) {
  1092. if (!PreferClangAttr)
  1093. MacroName = "[[fallthrough]]";
  1094. else if (PP.getLangOpts().CPlusPlus)
  1095. MacroName = "[[clang::fallthrough]]";
  1096. else
  1097. MacroName = "__attribute__((fallthrough))";
  1098. }
  1099. return MacroName;
  1100. }
// Diagnose unannotated fall-through between switch labels, and fall-through
// annotations that are not placed before a switch label. When PerFunction
// is set, the check only runs for functions that already contain at least
// one fall-through annotation.
static void DiagnoseSwitchLabelsFallthrough(Sema &S, AnalysisDeclContext &AC,
                                            bool PerFunction) {
  FallthroughMapper FM(S);
  FM.TraverseStmt(AC.getBody());

  // Nothing to do without a switch in the body.
  if (!FM.foundSwitchStatements())
    return;

  // In per-function mode, only check bodies that already use annotations.
  if (PerFunction && FM.getFallthroughStmts().empty())
    return;

  CFG *Cfg = AC.getCFG();

  if (!Cfg)
    return;

  FM.fillReachableBlocks(Cfg);

  for (const CFGBlock *B : llvm::reverse(*Cfg)) {
    const Stmt *Label = B->getLabel();

    // Only blocks labelled by a case/default are of interest.
    if (!isa_and_nonnull<SwitchCase>(Label))
      continue;

    int AnnotatedCnt;

    bool IsTemplateInstantiation = false;
    if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(AC.getDecl()))
      IsTemplateInstantiation = Function->isTemplateInstantiation();
    if (!FM.checkFallThroughIntoBlock(*B, AnnotatedCnt,
                                      IsTemplateInstantiation))
      continue;

    S.Diag(Label->getBeginLoc(),
           PerFunction ? diag::warn_unannotated_fallthrough_per_function
                       : diag::warn_unannotated_fallthrough);

    // Offer fixits only when no annotated edge exists and the label is not
    // inside a macro (where an insertion would be unreliable).
    if (!AnnotatedCnt) {
      SourceLocation L = Label->getBeginLoc();
      if (L.isMacroID())
        continue;

      const Stmt *Term = B->getTerminatorStmt();
      // Skip empty cases.
      while (B->empty() && !Term && B->succ_size() == 1) {
        B = *B->succ_begin();
        Term = B->getTerminatorStmt();
      }
      // Do not suggest an annotation when the case body is just a break.
      if (!(B->empty() && Term && isa<BreakStmt>(Term))) {
        Preprocessor &PP = S.getPreprocessor();
        StringRef AnnotationSpelling = getFallthroughAttrSpelling(PP, L);
        SmallString<64> TextToInsert(AnnotationSpelling);
        TextToInsert += "; ";
        S.Diag(L, diag::note_insert_fallthrough_fixit)
            << AnnotationSpelling
            << FixItHint::CreateInsertion(L, TextToInsert);
      }
      S.Diag(L, diag::note_insert_break_fixit)
          << FixItHint::CreateInsertion(L, "break; ");
    }
  }

  // Any annotation never consumed by checkFallThroughIntoBlock is not in
  // a position that precedes a switch label.
  for (const auto *F : FM.getFallthroughStmts())
    S.Diag(F->getBeginLoc(), diag::err_fallthrough_attr_invalid_placement);
}
  1153. static bool isInLoop(const ASTContext &Ctx, const ParentMap &PM,
  1154. const Stmt *S) {
  1155. assert(S);
  1156. do {
  1157. switch (S->getStmtClass()) {
  1158. case Stmt::ForStmtClass:
  1159. case Stmt::WhileStmtClass:
  1160. case Stmt::CXXForRangeStmtClass:
  1161. case Stmt::ObjCForCollectionStmtClass:
  1162. return true;
  1163. case Stmt::DoStmtClass: {
  1164. Expr::EvalResult Result;
  1165. if (!cast<DoStmt>(S)->getCond()->EvaluateAsInt(Result, Ctx))
  1166. return true;
  1167. return Result.Val.getInt().getBoolValue();
  1168. }
  1169. default:
  1170. break;
  1171. }
  1172. } while ((S = PM.getParent(S)));
  1173. return false;
  1174. }
// Diagnose repeated unsafe reads of weak objects within a function body
// (warn_arc_repeated_use_of_weak / warn_arc_possible_repeated_use_of_weak),
// using the weak-object use map collected on the function scope info.
static void diagnoseRepeatedUseOfWeak(Sema &S,
                                      const sema::FunctionScopeInfo *CurFn,
                                      const Decl *D,
                                      const ParentMap &PM) {
  typedef sema::FunctionScopeInfo::WeakObjectProfileTy WeakObjectProfileTy;
  typedef sema::FunctionScopeInfo::WeakObjectUseMap WeakObjectUseMap;
  typedef sema::FunctionScopeInfo::WeakUseVector WeakUseVector;
  typedef std::pair<const Stmt *, WeakObjectUseMap::const_iterator>
      StmtUsesPair;

  ASTContext &Ctx = S.getASTContext();

  const WeakObjectUseMap &WeakMap = CurFn->getWeakObjectUses();

  // Extract all weak objects that are referenced more than once.
  SmallVector<StmtUsesPair, 8> UsesByStmt;
  for (WeakObjectUseMap::const_iterator I = WeakMap.begin(), E = WeakMap.end();
       I != E; ++I) {
    const WeakUseVector &Uses = I->second;

    // Find the first read of the weak object.
    WeakUseVector::const_iterator UI = Uses.begin(), UE = Uses.end();
    for ( ; UI != UE; ++UI) {
      if (UI->isUnsafe())
        break;
    }

    // If there were only writes to this object, don't warn.
    if (UI == UE)
      continue;

    // If there was only one read, followed by any number of writes, and the
    // read is not within a loop, don't warn. Additionally, don't warn in a
    // loop if the base object is a local variable -- local variables are often
    // changed in loops.
    if (UI == Uses.begin()) {
      WeakUseVector::const_iterator UI2 = UI;
      for (++UI2; UI2 != UE; ++UI2)
        if (UI2->isUnsafe())
          break;

      if (UI2 == UE) {
        if (!isInLoop(Ctx, PM, UI->getUseExpr()))
          continue;

        const WeakObjectProfileTy &Profile = I->first;
        if (!Profile.isExactProfile())
          continue;

        const NamedDecl *Base = Profile.getBase();
        if (!Base)
          Base = Profile.getProperty();
        assert(Base && "A profile always has a base or property.");

        if (const VarDecl *BaseVar = dyn_cast<VarDecl>(Base))
          if (BaseVar->hasLocalStorage() && !isa<ParmVarDecl>(Base))
            continue;
      }
    }

    UsesByStmt.push_back(StmtUsesPair(UI->getUseExpr(), I));
  }

  if (UsesByStmt.empty())
    return;

  // Sort by first use so that we emit the warnings in a deterministic order.
  SourceManager &SM = S.getSourceManager();
  llvm::sort(UsesByStmt,
             [&SM](const StmtUsesPair &LHS, const StmtUsesPair &RHS) {
               return SM.isBeforeInTranslationUnit(LHS.first->getBeginLoc(),
                                                   RHS.first->getBeginLoc());
             });

  // Classify the current code body for better warning text.
  // This enum should stay in sync with the cases in
  // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
  // FIXME: Should we use a common classification enum and the same set of
  // possibilities all throughout Sema?
  enum {
    Function,
    Method,
    Block,
    Lambda
  } FunctionKind;

  if (isa<sema::BlockScopeInfo>(CurFn))
    FunctionKind = Block;
  else if (isa<sema::LambdaScopeInfo>(CurFn))
    FunctionKind = Lambda;
  else if (isa<ObjCMethodDecl>(D))
    FunctionKind = Method;
  else
    FunctionKind = Function;

  // Iterate through the sorted problems and emit warnings for each.
  for (const auto &P : UsesByStmt) {
    const Stmt *FirstRead = P.first;
    const WeakObjectProfileTy &Key = P.second->first;
    const WeakUseVector &Uses = P.second->second;

    // For complicated expressions like 'a.b.c' and 'x.b.c', WeakObjectProfileTy
    // may not contain enough information to determine that these are different
    // properties. We can only be 100% sure of a repeated use in certain cases,
    // and we adjust the diagnostic kind accordingly so that the less certain
    // case can be turned off if it is too noisy.
    unsigned DiagKind;
    if (Key.isExactProfile())
      DiagKind = diag::warn_arc_repeated_use_of_weak;
    else
      DiagKind = diag::warn_arc_possible_repeated_use_of_weak;

    // Classify the weak object being accessed for better warning text.
    // This enum should stay in sync with the cases in
    // warn_arc_repeated_use_of_weak and warn_arc_possible_repeated_use_of_weak.
    enum {
      Variable,
      Property,
      ImplicitProperty,
      Ivar
    } ObjectKind;

    const NamedDecl *KeyProp = Key.getProperty();
    if (isa<VarDecl>(KeyProp))
      ObjectKind = Variable;
    else if (isa<ObjCPropertyDecl>(KeyProp))
      ObjectKind = Property;
    else if (isa<ObjCMethodDecl>(KeyProp))
      ObjectKind = ImplicitProperty;
    else if (isa<ObjCIvarDecl>(KeyProp))
      ObjectKind = Ivar;
    else
      llvm_unreachable("Unexpected weak object kind!");

    // Do not warn about IBOutlet weak property receivers being set to null
    // since they are typically only used from the main thread.
    if (const ObjCPropertyDecl *Prop = dyn_cast<ObjCPropertyDecl>(KeyProp))
      if (Prop->hasAttr<IBOutletAttr>())
        continue;

    // Show the first time the object was read.
    S.Diag(FirstRead->getBeginLoc(), DiagKind)
        << int(ObjectKind) << KeyProp << int(FunctionKind)
        << FirstRead->getSourceRange();

    // Print all the other accesses as notes.
    for (const auto &Use : Uses) {
      if (Use.getUseExpr() == FirstRead)
        continue;
      S.Diag(Use.getUseExpr()->getBeginLoc(),
             diag::note_arc_weak_also_accessed_here)
          << Use.getUseExpr()->getSourceRange();
    }
  }
}
  1308. namespace clang {
  1309. namespace {
  1310. typedef SmallVector<PartialDiagnosticAt, 1> OptionalNotes;
  1311. typedef std::pair<PartialDiagnosticAt, OptionalNotes> DelayedDiag;
  1312. typedef std::list<DelayedDiag> DiagList;
  1313. struct SortDiagBySourceLocation {
  1314. SourceManager &SM;
  1315. SortDiagBySourceLocation(SourceManager &SM) : SM(SM) {}
  1316. bool operator()(const DelayedDiag &left, const DelayedDiag &right) {
  1317. // Although this call will be slow, this is only called when outputting
  1318. // multiple warnings.
  1319. return SM.isBeforeInTranslationUnit(left.first.first, right.first.first);
  1320. }
  1321. };
  1322. } // anonymous namespace
  1323. } // namespace clang
  1324. namespace {
/// Collects uses of (possibly) uninitialized variables per variable and
/// reports them in a deterministic order when flushed.
class UninitValsDiagReporter : public UninitVariablesHandler {
  Sema &S;
  typedef SmallVector<UninitUse, 2> UsesVec;
  // Pointer to the recorded uses of a variable, plus a flag recording whether
  // the variable had an idiomatic self-initialization (e.g. "int x = x;").
  typedef llvm::PointerIntPair<UsesVec *, 1, bool> MappedType;
  // Prefer using MapVector to DenseMap, so that iteration order will be
  // the same as insertion order. This is needed to obtain a deterministic
  // order of diagnostics when calling flushDiagnostics().
  typedef llvm::MapVector<const VarDecl *, MappedType> UsesMap;

  UsesMap uses;
  UsesMap constRefUses;

public:
  UninitValsDiagReporter(Sema &S) : S(S) {}
  // Flushing from the destructor guarantees buffered diagnostics are emitted
  // even if the owner never calls flushDiagnostics() explicitly.
  ~UninitValsDiagReporter() override { flushDiagnostics(); }

  // Returns the (lazily created) entry for 'vd' in map 'um'.  The UsesVec is
  // heap-allocated here and released in flushDiagnostics().
  MappedType &getUses(UsesMap &um, const VarDecl *vd) {
    MappedType &V = um[vd];
    if (!V.getPointer())
      V.setPointer(new UsesVec());
    return V;
  }

  void handleUseOfUninitVariable(const VarDecl *vd,
                                 const UninitUse &use) override {
    getUses(uses, vd).getPointer()->push_back(use);
  }

  void handleConstRefUseOfUninitVariable(const VarDecl *vd,
                                         const UninitUse &use) override {
    getUses(constRefUses, vd).getPointer()->push_back(use);
  }

  // Record that 'vd' was initialized from itself.  The flag is set in both
  // maps so either kind of use can be redirected to the self-init below.
  void handleSelfInit(const VarDecl *vd) override {
    getUses(uses, vd).setInt(true);
    getUses(constRefUses, vd).setInt(true);
  }

  void flushDiagnostics() {
    for (const auto &P : uses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Specially handle the case where we have uses of an uninitialized
      // variable, but the root cause is an idiomatic self-init.  We want
      // to report the diagnostic at the self-init since that is the root cause.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        // Sort the uses by their SourceLocations.  While not strictly
        // guaranteed to produce them in line/column order, this will provide
        // a stable ordering.
        llvm::sort(*vec, [](const UninitUse &a, const UninitUse &b) {
          // Prefer a more confident report over a less confident one.
          if (a.getKind() != b.getKind())
            return a.getKind() > b.getKind();
          return a.getUser()->getBeginLoc() < b.getUser()->getBeginLoc();
        });

        for (const auto &U : *vec) {
          // If we have self-init, downgrade all uses to 'may be uninitialized'.
          UninitUse Use = hasSelfInit ? UninitUse(U.getUser(), false) : U;

          if (DiagnoseUninitializedUse(S, vd, Use))
            // Skip further diagnostics for this variable. We try to warn only
            // on the first point at which a variable is used uninitialized.
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    uses.clear();

    // Flush all const reference uses diags.
    for (const auto &P : constRefUses) {
      const VarDecl *vd = P.first;
      const MappedType &V = P.second;

      UsesVec *vec = V.getPointer();
      bool hasSelfInit = V.getInt();

      // Same self-init special case as above.
      if (!vec->empty() && hasSelfInit && hasAlwaysUninitializedUse(vec))
        DiagnoseUninitializedUse(S, vd,
                                 UninitUse(vd->getInit()->IgnoreParenCasts(),
                                           /* isAlwaysUninit */ true),
                                 /* alwaysReportSelfInit */ true);
      else {
        for (const auto &U : *vec) {
          // Warn only on the first diagnosed const-ref use, mirroring the
          // behavior for ordinary uses above.
          if (DiagnoseUninitializedConstRefUse(S, vd, U))
            break;
        }
      }

      // Release the uses vector.
      delete vec;
    }

    constRefUses.clear();
  }

private:
  // True if any recorded use is of an "always uninitialized" flavor
  // (UninitUse::Always, AfterCall, or AfterDecl).
  static bool hasAlwaysUninitializedUse(const UsesVec *vec) {
    return llvm::any_of(*vec, [](const UninitUse &U) {
      return U.getKind() == UninitUse::Always ||
             U.getKind() == UninitUse::AfterCall ||
             U.getKind() == UninitUse::AfterDecl;
    });
  }
};
  1424. /// Inter-procedural data for the called-once checker.
  1425. class CalledOnceInterProceduralData {
  1426. public:
  1427. // Add the delayed warning for the given block.
  1428. void addDelayedWarning(const BlockDecl *Block,
  1429. PartialDiagnosticAt &&Warning) {
  1430. DelayedBlockWarnings[Block].emplace_back(std::move(Warning));
  1431. }
  1432. // Report all of the warnings we've gathered for the given block.
  1433. void flushWarnings(const BlockDecl *Block, Sema &S) {
  1434. for (const PartialDiagnosticAt &Delayed : DelayedBlockWarnings[Block])
  1435. S.Diag(Delayed.first, Delayed.second);
  1436. discardWarnings(Block);
  1437. }
  1438. // Discard all of the warnings we've gathered for the given block.
  1439. void discardWarnings(const BlockDecl *Block) {
  1440. DelayedBlockWarnings.erase(Block);
  1441. }
  1442. private:
  1443. using DelayedDiagnostics = SmallVector<PartialDiagnosticAt, 2>;
  1444. llvm::DenseMap<const BlockDecl *, DelayedDiagnostics> DelayedBlockWarnings;
  1445. };
  1446. class CalledOnceCheckReporter : public CalledOnceCheckHandler {
  1447. public:
  1448. CalledOnceCheckReporter(Sema &S, CalledOnceInterProceduralData &Data)
  1449. : S(S), Data(Data) {}
  1450. void handleDoubleCall(const ParmVarDecl *Parameter, const Expr *Call,
  1451. const Expr *PrevCall, bool IsCompletionHandler,
  1452. bool Poised) override {
  1453. auto DiagToReport = IsCompletionHandler
  1454. ? diag::warn_completion_handler_called_twice
  1455. : diag::warn_called_once_gets_called_twice;
  1456. S.Diag(Call->getBeginLoc(), DiagToReport) << Parameter;
  1457. S.Diag(PrevCall->getBeginLoc(), diag::note_called_once_gets_called_twice)
  1458. << Poised;
  1459. }
  1460. void handleNeverCalled(const ParmVarDecl *Parameter,
  1461. bool IsCompletionHandler) override {
  1462. auto DiagToReport = IsCompletionHandler
  1463. ? diag::warn_completion_handler_never_called
  1464. : diag::warn_called_once_never_called;
  1465. S.Diag(Parameter->getBeginLoc(), DiagToReport)
  1466. << Parameter << /* Captured */ false;
  1467. }
  1468. void handleNeverCalled(const ParmVarDecl *Parameter, const Decl *Function,
  1469. const Stmt *Where, NeverCalledReason Reason,
  1470. bool IsCalledDirectly,
  1471. bool IsCompletionHandler) override {
  1472. auto DiagToReport = IsCompletionHandler
  1473. ? diag::warn_completion_handler_never_called_when
  1474. : diag::warn_called_once_never_called_when;
  1475. PartialDiagnosticAt Warning(Where->getBeginLoc(), S.PDiag(DiagToReport)
  1476. << Parameter
  1477. << IsCalledDirectly
  1478. << (unsigned)Reason);
  1479. if (const auto *Block = dyn_cast<BlockDecl>(Function)) {
  1480. // We shouldn't report these warnings on blocks immediately
  1481. Data.addDelayedWarning(Block, std::move(Warning));
  1482. } else {
  1483. S.Diag(Warning.first, Warning.second);
  1484. }
  1485. }
  1486. void handleCapturedNeverCalled(const ParmVarDecl *Parameter,
  1487. const Decl *Where,
  1488. bool IsCompletionHandler) override {
  1489. auto DiagToReport = IsCompletionHandler
  1490. ? diag::warn_completion_handler_never_called
  1491. : diag::warn_called_once_never_called;
  1492. S.Diag(Where->getBeginLoc(), DiagToReport)
  1493. << Parameter << /* Captured */ true;
  1494. }
  1495. void
  1496. handleBlockThatIsGuaranteedToBeCalledOnce(const BlockDecl *Block) override {
  1497. Data.flushWarnings(Block, S);
  1498. }
  1499. void handleBlockWithNoGuarantees(const BlockDecl *Block) override {
  1500. Data.discardWarnings(Block);
  1501. }
  1502. private:
  1503. Sema &S;
  1504. CalledOnceInterProceduralData &Data;
  1505. };
// Diagnostics whose enablement decides whether the called-once analysis
// should run at all (see shouldAnalyzeCalledOnceImpl).
constexpr unsigned CalledOnceWarnings[] = {
    diag::warn_called_once_never_called,
    diag::warn_called_once_never_called_when,
    diag::warn_called_once_gets_called_twice};

// The completion-handler-convention subset of the called-once diagnostics.
constexpr unsigned CompletionHandlerWarnings[]{
    diag::warn_completion_handler_never_called,
    diag::warn_completion_handler_never_called_when,
    diag::warn_completion_handler_called_twice};
  1514. bool shouldAnalyzeCalledOnceImpl(llvm::ArrayRef<unsigned> DiagIDs,
  1515. const DiagnosticsEngine &Diags,
  1516. SourceLocation At) {
  1517. return llvm::any_of(DiagIDs, [&Diags, At](unsigned DiagID) {
  1518. return !Diags.isIgnored(DiagID, At);
  1519. });
  1520. }
// True if any completion-handler-convention diagnostic is enabled at 'At'.
bool shouldAnalyzeCalledOnceConventions(const DiagnosticsEngine &Diags,
                                        SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CompletionHandlerWarnings, Diags, At);
}
// True if any called-once diagnostic — including the completion-handler
// convention subset — is enabled at 'At'.
bool shouldAnalyzeCalledOnceParameters(const DiagnosticsEngine &Diags,
                                       SourceLocation At) {
  return shouldAnalyzeCalledOnceImpl(CalledOnceWarnings, Diags, At) ||
         shouldAnalyzeCalledOnceConventions(Diags, At);
}
  1530. } // anonymous namespace
  1531. //===----------------------------------------------------------------------===//
  1532. // -Wthread-safety
  1533. //===----------------------------------------------------------------------===//
  1534. namespace clang {
  1535. namespace threadSafety {
  1536. namespace {
/// Buffers -Wthread-safety diagnostics during analysis and emits them,
/// sorted by source location, when emitDiagnostics() is called.
class ThreadSafetyReporter : public clang::threadSafety::ThreadSafetyHandler {
  Sema &S;
  DiagList Warnings;
  // Fallback locations used when a handler receives an invalid location.
  SourceLocation FunLocation, FunEndLocation;

  const FunctionDecl *CurrentFunction;
  bool Verbose;

  // In verbose mode, returns a note pointing at the current function;
  // otherwise returns no notes.
  OptionalNotes getNotes() const {
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      return OptionalNotes(1, FNote);
    }
    return OptionalNotes();
  }

  // Same, but with one caller-provided note first.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note) const {
    OptionalNotes ONS(1, Note);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  // Same, but with two caller-provided notes first.
  OptionalNotes getNotes(const PartialDiagnosticAt &Note1,
                         const PartialDiagnosticAt &Note2) const {
    OptionalNotes ONS;
    ONS.push_back(Note1);
    ONS.push_back(Note2);
    if (Verbose && CurrentFunction) {
      PartialDiagnosticAt FNote(CurrentFunction->getBody()->getBeginLoc(),
                                S.PDiag(diag::note_thread_warning_in_fun)
                                    << CurrentFunction);
      ONS.push_back(std::move(FNote));
    }
    return ONS;
  }

  // "Mutex acquired here" note, when the acquisition location is known.
  OptionalNotes makeLockedHereNote(SourceLocation LocLocked, StringRef Kind) {
    return LocLocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocLocked, S.PDiag(diag::note_locked_here) << Kind))
               : getNotes();
  }

  // "Mutex released here" note, when the release location is known.
  OptionalNotes makeUnlockedHereNote(SourceLocation LocUnlocked,
                                     StringRef Kind) {
    return LocUnlocked.isValid()
               ? getNotes(PartialDiagnosticAt(
                     LocUnlocked, S.PDiag(diag::note_unlocked_here) << Kind))
               : getNotes();
  }

public:
  ThreadSafetyReporter(Sema &S, SourceLocation FL, SourceLocation FEL)
      : S(S), FunLocation(FL), FunEndLocation(FEL),
        CurrentFunction(nullptr), Verbose(false) {}

  void setVerbose(bool b) { Verbose = b; }

  /// Emit all buffered diagnostics in order of source location.
  /// We need to output diagnostics produced while iterating through
  /// the lockset in deterministic order, so this function orders diagnostics
  /// and outputs them.
  void emitDiagnostics() {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  void handleInvalidLockExp(SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_cannot_resolve_lock)
                                         << Loc);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleUnmatchedUnlock(StringRef Kind, Name LockName, SourceLocation Loc,
                             SourceLocation LocPreviousUnlock) override {
    if (Loc.isInvalid())
      Loc = FunLocation;
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_unlock_but_no_lock)
                                         << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeUnlockedHereNote(LocPreviousUnlock, Kind));
  }

  void handleIncorrectUnlockKind(StringRef Kind, Name LockName,
                                 LockKind Expected, LockKind Received,
                                 SourceLocation LocLocked,
                                 SourceLocation LocUnlock) override {
    if (LocUnlock.isInvalid())
      LocUnlock = FunLocation;
    PartialDiagnosticAt Warning(
        LocUnlock, S.PDiag(diag::warn_unlock_kind_mismatch)
                       << Kind << LockName << Received << Expected);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleDoubleLock(StringRef Kind, Name LockName, SourceLocation LocLocked,
                        SourceLocation LocDoubleLock) override {
    if (LocDoubleLock.isInvalid())
      LocDoubleLock = FunLocation;
    PartialDiagnosticAt Warning(LocDoubleLock, S.PDiag(diag::warn_double_lock)
                                                   << Kind << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleMutexHeldEndOfScope(StringRef Kind, Name LockName,
                                 SourceLocation LocLocked,
                                 SourceLocation LocEndOfScope,
                                 LockErrorKind LEK) override {
    // Map each lock-error kind to its warning; a missing case here would
    // leave DiagID at 0.
    unsigned DiagID = 0;
    switch (LEK) {
    case LEK_LockedSomePredecessors:
      DiagID = diag::warn_lock_some_predecessors;
      break;
    case LEK_LockedSomeLoopIterations:
      DiagID = diag::warn_expecting_lock_held_on_loop;
      break;
    case LEK_LockedAtEndOfFunction:
      DiagID = diag::warn_no_unlock;
      break;
    case LEK_NotLockedAtEndOfFunction:
      DiagID = diag::warn_expecting_locked;
      break;
    }
    if (LocEndOfScope.isInvalid())
      LocEndOfScope = FunEndLocation;

    PartialDiagnosticAt Warning(LocEndOfScope, S.PDiag(DiagID) << Kind
                                                               << LockName);
    Warnings.emplace_back(std::move(Warning),
                          makeLockedHereNote(LocLocked, Kind));
  }

  void handleExclusiveAndShared(StringRef Kind, Name LockName,
                                SourceLocation Loc1,
                                SourceLocation Loc2) override {
    PartialDiagnosticAt Warning(Loc1,
                                S.PDiag(diag::warn_lock_exclusive_and_shared)
                                    << Kind << LockName);
    PartialDiagnosticAt Note(Loc2, S.PDiag(diag::note_lock_exclusive_and_shared)
                                       << Kind << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes(Note));
  }

  void handleNoMutexHeld(const NamedDecl *D, ProtectedOperationKind POK,
                         AccessKind AK, SourceLocation Loc) override {
    assert((POK == POK_VarAccess || POK == POK_VarDereference) &&
           "Only works for variables");
    unsigned DiagID = POK == POK_VarAccess ?
                          diag::warn_variable_requires_any_lock :
                          diag::warn_var_deref_requires_any_lock;
    PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID)
                                         << D << getLockKindFromAccessKind(AK));
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleMutexNotHeld(StringRef Kind, const NamedDecl *D,
                          ProtectedOperationKind POK, Name LockName,
                          LockKind LK, SourceLocation Loc,
                          Name *PossibleMatch) override {
    unsigned DiagID = 0;
    if (PossibleMatch) {
      // A capability with a similar name is held: report the "precise"
      // variant together with a near-match note.
      switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock_precise;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock_precise;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock_precise;
        break;
      case POK_PassByRef:
        DiagID = diag::warn_guarded_pass_by_reference;
        break;
      case POK_PtPassByRef:
        DiagID = diag::warn_pt_guarded_pass_by_reference;
        break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      PartialDiagnosticAt Note(Loc, S.PDiag(diag::note_found_mutex_near_match)
                                        << *PossibleMatch);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt VNote(D->getLocation(),
                                  S.PDiag(diag::note_guarded_by_declared_here)
                                      << D->getDeclName());
        Warnings.emplace_back(std::move(Warning), getNotes(Note, VNote));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
    } else {
      switch (POK) {
      case POK_VarAccess:
        DiagID = diag::warn_variable_requires_lock;
        break;
      case POK_VarDereference:
        DiagID = diag::warn_var_deref_requires_lock;
        break;
      case POK_FunctionCall:
        DiagID = diag::warn_fun_requires_lock;
        break;
      case POK_PassByRef:
        DiagID = diag::warn_guarded_pass_by_reference;
        break;
      case POK_PtPassByRef:
        DiagID = diag::warn_pt_guarded_pass_by_reference;
        break;
      }
      PartialDiagnosticAt Warning(Loc, S.PDiag(DiagID) << Kind
                                                       << D
                                                       << LockName << LK);
      if (Verbose && POK == POK_VarAccess) {
        PartialDiagnosticAt Note(D->getLocation(),
                                 S.PDiag(diag::note_guarded_by_declared_here));
        Warnings.emplace_back(std::move(Warning), getNotes(Note));
      } else
        Warnings.emplace_back(std::move(Warning), getNotes());
    }
  }

  void handleNegativeNotHeld(StringRef Kind, Name LockName, Name Neg,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
                                S.PDiag(diag::warn_acquire_requires_negative_cap)
                                    << Kind << LockName << Neg);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleNegativeNotHeld(const NamedDecl *D, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(
        Loc, S.PDiag(diag::warn_fun_requires_negative_cap) << D << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleFunExcludesLock(StringRef Kind, Name FunName, Name LockName,
                             SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_fun_excludes_mutex)
                                         << Kind << FunName << LockName);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleLockAcquiredBefore(StringRef Kind, Name L1Name, Name L2Name,
                                SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
        S.PDiag(diag::warn_acquired_before) << Kind << L1Name << L2Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  void handleBeforeAfterCycle(Name L1Name, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc,
        S.PDiag(diag::warn_acquired_before_after_cycle) << L1Name);
    Warnings.emplace_back(std::move(Warning), getNotes());
  }

  // Track the function being analyzed so verbose notes can point at it.
  void enterFunction(const FunctionDecl* FD) override {
    CurrentFunction = FD;
  }

  void leaveFunction(const FunctionDecl* FD) override {
    CurrentFunction = nullptr;
  }
};
  1788. } // anonymous namespace
  1789. } // namespace threadSafety
  1790. } // namespace clang
  1791. //===----------------------------------------------------------------------===//
  1792. // -Wconsumed
  1793. //===----------------------------------------------------------------------===//
  1794. namespace clang {
  1795. namespace consumed {
  1796. namespace {
/// Buffers -Wconsumed diagnostics from the consumed-objects analysis and
/// emits them sorted by source location.
class ConsumedWarningsHandler : public ConsumedWarningsHandlerBase {
  Sema &S;
  DiagList Warnings;

public:
  ConsumedWarningsHandler(Sema &S) : S(S) {}

  // Sort buffered warnings by location, then emit each one followed by its
  // attached notes.
  void emitDiagnostics() override {
    Warnings.sort(SortDiagBySourceLocation(S.getSourceManager()));
    for (const auto &Diag : Warnings) {
      S.Diag(Diag.first.first, Diag.first.second);
      for (const auto &Note : Diag.second)
        S.Diag(Note.first, Note.second);
    }
  }

  void warnLoopStateMismatch(SourceLocation Loc,
                             StringRef VariableName) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_loop_state_mismatch) <<
                                     VariableName);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnParamReturnTypestateMismatch(SourceLocation Loc,
                                        StringRef VariableName,
                                        StringRef ExpectedState,
                                        StringRef ObservedState) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(
      diag::warn_param_return_typestate_mismatch) << VariableName <<
        ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnParamTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
                                  StringRef ObservedState) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(
      diag::warn_param_typestate_mismatch) << ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnReturnTypestateForUnconsumableType(SourceLocation Loc,
                                              StringRef TypeName) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(
      diag::warn_return_typestate_for_unconsumable_type) << TypeName);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnReturnTypestateMismatch(SourceLocation Loc, StringRef ExpectedState,
                                   StringRef ObservedState) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(
      diag::warn_return_typestate_mismatch) << ExpectedState << ObservedState);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnUseOfTempInInvalidState(StringRef MethodName, StringRef State,
                                   SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(
      diag::warn_use_of_temp_in_invalid_state) << MethodName << State);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }

  void warnUseInInvalidState(StringRef MethodName, StringRef VariableName,
                             StringRef State, SourceLocation Loc) override {
    PartialDiagnosticAt Warning(Loc, S.PDiag(diag::warn_use_in_invalid_state) <<
                                     MethodName << VariableName << State);

    Warnings.emplace_back(std::move(Warning), OptionalNotes());
  }
};
  1856. } // anonymous namespace
  1857. } // namespace consumed
  1858. } // namespace clang
  1859. //===----------------------------------------------------------------------===//
  1860. // Unsafe buffer usage analysis.
  1861. //===----------------------------------------------------------------------===//
  1862. namespace {
  1863. class UnsafeBufferUsageReporter : public UnsafeBufferUsageHandler {
  1864. Sema &S;
  1865. public:
  1866. UnsafeBufferUsageReporter(Sema &S) : S(S) {}
  1867. void handleUnsafeOperation(const Stmt *Operation,
  1868. bool IsRelatedToDecl) override {
  1869. SourceLocation Loc;
  1870. SourceRange Range;
  1871. unsigned MsgParam = 0;
  1872. if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Operation)) {
  1873. Loc = ASE->getBase()->getExprLoc();
  1874. Range = ASE->getBase()->getSourceRange();
  1875. MsgParam = 2;
  1876. } else if (const auto *BO = dyn_cast<BinaryOperator>(Operation)) {
  1877. BinaryOperator::Opcode Op = BO->getOpcode();
  1878. if (Op == BO_Add || Op == BO_AddAssign || Op == BO_Sub ||
  1879. Op == BO_SubAssign) {
  1880. if (BO->getRHS()->getType()->isIntegerType()) {
  1881. Loc = BO->getLHS()->getExprLoc();
  1882. Range = BO->getLHS()->getSourceRange();
  1883. } else {
  1884. Loc = BO->getRHS()->getExprLoc();
  1885. Range = BO->getRHS()->getSourceRange();
  1886. }
  1887. MsgParam = 1;
  1888. }
  1889. } else if (const auto *UO = dyn_cast<UnaryOperator>(Operation)) {
  1890. UnaryOperator::Opcode Op = UO->getOpcode();
  1891. if (Op == UO_PreInc || Op == UO_PreDec || Op == UO_PostInc ||
  1892. Op == UO_PostDec) {
  1893. Loc = UO->getSubExpr()->getExprLoc();
  1894. Range = UO->getSubExpr()->getSourceRange();
  1895. MsgParam = 1;
  1896. }
  1897. } else {
  1898. Loc = Operation->getBeginLoc();
  1899. Range = Operation->getSourceRange();
  1900. }
  1901. if (IsRelatedToDecl)
  1902. S.Diag(Loc, diag::note_unsafe_buffer_operation) << MsgParam << Range;
  1903. else
  1904. S.Diag(Loc, diag::warn_unsafe_buffer_operation) << MsgParam << Range;
  1905. }
  1906. // FIXME: rename to handleUnsafeVariable
  1907. void handleFixableVariable(const VarDecl *Variable,
  1908. FixItList &&Fixes) override {
  1909. const auto &D =
  1910. S.Diag(Variable->getLocation(), diag::warn_unsafe_buffer_variable);
  1911. D << Variable;
  1912. D << (Variable->getType()->isPointerType() ? 0 : 1);
  1913. D << Variable->getSourceRange();
  1914. for (const auto &F : Fixes)
  1915. D << F;
  1916. }
  1917. };
  1918. } // namespace
  1919. //===----------------------------------------------------------------------===//
  1920. // AnalysisBasedWarnings - Worker object used by Sema to execute analysis-based
  1921. // warnings on a function, method, or block.
  1922. //===----------------------------------------------------------------------===//
// Default analysis policy: fall-through ("missing return") checking is on by
// default; the other analyses are opt-in and enabled based on which warnings
// are active (see the AnalysisBasedWarnings constructor).
sema::AnalysisBasedWarnings::Policy::Policy() {
  enableCheckFallThrough = 1;
  enableCheckUnreachable = 0;
  enableThreadSafetyAnalysis = 0;
  enableConsumedAnalysis = 0;
}
  1929. /// InterProceduralData aims to be a storage of whatever data should be passed
  1930. /// between analyses of different functions.
  1931. ///
  1932. /// At the moment, its primary goal is to make the information gathered during
  1933. /// the analysis of the blocks available during the analysis of the enclosing
  1934. /// function. This is important due to the fact that blocks are analyzed before
  1935. /// the enclosed function is even parsed fully, so it is not viable to access
  1936. /// anything in the outer scope while analyzing the block. On the other hand,
  1937. /// re-building CFG for blocks and re-analyzing them when we do have all the
  1938. /// information (i.e. during the analysis of the enclosing function) seems to be
  1939. /// ill-designed.
class sema::AnalysisBasedWarnings::InterProceduralData {
public:
  // It is important to analyze blocks within functions because it's a very
  // common pattern to capture completion handler parameters by blocks.
  //
  // NOTE: currently the only inter-procedural state is the called-once
  // checker's; add further members here as new analyses need to share data
  // across function boundaries.
  CalledOnceInterProceduralData CalledOnceData;
};
  1946. static unsigned isEnabled(DiagnosticsEngine &D, unsigned diag) {
  1947. return (unsigned)!D.isIgnored(diag, SourceLocation());
  1948. }
sema::AnalysisBasedWarnings::AnalysisBasedWarnings(Sema &s)
    : S(s), IPData(std::make_unique<InterProceduralData>()),
      NumFunctionsAnalyzed(0), NumFunctionsWithBadCFGs(0), NumCFGBlocks(0),
      MaxCFGBlocksPerFunction(0), NumUninitAnalysisFunctions(0),
      NumUninitAnalysisVariables(0), MaxUninitAnalysisVariablesPerFunction(0),
      NumUninitAnalysisBlockVisits(0),
      MaxUninitAnalysisBlockVisitsPerFunction(0) {
  using namespace diag;
  DiagnosticsEngine &D = S.getDiagnostics();

  // Each optional analysis is enabled in the default policy only when at
  // least one of its representative diagnostics is active somewhere.
  DefaultPolicy.enableCheckUnreachable =
      isEnabled(D, warn_unreachable) || isEnabled(D, warn_unreachable_break) ||
      isEnabled(D, warn_unreachable_return) ||
      isEnabled(D, warn_unreachable_loop_increment);

  DefaultPolicy.enableThreadSafetyAnalysis = isEnabled(D, warn_double_lock);

  DefaultPolicy.enableConsumedAnalysis =
      isEnabled(D, warn_use_in_invalid_state);
}
// Defined out-of-line so that unique_ptr<InterProceduralData>'s deleter is
// instantiated where InterProceduralData is a complete type (it is only
// forward-declared in the header).
sema::AnalysisBasedWarnings::~AnalysisBasedWarnings() = default;
  1968. static void flushDiagnostics(Sema &S, const sema::FunctionScopeInfo *fscope) {
  1969. for (const auto &D : fscope->PossiblyUnreachableDiags)
  1970. S.Diag(D.Loc, D.PD);
  1971. }
  1972. void clang::sema::AnalysisBasedWarnings::IssueWarnings(
  1973. sema::AnalysisBasedWarnings::Policy P, sema::FunctionScopeInfo *fscope,
  1974. const Decl *D, QualType BlockType) {
  1975. // We avoid doing analysis-based warnings when there are errors for
  1976. // two reasons:
  1977. // (1) The CFGs often can't be constructed (if the body is invalid), so
  1978. // don't bother trying.
  1979. // (2) The code already has problems; running the analysis just takes more
  1980. // time.
  1981. DiagnosticsEngine &Diags = S.getDiagnostics();
  1982. // Do not do any analysis if we are going to just ignore them.
  1983. if (Diags.getIgnoreAllWarnings() ||
  1984. (Diags.getSuppressSystemWarnings() &&
  1985. S.SourceMgr.isInSystemHeader(D->getLocation())))
  1986. return;
  1987. // For code in dependent contexts, we'll do this at instantiation time.
  1988. if (cast<DeclContext>(D)->isDependentContext())
  1989. return;
  1990. if (S.hasUncompilableErrorOccurred()) {
  1991. // Flush out any possibly unreachable diagnostics.
  1992. flushDiagnostics(S, fscope);
  1993. return;
  1994. }
  1995. const Stmt *Body = D->getBody();
  1996. assert(Body);
  1997. // Construct the analysis context with the specified CFG build options.
  1998. AnalysisDeclContext AC(/* AnalysisDeclContextManager */ nullptr, D);
  1999. // Don't generate EH edges for CallExprs as we'd like to avoid the n^2
  2000. // explosion for destructors that can result and the compile time hit.
  2001. AC.getCFGBuildOptions().PruneTriviallyFalseEdges = true;
  2002. AC.getCFGBuildOptions().AddEHEdges = false;
  2003. AC.getCFGBuildOptions().AddInitializers = true;
  2004. AC.getCFGBuildOptions().AddImplicitDtors = true;
  2005. AC.getCFGBuildOptions().AddTemporaryDtors = true;
  2006. AC.getCFGBuildOptions().AddCXXNewAllocator = false;
  2007. AC.getCFGBuildOptions().AddCXXDefaultInitExprInCtors = true;
  2008. // Force that certain expressions appear as CFGElements in the CFG. This
  2009. // is used to speed up various analyses.
  2010. // FIXME: This isn't the right factoring. This is here for initial
  2011. // prototyping, but we need a way for analyses to say what expressions they
  2012. // expect to always be CFGElements and then fill in the BuildOptions
  2013. // appropriately. This is essentially a layering violation.
  2014. if (P.enableCheckUnreachable || P.enableThreadSafetyAnalysis ||
  2015. P.enableConsumedAnalysis) {
  2016. // Unreachable code analysis and thread safety require a linearized CFG.
  2017. AC.getCFGBuildOptions().setAllAlwaysAdd();
  2018. }
  2019. else {
  2020. AC.getCFGBuildOptions()
  2021. .setAlwaysAdd(Stmt::BinaryOperatorClass)
  2022. .setAlwaysAdd(Stmt::CompoundAssignOperatorClass)
  2023. .setAlwaysAdd(Stmt::BlockExprClass)
  2024. .setAlwaysAdd(Stmt::CStyleCastExprClass)
  2025. .setAlwaysAdd(Stmt::DeclRefExprClass)
  2026. .setAlwaysAdd(Stmt::ImplicitCastExprClass)
  2027. .setAlwaysAdd(Stmt::UnaryOperatorClass);
  2028. }
  2029. // Install the logical handler.
  2030. std::optional<LogicalErrorHandler> LEH;
  2031. if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
  2032. LEH.emplace(S);
  2033. AC.getCFGBuildOptions().Observer = &*LEH;
  2034. }
  2035. // Emit delayed diagnostics.
  2036. if (!fscope->PossiblyUnreachableDiags.empty()) {
  2037. bool analyzed = false;
  2038. // Register the expressions with the CFGBuilder.
  2039. for (const auto &D : fscope->PossiblyUnreachableDiags) {
  2040. for (const Stmt *S : D.Stmts)
  2041. AC.registerForcedBlockExpression(S);
  2042. }
  2043. if (AC.getCFG()) {
  2044. analyzed = true;
  2045. for (const auto &D : fscope->PossiblyUnreachableDiags) {
  2046. bool AllReachable = true;
  2047. for (const Stmt *S : D.Stmts) {
  2048. const CFGBlock *block = AC.getBlockForRegisteredExpression(S);
  2049. CFGReverseBlockReachabilityAnalysis *cra =
  2050. AC.getCFGReachablityAnalysis();
  2051. // FIXME: We should be able to assert that block is non-null, but
  2052. // the CFG analysis can skip potentially-evaluated expressions in
  2053. // edge cases; see test/Sema/vla-2.c.
  2054. if (block && cra) {
  2055. // Can this block be reached from the entrance?
  2056. if (!cra->isReachable(&AC.getCFG()->getEntry(), block)) {
  2057. AllReachable = false;
  2058. break;
  2059. }
  2060. }
  2061. // If we cannot map to a basic block, assume the statement is
  2062. // reachable.
  2063. }
  2064. if (AllReachable)
  2065. S.Diag(D.Loc, D.PD);
  2066. }
  2067. }
  2068. if (!analyzed)
  2069. flushDiagnostics(S, fscope);
  2070. }
  2071. // Warning: check missing 'return'
  2072. if (P.enableCheckFallThrough) {
  2073. const CheckFallThroughDiagnostics &CD =
  2074. (isa<BlockDecl>(D)
  2075. ? CheckFallThroughDiagnostics::MakeForBlock()
  2076. : (isa<CXXMethodDecl>(D) &&
  2077. cast<CXXMethodDecl>(D)->getOverloadedOperator() == OO_Call &&
  2078. cast<CXXMethodDecl>(D)->getParent()->isLambda())
  2079. ? CheckFallThroughDiagnostics::MakeForLambda()
  2080. : (fscope->isCoroutine()
  2081. ? CheckFallThroughDiagnostics::MakeForCoroutine(D)
  2082. : CheckFallThroughDiagnostics::MakeForFunction(D)));
  2083. CheckFallThroughForBody(S, D, Body, BlockType, CD, AC, fscope);
  2084. }
  2085. // Warning: check for unreachable code
  2086. if (P.enableCheckUnreachable) {
  2087. // Only check for unreachable code on non-template instantiations.
  2088. // Different template instantiations can effectively change the control-flow
  2089. // and it is very difficult to prove that a snippet of code in a template
  2090. // is unreachable for all instantiations.
  2091. bool isTemplateInstantiation = false;
  2092. if (const FunctionDecl *Function = dyn_cast<FunctionDecl>(D))
  2093. isTemplateInstantiation = Function->isTemplateInstantiation();
  2094. if (!isTemplateInstantiation)
  2095. CheckUnreachable(S, AC);
  2096. }
  2097. // Check for thread safety violations
  2098. if (P.enableThreadSafetyAnalysis) {
  2099. SourceLocation FL = AC.getDecl()->getLocation();
  2100. SourceLocation FEL = AC.getDecl()->getEndLoc();
  2101. threadSafety::ThreadSafetyReporter Reporter(S, FL, FEL);
  2102. if (!Diags.isIgnored(diag::warn_thread_safety_beta, D->getBeginLoc()))
  2103. Reporter.setIssueBetaWarnings(true);
  2104. if (!Diags.isIgnored(diag::warn_thread_safety_verbose, D->getBeginLoc()))
  2105. Reporter.setVerbose(true);
  2106. threadSafety::runThreadSafetyAnalysis(AC, Reporter,
  2107. &S.ThreadSafetyDeclCache);
  2108. Reporter.emitDiagnostics();
  2109. }
  2110. // Check for violations of consumed properties.
  2111. if (P.enableConsumedAnalysis) {
  2112. consumed::ConsumedWarningsHandler WarningHandler(S);
  2113. consumed::ConsumedAnalyzer Analyzer(WarningHandler);
  2114. Analyzer.run(AC);
  2115. }
  2116. if (!Diags.isIgnored(diag::warn_uninit_var, D->getBeginLoc()) ||
  2117. !Diags.isIgnored(diag::warn_sometimes_uninit_var, D->getBeginLoc()) ||
  2118. !Diags.isIgnored(diag::warn_maybe_uninit_var, D->getBeginLoc()) ||
  2119. !Diags.isIgnored(diag::warn_uninit_const_reference, D->getBeginLoc())) {
  2120. if (CFG *cfg = AC.getCFG()) {
  2121. UninitValsDiagReporter reporter(S);
  2122. UninitVariablesAnalysisStats stats;
  2123. std::memset(&stats, 0, sizeof(UninitVariablesAnalysisStats));
  2124. runUninitializedVariablesAnalysis(*cast<DeclContext>(D), *cfg, AC,
  2125. reporter, stats);
  2126. if (S.CollectStats && stats.NumVariablesAnalyzed > 0) {
  2127. ++NumUninitAnalysisFunctions;
  2128. NumUninitAnalysisVariables += stats.NumVariablesAnalyzed;
  2129. NumUninitAnalysisBlockVisits += stats.NumBlockVisits;
  2130. MaxUninitAnalysisVariablesPerFunction =
  2131. std::max(MaxUninitAnalysisVariablesPerFunction,
  2132. stats.NumVariablesAnalyzed);
  2133. MaxUninitAnalysisBlockVisitsPerFunction =
  2134. std::max(MaxUninitAnalysisBlockVisitsPerFunction,
  2135. stats.NumBlockVisits);
  2136. }
  2137. }
  2138. }
  2139. // Check for violations of "called once" parameter properties.
  2140. if (S.getLangOpts().ObjC && !S.getLangOpts().CPlusPlus &&
  2141. shouldAnalyzeCalledOnceParameters(Diags, D->getBeginLoc())) {
  2142. if (AC.getCFG()) {
  2143. CalledOnceCheckReporter Reporter(S, IPData->CalledOnceData);
  2144. checkCalledOnceParameters(
  2145. AC, Reporter,
  2146. shouldAnalyzeCalledOnceConventions(Diags, D->getBeginLoc()));
  2147. }
  2148. }
  2149. bool FallThroughDiagFull =
  2150. !Diags.isIgnored(diag::warn_unannotated_fallthrough, D->getBeginLoc());
  2151. bool FallThroughDiagPerFunction = !Diags.isIgnored(
  2152. diag::warn_unannotated_fallthrough_per_function, D->getBeginLoc());
  2153. if (FallThroughDiagFull || FallThroughDiagPerFunction ||
  2154. fscope->HasFallthroughStmt) {
  2155. DiagnoseSwitchLabelsFallthrough(S, AC, !FallThroughDiagFull);
  2156. }
  2157. if (S.getLangOpts().ObjCWeak &&
  2158. !Diags.isIgnored(diag::warn_arc_repeated_use_of_weak, D->getBeginLoc()))
  2159. diagnoseRepeatedUseOfWeak(S, fscope, D, AC.getParentMap());
  2160. // Check for infinite self-recursion in functions
  2161. if (!Diags.isIgnored(diag::warn_infinite_recursive_function,
  2162. D->getBeginLoc())) {
  2163. if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D)) {
  2164. checkRecursiveFunction(S, FD, Body, AC);
  2165. }
  2166. }
  2167. // Check for throw out of non-throwing function.
  2168. if (!Diags.isIgnored(diag::warn_throw_in_noexcept_func, D->getBeginLoc()))
  2169. if (const FunctionDecl *FD = dyn_cast<FunctionDecl>(D))
  2170. if (S.getLangOpts().CPlusPlus && isNoexcept(FD))
  2171. checkThrowInNonThrowingFunc(S, FD, AC);
  2172. // Emit unsafe buffer usage warnings and fixits.
  2173. if (!Diags.isIgnored(diag::warn_unsafe_buffer_operation, D->getBeginLoc()) ||
  2174. !Diags.isIgnored(diag::warn_unsafe_buffer_variable, D->getBeginLoc())) {
  2175. UnsafeBufferUsageReporter R(S);
  2176. checkUnsafeBufferUsage(D, R);
  2177. }
  2178. // If none of the previous checks caused a CFG build, trigger one here
  2179. // for the logical error handler.
  2180. if (LogicalErrorHandler::hasActiveDiagnostics(Diags, D->getBeginLoc())) {
  2181. AC.getCFG();
  2182. }
  2183. // Collect statistics about the CFG if it was built.
  2184. if (S.CollectStats && AC.isCFGBuilt()) {
  2185. ++NumFunctionsAnalyzed;
  2186. if (CFG *cfg = AC.getCFG()) {
  2187. // If we successfully built a CFG for this context, record some more
  2188. // detail information about it.
  2189. NumCFGBlocks += cfg->getNumBlockIDs();
  2190. MaxCFGBlocksPerFunction = std::max(MaxCFGBlocksPerFunction,
  2191. cfg->getNumBlockIDs());
  2192. } else {
  2193. ++NumFunctionsWithBadCFGs;
  2194. }
  2195. }
  2196. }
  2197. void clang::sema::AnalysisBasedWarnings::PrintStats() const {
  2198. llvm::errs() << "\n*** Analysis Based Warnings Stats:\n";
  2199. unsigned NumCFGsBuilt = NumFunctionsAnalyzed - NumFunctionsWithBadCFGs;
  2200. unsigned AvgCFGBlocksPerFunction =
  2201. !NumCFGsBuilt ? 0 : NumCFGBlocks/NumCFGsBuilt;
  2202. llvm::errs() << NumFunctionsAnalyzed << " functions analyzed ("
  2203. << NumFunctionsWithBadCFGs << " w/o CFGs).\n"
  2204. << " " << NumCFGBlocks << " CFG blocks built.\n"
  2205. << " " << AvgCFGBlocksPerFunction
  2206. << " average CFG blocks per function.\n"
  2207. << " " << MaxCFGBlocksPerFunction
  2208. << " max CFG blocks per function.\n";
  2209. unsigned AvgUninitVariablesPerFunction = !NumUninitAnalysisFunctions ? 0
  2210. : NumUninitAnalysisVariables/NumUninitAnalysisFunctions;
  2211. unsigned AvgUninitBlockVisitsPerFunction = !NumUninitAnalysisFunctions ? 0
  2212. : NumUninitAnalysisBlockVisits/NumUninitAnalysisFunctions;
  2213. llvm::errs() << NumUninitAnalysisFunctions
  2214. << " functions analyzed for uninitialiazed variables\n"
  2215. << " " << NumUninitAnalysisVariables << " variables analyzed.\n"
  2216. << " " << AvgUninitVariablesPerFunction
  2217. << " average variables per function.\n"
  2218. << " " << MaxUninitAnalysisVariablesPerFunction
  2219. << " max variables per function.\n"
  2220. << " " << NumUninitAnalysisBlockVisits << " block visits.\n"
  2221. << " " << AvgUninitBlockVisitsPerFunction
  2222. << " average block visits per function.\n"
  2223. << " " << MaxUninitAnalysisBlockVisitsPerFunction
  2224. << " max block visits per function.\n";
  2225. }