//=== StackAddrEscapeChecker.cpp ----------------------------------*- C++ -*--//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file defines the stack address leak checker, which checks if an invalid
// stack address is stored into a global or heap location. See CERT DCL30-C.
//
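// For example (an illustrative sketch, not taken from this checker's tests),
// this is the kind of code the checker is meant to flag:
//
//   int *f() {
//     int X = 0;
//     return &X; // address of a local escapes to the caller
//   }
//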
//===----------------------------------------------------------------------===//

#include "clang/AST/ExprCXX.h"
#include "clang/Basic/SourceManager.h"
#include "clang/StaticAnalyzer/Checkers/BuiltinCheckerRegistration.h"
#include "clang/StaticAnalyzer/Core/BugReporter/BugType.h"
#include "clang/StaticAnalyzer/Core/Checker.h"
#include "clang/StaticAnalyzer/Core/CheckerManager.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CallEvent.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/CheckerContext.h"
#include "clang/StaticAnalyzer/Core/PathSensitive/ProgramState.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/Support/raw_ostream.h"

using namespace clang;
using namespace ento;

namespace {
class StackAddrEscapeChecker
    : public Checker<check::PreCall, check::PreStmt<ReturnStmt>,
                     check::EndFunction> {
  mutable IdentifierInfo *dispatch_semaphore_tII;
  mutable std::unique_ptr<BuiltinBug> BT_stackleak;
  mutable std::unique_ptr<BuiltinBug> BT_returnstack;
  mutable std::unique_ptr<BuiltinBug> BT_capturedstackasync;
  mutable std::unique_ptr<BuiltinBug> BT_capturedstackret;

public:
  enum CheckKind {
    CK_StackAddrEscapeChecker,
    CK_StackAddrAsyncEscapeChecker,
    CK_NumCheckKinds
  };

  DefaultBool ChecksEnabled[CK_NumCheckKinds];
  CheckerNameRef CheckNames[CK_NumCheckKinds];

  void checkPreCall(const CallEvent &Call, CheckerContext &C) const;
  void checkPreStmt(const ReturnStmt *RS, CheckerContext &C) const;
  void checkEndFunction(const ReturnStmt *RS, CheckerContext &Ctx) const;

private:
  void checkReturnedBlockCaptures(const BlockDataRegion &B,
                                  CheckerContext &C) const;
  void checkAsyncExecutedBlockCaptures(const BlockDataRegion &B,
                                       CheckerContext &C) const;
  void EmitStackError(CheckerContext &C, const MemRegion *R,
                      const Expr *RetE) const;
  bool isSemaphoreCaptured(const BlockDecl &B) const;
  static SourceRange genName(raw_ostream &os, const MemRegion *R,
                             ASTContext &Ctx);
  static SmallVector<const MemRegion *, 4>
  getCapturedStackRegions(const BlockDataRegion &B, CheckerContext &C);
  static bool isArcManagedBlock(const MemRegion *R, CheckerContext &C);
  static bool isNotInCurrentFrame(const MemRegion *R, CheckerContext &C);
};
} // namespace
SourceRange StackAddrEscapeChecker::genName(raw_ostream &os, const MemRegion *R,
                                            ASTContext &Ctx) {
  // Get the base region, stripping away fields and elements.
  R = R->getBaseRegion();
  SourceManager &SM = Ctx.getSourceManager();
  SourceRange range;
  os << "Address of ";

  // Check if the region is a compound literal.
  if (const auto *CR = dyn_cast<CompoundLiteralRegion>(R)) {
    const CompoundLiteralExpr *CL = CR->getLiteralExpr();
    os << "stack memory associated with a compound literal "
          "declared on line "
       << SM.getExpansionLineNumber(CL->getBeginLoc()) << " returned to caller";
    range = CL->getSourceRange();
  } else if (const auto *AR = dyn_cast<AllocaRegion>(R)) {
    const Expr *ARE = AR->getExpr();
    SourceLocation L = ARE->getBeginLoc();
    range = ARE->getSourceRange();
    os << "stack memory allocated by call to alloca() on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *BR = dyn_cast<BlockDataRegion>(R)) {
    const BlockDecl *BD = BR->getCodeRegion()->getDecl();
    SourceLocation L = BD->getBeginLoc();
    range = BD->getSourceRange();
    os << "stack-allocated block declared on line "
       << SM.getExpansionLineNumber(L);
  } else if (const auto *VR = dyn_cast<VarRegion>(R)) {
    os << "stack memory associated with local variable '" << VR->getString()
       << '\'';
    range = VR->getDecl()->getSourceRange();
  } else if (const auto *TOR = dyn_cast<CXXTempObjectRegion>(R)) {
    QualType Ty = TOR->getValueType().getLocalUnqualifiedType();
    os << "stack memory associated with temporary object of type '";
    Ty.print(os, Ctx.getPrintingPolicy());
    os << "'";
    range = TOR->getExpr()->getSourceRange();
  } else {
    llvm_unreachable("Invalid region in ReturnStackAddressChecker.");
  }

  return range;
}
bool StackAddrEscapeChecker::isArcManagedBlock(const MemRegion *R,
                                               CheckerContext &C) {
  assert(R && "MemRegion should not be null");
  return C.getASTContext().getLangOpts().ObjCAutoRefCount &&
         isa<BlockDataRegion>(R);
}

bool StackAddrEscapeChecker::isNotInCurrentFrame(const MemRegion *R,
                                                 CheckerContext &C) {
  const StackSpaceRegion *S = cast<StackSpaceRegion>(R->getMemorySpace());
  return S->getStackFrame() != C.getStackFrame();
}

bool StackAddrEscapeChecker::isSemaphoreCaptured(const BlockDecl &B) const {
  if (!dispatch_semaphore_tII)
    dispatch_semaphore_tII =
        &B.getASTContext().Idents.get("dispatch_semaphore_t");
  for (const auto &C : B.captures()) {
    const auto *T = C.getVariable()->getType()->getAs<TypedefType>();
    if (T && T->getDecl()->getIdentifier() == dispatch_semaphore_tII)
      return true;
  }
  return false;
}

SmallVector<const MemRegion *, 4>
StackAddrEscapeChecker::getCapturedStackRegions(const BlockDataRegion &B,
                                                CheckerContext &C) {
  SmallVector<const MemRegion *, 4> Regions;
  BlockDataRegion::referenced_vars_iterator I = B.referenced_vars_begin();
  BlockDataRegion::referenced_vars_iterator E = B.referenced_vars_end();
  for (; I != E; ++I) {
    SVal Val = C.getState()->getSVal(I.getCapturedRegion());
    const MemRegion *Region = Val.getAsRegion();
    if (Region && isa<StackSpaceRegion>(Region->getMemorySpace()))
      Regions.push_back(Region);
  }
  return Regions;
}
void StackAddrEscapeChecker::EmitStackError(CheckerContext &C,
                                            const MemRegion *R,
                                            const Expr *RetE) const {
  ExplodedNode *N = C.generateNonFatalErrorNode();
  if (!N)
    return;
  if (!BT_returnstack)
    BT_returnstack = std::make_unique<BuiltinBug>(
        CheckNames[CK_StackAddrEscapeChecker],
        "Return of address to stack-allocated memory");

  // Generate a report for this bug.
  SmallString<128> buf;
  llvm::raw_svector_ostream os(buf);
  SourceRange range = genName(os, R, C.getASTContext());
  os << " returned to caller";
  auto report =
      std::make_unique<PathSensitiveBugReport>(*BT_returnstack, os.str(), N);
  report->addRange(RetE->getSourceRange());
  if (range.isValid())
    report->addRange(range);
  C.emitReport(std::move(report));
}
void StackAddrEscapeChecker::checkAsyncExecutedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  // There is a not-too-uncommon idiom in which a block passed to
  // dispatch_async captures a semaphore, and the thread that called
  // dispatch_async then blocks in dispatch_semaphore_wait until the block
  // has finished executing. To avoid false positives (for now), we ignore
  // all blocks that capture a variable of type "dispatch_semaphore_t".
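  // An illustrative sketch of that idiom ('Queue', 'LocalBuffer' and
  // 'computeOnStackData' are placeholder names, not part of this checker):
  //
  //   dispatch_semaphore_t Sem = dispatch_semaphore_create(0);
  //   dispatch_async(Queue, ^{
  //     computeOnStackData(&LocalBuffer); // captures stack memory
  //     dispatch_semaphore_signal(Sem);
  //   });
  //   dispatch_semaphore_wait(Sem, DISPATCH_TIME_FOREVER);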
  if (isSemaphoreCaptured(*B.getDecl()))
    return;
  for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
    // The block passed to dispatch_async may capture another block
    // created on the stack. However, there is no leak in this situation,
    // no matter whether ARC is enabled or not:
    // dispatch_async copies the passed "outer" block (via Block_copy),
    // and if that block has captured another "inner" block,
    // the "inner" block will be copied as well.
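    // For example (an illustrative sketch; 'Queue' and 'Inner' are
    // placeholder names):
    //
    //   void (^Inner)(void) = ^{ /* ... */ };  // stack-allocated block
    //   dispatch_async(Queue, ^{ Inner(); });  // outer block is copied,
    //                                          // and 'Inner' with it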
    if (isa<BlockDataRegion>(Region))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackasync)
      BT_capturedstackasync = std::make_unique<BuiltinBug>(
          CheckNames[CK_StackAddrAsyncEscapeChecker],
          "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by an asynchronously-executed block";
    auto Report = std::make_unique<PathSensitiveBugReport>(
        *BT_capturedstackasync, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}
void StackAddrEscapeChecker::checkReturnedBlockCaptures(
    const BlockDataRegion &B, CheckerContext &C) const {
  for (const MemRegion *Region : getCapturedStackRegions(B, C)) {
    if (isArcManagedBlock(Region, C) || isNotInCurrentFrame(Region, C))
      continue;
    ExplodedNode *N = C.generateNonFatalErrorNode();
    if (!N)
      continue;
    if (!BT_capturedstackret)
      BT_capturedstackret = std::make_unique<BuiltinBug>(
          CheckNames[CK_StackAddrEscapeChecker],
          "Address of stack-allocated memory is captured");
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    SourceRange Range = genName(Out, Region, C.getASTContext());
    Out << " is captured by a returned block";
    auto Report = std::make_unique<PathSensitiveBugReport>(*BT_capturedstackret,
                                                           Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);
    C.emitReport(std::move(Report));
  }
}
void StackAddrEscapeChecker::checkPreCall(const CallEvent &Call,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrAsyncEscapeChecker])
    return;
  if (!Call.isGlobalCFunction("dispatch_after") &&
      !Call.isGlobalCFunction("dispatch_async"))
    return;
  for (unsigned Idx = 0, NumArgs = Call.getNumArgs(); Idx < NumArgs; ++Idx) {
    if (const BlockDataRegion *B = dyn_cast_or_null<BlockDataRegion>(
            Call.getArgSVal(Idx).getAsRegion()))
      checkAsyncExecutedBlockCaptures(*B, C);
  }
}
void StackAddrEscapeChecker::checkPreStmt(const ReturnStmt *RS,
                                          CheckerContext &C) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  const Expr *RetE = RS->getRetValue();
  if (!RetE)
    return;
  RetE = RetE->IgnoreParens();

  SVal V = C.getSVal(RetE);
  const MemRegion *R = V.getAsRegion();
  if (!R)
    return;

  if (const BlockDataRegion *B = dyn_cast<BlockDataRegion>(R))
    checkReturnedBlockCaptures(*B, C);

  if (!isa<StackSpaceRegion>(R->getMemorySpace()) ||
      isNotInCurrentFrame(R, C) || isArcManagedBlock(R, C))
    return;

  // Returning a record by value is fine. (In this case, the returned
  // expression will be a copy-constructor call, possibly wrapped in an
  // ExprWithCleanups node.)
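  //
  // For example (an illustrative sketch), returning 'S' by value is not an
  // escape even though 's' lives on this frame's stack:
  //
  //   struct S { int X; };
  //   S make() { S s; return s; }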
  if (const ExprWithCleanups *Cleanup = dyn_cast<ExprWithCleanups>(RetE))
    RetE = Cleanup->getSubExpr();
  if (isa<CXXConstructExpr>(RetE) && RetE->getType()->isRecordType())
    return;

  // The CK_CopyAndAutoreleaseBlockObject cast causes the block to be copied,
  // so the stack address is not escaping here.
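  //
  // For example (an illustrative sketch, assuming Objective-C ARC), returning
  // a block literal inserts this cast, so the block no longer lives on the
  // stack by the time it reaches the caller:
  //
  //   typedef void (^Callback)(void);
  //   Callback makeCallback() { return ^{}; } // copied and autoreleased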
  if (const auto *ICE = dyn_cast<ImplicitCastExpr>(RetE)) {
    if (isa<BlockDataRegion>(R) &&
        ICE->getCastKind() == CK_CopyAndAutoreleaseBlockObject) {
      return;
    }
  }

  EmitStackError(C, R, RetE);
}

void StackAddrEscapeChecker::checkEndFunction(const ReturnStmt *RS,
                                              CheckerContext &Ctx) const {
  if (!ChecksEnabled[CK_StackAddrEscapeChecker])
    return;

  ProgramStateRef State = Ctx.getState();

  // Iterate over all bindings to global variables and see if any of them
  // refers to a memory region in the stack space.
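  //
  // For example (an illustrative sketch; 'G' and 'f' are placeholder names),
  // this should be reported once f's frame is popped:
  //
  //   int *G;
  //   void f() { int X = 0; G = &X; }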
  class CallBack : public StoreManager::BindingsHandler {
  private:
    CheckerContext &Ctx;
    const StackFrameContext *PoppedFrame;

    /// Look for stack variables referring to popped stack variables.
    /// Returns true only if it found some dangling stack variables
    /// referred to by another stack variable from a different stack frame.
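    ///
    /// For example (an illustrative sketch; names are placeholders):
    ///
    ///   void callee(int **PP) {
    ///     int Local = 0;
    ///     *PP = &Local;  // caller's 'P' now refers to popped stack memory
    ///   }
    ///   void caller() {
    ///     int *P;
    ///     callee(&P);    // after the call, 'P' dangles
    ///   }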
    bool checkForDanglingStackVariable(const MemRegion *Referrer,
                                       const MemRegion *Referred) {
      const auto *ReferrerMemSpace =
          Referrer->getMemorySpace()->getAs<StackSpaceRegion>();
      const auto *ReferredMemSpace =
          Referred->getMemorySpace()->getAs<StackSpaceRegion>();

      if (!ReferrerMemSpace || !ReferredMemSpace)
        return false;

      const auto *ReferrerFrame = ReferrerMemSpace->getStackFrame();
      const auto *ReferredFrame = ReferredMemSpace->getStackFrame();

      if (ReferrerMemSpace && ReferredMemSpace) {
        if (ReferredFrame == PoppedFrame &&
            ReferrerFrame->isParentOf(PoppedFrame)) {
          V.emplace_back(Referrer, Referred);
          return true;
        }
      }
      return false;
    }

  public:
    SmallVector<std::pair<const MemRegion *, const MemRegion *>, 10> V;

    CallBack(CheckerContext &CC) : Ctx(CC), PoppedFrame(CC.getStackFrame()) {}

    bool HandleBinding(StoreManager &SMgr, Store S, const MemRegion *Region,
                       SVal Val) override {
      const MemRegion *VR = Val.getAsRegion();
      if (!VR)
        return true;

      if (checkForDanglingStackVariable(Region, VR))
        return true;

      // Check the globals for the same.
      if (!isa<GlobalsSpaceRegion>(Region->getMemorySpace()))
        return true;
      if (VR && VR->hasStackStorage() && !isArcManagedBlock(VR, Ctx) &&
          !isNotInCurrentFrame(VR, Ctx))
        V.emplace_back(Region, VR);
      return true;
    }
  };
  CallBack Cb(Ctx);
  State->getStateManager().getStoreManager().iterBindings(State->getStore(),
                                                          Cb);

  if (Cb.V.empty())
    return;

  // Generate an error node.
  ExplodedNode *N = Ctx.generateNonFatalErrorNode(State);
  if (!N)
    return;

  if (!BT_stackleak)
    BT_stackleak = std::make_unique<BuiltinBug>(
        CheckNames[CK_StackAddrEscapeChecker],
        "Stack address stored into global variable",
        "Stack address was saved into a global variable. "
        "This is dangerous because the address will become "
        "invalid after returning from the function");

  for (const auto &P : Cb.V) {
    const MemRegion *Referrer = P.first;
    const MemRegion *Referred = P.second;

    // Generate a report for this bug.
    const StringRef CommonSuffix =
        "upon returning to the caller. This will be a dangling reference";
    SmallString<128> Buf;
    llvm::raw_svector_ostream Out(Buf);
    const SourceRange Range = genName(Out, Referred, Ctx.getASTContext());

    if (isa<CXXTempObjectRegion>(Referrer)) {
      Out << " is still referred to by a temporary object on the stack "
          << CommonSuffix;
      auto Report =
          std::make_unique<PathSensitiveBugReport>(*BT_stackleak, Out.str(), N);
      Ctx.emitReport(std::move(Report));
      return;
    }

    const StringRef ReferrerMemorySpace = [](const MemSpaceRegion *Space) {
      if (isa<StaticGlobalSpaceRegion>(Space))
        return "static";
      if (isa<GlobalsSpaceRegion>(Space))
        return "global";
      assert(isa<StackSpaceRegion>(Space));
      return "stack";
    }(Referrer->getMemorySpace());
    // This cast is supposed to succeed.
    const VarRegion *ReferrerVar = cast<VarRegion>(Referrer->getBaseRegion());
    const std::string ReferrerVarName =
        ReferrerVar->getDecl()->getDeclName().getAsString();

    Out << " is still referred to by the " << ReferrerMemorySpace
        << " variable '" << ReferrerVarName << "' " << CommonSuffix;
    auto Report =
        std::make_unique<PathSensitiveBugReport>(*BT_stackleak, Out.str(), N);
    if (Range.isValid())
      Report->addRange(Range);

    Ctx.emitReport(std::move(Report));
  }
}
void ento::registerStackAddrEscapeBase(CheckerManager &mgr) {
  mgr.registerChecker<StackAddrEscapeChecker>();
}

bool ento::shouldRegisterStackAddrEscapeBase(const CheckerManager &mgr) {
  return true;
}

#define REGISTER_CHECKER(name)                                                 \
  void ento::register##name(CheckerManager &Mgr) {                             \
    StackAddrEscapeChecker *Chk = Mgr.getChecker<StackAddrEscapeChecker>();    \
    Chk->ChecksEnabled[StackAddrEscapeChecker::CK_##name] = true;              \
    Chk->CheckNames[StackAddrEscapeChecker::CK_##name] =                       \
        Mgr.getCurrentCheckerName();                                           \
  }                                                                            \
                                                                               \
  bool ento::shouldRegister##name(const CheckerManager &mgr) { return true; }

REGISTER_CHECKER(StackAddrEscapeChecker)
REGISTER_CHECKER(StackAddrAsyncEscapeChecker)