//===--- JumpDiagnostics.cpp - Protected scope jump analysis ------*- C++ -*-=//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the JumpScopeChecker class, which is used to diagnose
// jumps that enter a protected scope in an invalid way.
//
//===----------------------------------------------------------------------===//

#include "clang/AST/DeclCXX.h"
#include "clang/AST/Expr.h"
#include "clang/AST/ExprCXX.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtObjC.h"
#include "clang/AST/StmtOpenMP.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Sema/SemaInternal.h"
#include "llvm/ADT/BitVector.h"
using namespace clang;

namespace {

/// JumpScopeChecker - This object is used by Sema to diagnose invalid jumps
/// into VLA and other protected scopes.  For example, this rejects:
///    goto L;
///    int a[n];
///   L:
///
/// We also detect jumps out of protected scopes when it's not possible to do
/// cleanups properly.  Indirect jumps and ASM jumps can't do cleanups because
/// the target is unknown.  Return statements with \c [[clang::musttail]] cannot
/// handle any cleanups due to the nature of a tail call.
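///
/// For illustration, a hypothetical example (not from the original comments)
/// of a return this checker rejects: a musttail call may not exit a scope
/// that still needs a cleanup, here a destructor:
///   struct S { ~S(); };
///   int g();
///   int f() {
///     S s;                             // scope with a teardown (OutDiag)
///     [[clang::musttail]] return g();  // error: would skip s's destructor
///   }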
class JumpScopeChecker {
  Sema &S;

  /// Permissive - True when recovering from errors, in which case precautions
  /// are taken to handle incomplete scope information.
  const bool Permissive;

  /// GotoScope - This is a record that we use to keep track of all of the
  /// scopes that are introduced by VLAs and other things that scope jumps like
  /// gotos.  This scope tree has nothing to do with the source scope tree,
  /// because you can have multiple VLA scopes per compound statement, and most
  /// compound statements don't introduce any scopes.
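  ///
  /// For example (illustrative only), a single compound statement can
  /// introduce two nested VLA scopes, one per declaration:
  ///   void f(int n) {
  ///     int a[n];   // first VLA scope starts here
  ///     int b[n];   // second VLA scope, nested inside the first
  ///   }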
  struct GotoScope {
    /// ParentScope - The index in ScopeMap of the parent scope.  This is 0
    /// when the parent scope is the function body.
    unsigned ParentScope;

    /// InDiag - The note to emit if there is a jump into this scope.
    unsigned InDiag;

    /// OutDiag - The note to emit if there is an indirect jump out
    /// of this scope.  Direct jumps always clean up their current scope
    /// in an orderly way.
    unsigned OutDiag;

    /// Loc - Location to emit the diagnostic.
    SourceLocation Loc;

    GotoScope(unsigned parentScope, unsigned InDiag, unsigned OutDiag,
              SourceLocation L)
        : ParentScope(parentScope), InDiag(InDiag), OutDiag(OutDiag), Loc(L) {}
  };

  SmallVector<GotoScope, 48> Scopes;
  llvm::DenseMap<Stmt*, unsigned> LabelAndGotoScopes;
  SmallVector<Stmt*, 16> Jumps;

  SmallVector<Stmt*, 4> IndirectJumps;
  SmallVector<Stmt*, 4> AsmJumps;
  SmallVector<AttributedStmt *, 4> MustTailStmts;
  SmallVector<LabelDecl*, 4> IndirectJumpTargets;
  SmallVector<LabelDecl*, 4> AsmJumpTargets;

public:
  JumpScopeChecker(Stmt *Body, Sema &S);

private:
  void BuildScopeInformation(Decl *D, unsigned &ParentScope);
  void BuildScopeInformation(VarDecl *D, const BlockDecl *BDecl,
                             unsigned &ParentScope);
  void BuildScopeInformation(CompoundLiteralExpr *CLE, unsigned &ParentScope);
  void BuildScopeInformation(Stmt *S, unsigned &origParentScope);

  void VerifyJumps();
  void VerifyIndirectOrAsmJumps(bool IsAsmGoto);
  void VerifyMustTailStmts();
  void NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes);
  void DiagnoseIndirectOrAsmJump(Stmt *IG, unsigned IGScope, LabelDecl *Target,
                                 unsigned TargetScope);
  void CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                 unsigned JumpDiag, unsigned JumpDiagWarning,
                 unsigned JumpDiagCXX98Compat);
  void CheckGotoStmt(GotoStmt *GS);
  const Attr *GetMustTailAttr(AttributedStmt *AS);
  unsigned GetDeepestCommonScope(unsigned A, unsigned B);
};
} // end anonymous namespace
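
// CHECK_PERMISSIVE(x) asserts that 'x' is false unless we are in permissive
// (error-recovery) mode; only in permissive mode does it evaluate to 'x', so
// callers can use it to skip checks that rely on incomplete scope information.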
#define CHECK_PERMISSIVE(x) (assert(Permissive || !(x)), (Permissive && (x)))

JumpScopeChecker::JumpScopeChecker(Stmt *Body, Sema &s)
    : S(s), Permissive(s.hasAnyUnrecoverableErrorsInThisFunction()) {
  // Add a scope entry for function scope.
  Scopes.push_back(GotoScope(~0U, ~0U, ~0U, SourceLocation()));

  // Build information for the top level compound statement, so that we have a
  // defined scope record for every "goto" and label.
  unsigned BodyParentScope = 0;
  BuildScopeInformation(Body, BodyParentScope);

  // Check that all jumps we saw are kosher.
  VerifyJumps();
  VerifyIndirectOrAsmJumps(false);
  VerifyIndirectOrAsmJumps(true);
  VerifyMustTailStmts();
}

/// GetDeepestCommonScope - Finds the innermost scope enclosing the
/// two scopes.
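///
/// For example, given parent links 2 -> 1 -> 0 and 3 -> 1 -> 0,
/// GetDeepestCommonScope(2, 3) returns 1.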
unsigned JumpScopeChecker::GetDeepestCommonScope(unsigned A, unsigned B) {
  while (A != B) {
    // Inner scopes are created after outer scopes and therefore have
    // higher indices.
    if (A < B) {
      assert(Scopes[B].ParentScope < B);
      B = Scopes[B].ParentScope;
    } else {
      assert(Scopes[A].ParentScope < A);
      A = Scopes[A].ParentScope;
    }
  }
  return A;
}

typedef std::pair<unsigned,unsigned> ScopePair;

/// GetDiagForGotoScopeDecl - If this decl induces a new goto scope, return a
/// diagnostic that should be emitted if control goes over it.  If not, return
/// 0.
static ScopePair GetDiagForGotoScopeDecl(Sema &S, const Decl *D) {
  if (const VarDecl *VD = dyn_cast<VarDecl>(D)) {
    unsigned InDiag = 0;
    unsigned OutDiag = 0;

    if (VD->getType()->isVariablyModifiedType())
      InDiag = diag::note_protected_by_vla;

    if (VD->hasAttr<BlocksAttr>())
      return ScopePair(diag::note_protected_by___block,
                       diag::note_exits___block);

    if (VD->hasAttr<CleanupAttr>())
      return ScopePair(diag::note_protected_by_cleanup,
                       diag::note_exits_cleanup);

    if (VD->hasLocalStorage()) {
      switch (VD->getType().isDestructedType()) {
      case QualType::DK_objc_strong_lifetime:
        return ScopePair(diag::note_protected_by_objc_strong_init,
                         diag::note_exits_objc_strong);

      case QualType::DK_objc_weak_lifetime:
        return ScopePair(diag::note_protected_by_objc_weak_init,
                         diag::note_exits_objc_weak);

      case QualType::DK_nontrivial_c_struct:
        return ScopePair(diag::note_protected_by_non_trivial_c_struct_init,
                         diag::note_exits_dtor);

      case QualType::DK_cxx_destructor:
        OutDiag = diag::note_exits_dtor;
        break;

      case QualType::DK_none:
        break;
      }
    }

    const Expr *Init = VD->getInit();
    if (S.Context.getLangOpts().CPlusPlus && VD->hasLocalStorage() && Init) {
      // C++11 [stmt.dcl]p3:
      //   A program that jumps from a point where a variable with automatic
      //   storage duration is not in scope to a point where it is in scope
      //   is ill-formed unless the variable has scalar type, class type with
      //   a trivial default constructor and a trivial destructor, a
      //   cv-qualified version of one of these types, or an array of one of
      //   the preceding types and is declared without an initializer.

      // C++03 [stmt.dcl]p3:
      //   A program that jumps from a point where a local variable
      //   with automatic storage duration is not in scope to a point
      //   where it is in scope is ill-formed unless the variable has
      //   POD type and is declared without an initializer.
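
      // For illustration, a hypothetical jump these rules reject:
      //   goto later;      // ill-formed: jumps past the non-trivial
      //   std::string s;   // initialization of a non-POD class type
      //   later:;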
      InDiag = diag::note_protected_by_variable_init;

      // For a variable of (array of) class type declared without an
      // initializer, we will have call-style initialization and the initializer
      // will be the CXXConstructExpr with no intervening nodes.
      if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
        const CXXConstructorDecl *Ctor = CCE->getConstructor();
        if (Ctor->isTrivial() && Ctor->isDefaultConstructor() &&
            VD->getInitStyle() == VarDecl::CallInit) {
          if (OutDiag)
            InDiag = diag::note_protected_by_variable_nontriv_destructor;
          else if (!Ctor->getParent()->isPOD())
            InDiag = diag::note_protected_by_variable_non_pod;
          else
            InDiag = 0;
        }
      }
    }

    return ScopePair(InDiag, OutDiag);
  }

  if (const TypedefNameDecl *TD = dyn_cast<TypedefNameDecl>(D)) {
    if (TD->getUnderlyingType()->isVariablyModifiedType())
      return ScopePair(isa<TypedefDecl>(TD)
                           ? diag::note_protected_by_vla_typedef
                           : diag::note_protected_by_vla_type_alias,
                       0);
  }

  return ScopePair(0U, 0U);
}

/// Build scope information for a declaration that is part of a DeclStmt.
void JumpScopeChecker::BuildScopeInformation(Decl *D, unsigned &ParentScope) {
  // If this decl causes a new scope, push and switch to it.
  std::pair<unsigned,unsigned> Diags = GetDiagForGotoScopeDecl(S, D);
  if (Diags.first || Diags.second) {
    Scopes.push_back(GotoScope(ParentScope, Diags.first, Diags.second,
                               D->getLocation()));
    ParentScope = Scopes.size()-1;
  }

  // If the decl has an initializer, walk it with the potentially new
  // scope we just installed.
  if (VarDecl *VD = dyn_cast<VarDecl>(D))
    if (Expr *Init = VD->getInit())
      BuildScopeInformation(Init, ParentScope);
}

/// Build scope information for captured block literal variables.
void JumpScopeChecker::BuildScopeInformation(VarDecl *D,
                                             const BlockDecl *BDecl,
                                             unsigned &ParentScope) {
  // Exclude captured __block variables; there's no destructor
  // associated with the block literal for them.
  if (D->hasAttr<BlocksAttr>())
    return;
  QualType T = D->getType();
  QualType::DestructionKind destructKind = T.isDestructedType();
  if (destructKind != QualType::DK_none) {
    std::pair<unsigned,unsigned> Diags;
    switch (destructKind) {
    case QualType::DK_cxx_destructor:
      Diags = ScopePair(diag::note_enters_block_captures_cxx_obj,
                        diag::note_exits_block_captures_cxx_obj);
      break;
    case QualType::DK_objc_strong_lifetime:
      Diags = ScopePair(diag::note_enters_block_captures_strong,
                        diag::note_exits_block_captures_strong);
      break;
    case QualType::DK_objc_weak_lifetime:
      Diags = ScopePair(diag::note_enters_block_captures_weak,
                        diag::note_exits_block_captures_weak);
      break;
    case QualType::DK_nontrivial_c_struct:
      Diags = ScopePair(diag::note_enters_block_captures_non_trivial_c_struct,
                        diag::note_exits_block_captures_non_trivial_c_struct);
      break;
    case QualType::DK_none:
      llvm_unreachable("non-lifetime captured variable");
    }
    SourceLocation Loc = D->getLocation();
    if (Loc.isInvalid())
      Loc = BDecl->getLocation();
    Scopes.push_back(GotoScope(ParentScope,
                               Diags.first, Diags.second, Loc));
    ParentScope = Scopes.size()-1;
  }
}

/// Build scope information for compound literals of C struct types that are
/// non-trivial to destruct.
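///
/// A sketch of the kind of code this guards (hypothetical; assumes ARC, where
/// a struct with a __strong field is non-trivial to destruct):
///   struct S { __strong id obj; };
///   goto skip;                    // error: enters the literal's scope
///   g((struct S){ .obj = nil });  // compound literal needing a cleanup
///   skip:;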
void JumpScopeChecker::BuildScopeInformation(CompoundLiteralExpr *CLE,
                                             unsigned &ParentScope) {
  unsigned InDiag = diag::note_enters_compound_literal_scope;
  unsigned OutDiag = diag::note_exits_compound_literal_scope;
  Scopes.push_back(GotoScope(ParentScope, InDiag, OutDiag, CLE->getExprLoc()));
  ParentScope = Scopes.size() - 1;
}

/// BuildScopeInformation - The statement S is known to form a coherent VLA
/// scope with a specified parent node.  Walk through the statements, adding
/// any labels or gotos to LabelAndGotoScopes and recursively walking the AST
/// as needed.
void JumpScopeChecker::BuildScopeInformation(Stmt *S,
                                             unsigned &origParentScope) {
  // If this is a statement, rather than an expression, scopes within it don't
  // propagate out into the enclosing scope.  Otherwise we have to worry
  // about block literals, which have the lifetime of their enclosing statement.
  unsigned independentParentScope = origParentScope;
  unsigned &ParentScope = ((isa<Expr>(S) && !isa<StmtExpr>(S))
                            ? origParentScope : independentParentScope);

  unsigned StmtsToSkip = 0u;

  // If we found a label, remember that it is in ParentScope scope.
  switch (S->getStmtClass()) {
  case Stmt::AddrLabelExprClass:
    IndirectJumpTargets.push_back(cast<AddrLabelExpr>(S)->getLabel());
    break;

  case Stmt::ObjCForCollectionStmtClass: {
    auto *CS = cast<ObjCForCollectionStmt>(S);
    unsigned Diag = diag::note_protected_by_objc_fast_enumeration;
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, S->getBeginLoc()));
    BuildScopeInformation(CS->getBody(), NewParentScope);
    return;
  }

  case Stmt::IndirectGotoStmtClass:
    // "goto *&&lbl;" is a special case which we treat as equivalent
    // to a normal goto.  In addition, we don't calculate scope in the
    // operand (to avoid recording the address-of-label use), which
    // works only because of the restricted set of expressions which
    // we detect as constant targets.
    if (cast<IndirectGotoStmt>(S)->getConstantTarget()) {
      LabelAndGotoScopes[S] = ParentScope;
      Jumps.push_back(S);
      return;
    }

    LabelAndGotoScopes[S] = ParentScope;
    IndirectJumps.push_back(S);
    break;

  case Stmt::SwitchStmtClass:
    // Evaluate the C++17 init stmt and condition variable
    // before entering the scope of the switch statement.
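    // For example (illustrative, C++17):
    //   switch (int x = g(); x) { ... }
    // Here 'x' is walked in the enclosing scope, and the init/condition
    // children are then skipped in the generic child walk below.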
    if (Stmt *Init = cast<SwitchStmt>(S)->getInit()) {
      BuildScopeInformation(Init, ParentScope);
      ++StmtsToSkip;
    }
    if (VarDecl *Var = cast<SwitchStmt>(S)->getConditionVariable()) {
      BuildScopeInformation(Var, ParentScope);
      ++StmtsToSkip;
    }
    LLVM_FALLTHROUGH;

  case Stmt::GotoStmtClass:
    // Remember both what scope a goto is in as well as the fact that we have
    // it.  This makes the second scan not have to walk the AST again.
    LabelAndGotoScopes[S] = ParentScope;
    Jumps.push_back(S);
    break;

  case Stmt::GCCAsmStmtClass:
    if (auto *GS = dyn_cast<GCCAsmStmt>(S))
      if (GS->isAsmGoto()) {
        // Remember both what scope a goto is in as well as the fact that we
        // have it.  This makes the second scan not have to walk the AST again.
        LabelAndGotoScopes[S] = ParentScope;
        AsmJumps.push_back(GS);
        for (auto *E : GS->labels())
          AsmJumpTargets.push_back(E->getLabel());
      }
    break;

  case Stmt::IfStmtClass: {
    IfStmt *IS = cast<IfStmt>(S);
    if (!(IS->isConstexpr() || IS->isConsteval() ||
          IS->isObjCAvailabilityCheck()))
      break;

    unsigned Diag = diag::note_protected_by_if_available;
    if (IS->isConstexpr())
      Diag = diag::note_protected_by_constexpr_if;
    else if (IS->isConsteval())
      Diag = diag::note_protected_by_consteval_if;

    if (VarDecl *Var = IS->getConditionVariable())
      BuildScopeInformation(Var, ParentScope);

    // Cannot jump into the middle of the condition.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    if (!IS->isConsteval())
      BuildScopeInformation(IS->getCond(), NewParentScope);

    // Jumps into either arm of an 'if constexpr' are not allowed.
    NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
    BuildScopeInformation(IS->getThen(), NewParentScope);
    if (Stmt *Else = IS->getElse()) {
      NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope, Diag, 0, IS->getBeginLoc()));
      BuildScopeInformation(Else, NewParentScope);
    }
    return;
  }

  case Stmt::CXXTryStmtClass: {
    CXXTryStmt *TS = cast<CXXTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_try,
                                 diag::note_exits_cxx_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from the catch into the try are not allowed either.
    for (unsigned I = 0, E = TS->getNumHandlers(); I != E; ++I) {
      CXXCatchStmt *CS = TS->getHandler(I);
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_cxx_catch,
                                 diag::note_exits_cxx_catch,
                                 CS->getSourceRange().getBegin()));
      BuildScopeInformation(CS->getHandlerBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::SEHTryStmtClass: {
    SEHTryStmt *TS = cast<SEHTryStmt>(S);
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_try,
                                 diag::note_exits_seh_try,
                                 TS->getSourceRange().getBegin()));
      if (Stmt *TryBlock = TS->getTryBlock())
        BuildScopeInformation(TryBlock, NewParentScope);
    }

    // Jumps from __except or __finally into the __try are not allowed either.
    if (SEHExceptStmt *Except = TS->getExceptHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_except,
                                 diag::note_exits_seh_except,
                                 Except->getSourceRange().getBegin()));
      BuildScopeInformation(Except->getBlock(), NewParentScope);
    } else if (SEHFinallyStmt *Finally = TS->getFinallyHandler()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_seh_finally,
                                 diag::note_exits_seh_finally,
                                 Finally->getSourceRange().getBegin()));
      BuildScopeInformation(Finally->getBlock(), NewParentScope);
    }
    return;
  }

  case Stmt::DeclStmtClass: {
    // If this is a declstmt with a VLA definition, it defines a scope from here
    // to the end of the containing context.
    DeclStmt *DS = cast<DeclStmt>(S);
    // The decl statement creates a scope if any of the decls in it are VLAs
    // or have the cleanup attribute.
    for (auto *I : DS->decls())
      BuildScopeInformation(I, origParentScope);
    return;
  }

  case Stmt::ObjCAtTryStmtClass: {
    // Disallow jumps into any part of an @try statement by pushing a scope and
    // walking all sub-stmts in that scope.
    ObjCAtTryStmt *AT = cast<ObjCAtTryStmt>(S);

    // Recursively walk the AST for the @try part.
    {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_try,
                                 diag::note_exits_objc_try,
                                 AT->getAtTryLoc()));
      if (Stmt *TryPart = AT->getTryBody())
        BuildScopeInformation(TryPart, NewParentScope);
    }

    // Jumps from the catch to the finally or try are not valid.
    for (ObjCAtCatchStmt *AC : AT->catch_stmts()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_catch,
                                 diag::note_exits_objc_catch,
                                 AC->getAtCatchLoc()));
      // @catches are nested scopes; jumps into them aren't valid either.
      BuildScopeInformation(AC->getCatchBody(), NewParentScope);
    }

    // Jumps from the finally to the try or catch are not valid.
    if (ObjCAtFinallyStmt *AF = AT->getFinallyStmt()) {
      unsigned NewParentScope = Scopes.size();
      Scopes.push_back(GotoScope(ParentScope,
                                 diag::note_protected_by_objc_finally,
                                 diag::note_exits_objc_finally,
                                 AF->getAtFinallyLoc()));
      BuildScopeInformation(AF, NewParentScope);
    }

    return;
  }

  case Stmt::ObjCAtSynchronizedStmtClass: {
    // Disallow jumps into the protected statement of an @synchronized, but
    // allow jumps into the object expression it protects.
    ObjCAtSynchronizedStmt *AS = cast<ObjCAtSynchronizedStmt>(S);
    // Recursively walk the AST for the @synchronized object expr, it is
    // evaluated in the normal scope.
    BuildScopeInformation(AS->getSynchExpr(), ParentScope);

    // Recursively walk the AST for the @synchronized part, protected by a new
    // scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_synchronized,
                               diag::note_exits_objc_synchronized,
                               AS->getAtSynchronizedLoc()));
    BuildScopeInformation(AS->getSynchBody(), NewParentScope);
    return;
  }

  case Stmt::ObjCAutoreleasePoolStmtClass: {
    // Disallow jumps into the protected statement of an @autoreleasepool.
    ObjCAutoreleasePoolStmt *AS = cast<ObjCAutoreleasePoolStmt>(S);
    // Recursively walk the AST for the @autoreleasepool part, protected by a
    // new scope.
    unsigned NewParentScope = Scopes.size();
    Scopes.push_back(GotoScope(ParentScope,
                               diag::note_protected_by_objc_autoreleasepool,
                               diag::note_exits_objc_autoreleasepool,
                               AS->getAtLoc()));
    BuildScopeInformation(AS->getSubStmt(), NewParentScope);
    return;
  }

  case Stmt::ExprWithCleanupsClass: {
    // Disallow jumps past full-expressions that use blocks with
    // non-trivial cleanups of their captures.  This is theoretically
    // implementable but a lot of work which we haven't felt up to doing.
    ExprWithCleanups *EWC = cast<ExprWithCleanups>(S);
    for (unsigned i = 0, e = EWC->getNumObjects(); i != e; ++i) {
      if (auto *BDecl = EWC->getObject(i).dyn_cast<BlockDecl *>())
        for (const auto &CI : BDecl->captures()) {
          VarDecl *variable = CI.getVariable();
          BuildScopeInformation(variable, BDecl, origParentScope);
        }
      else if (auto *CLE = EWC->getObject(i).dyn_cast<CompoundLiteralExpr *>())
        BuildScopeInformation(CLE, origParentScope);
      else
        llvm_unreachable("unexpected cleanup object type");
    }
    break;
  }

  case Stmt::MaterializeTemporaryExprClass: {
    // Disallow jumps out of scopes containing temporaries lifetime-extended to
    // automatic storage duration.
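    // For illustration, a hypothetical scope this creates:
    //   struct S { ~S(); };
    //   const S &r = S();   // temporary lifetime-extended to automatic scope;
    //                       // indirect jumps out would skip its destructor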
    MaterializeTemporaryExpr *MTE = cast<MaterializeTemporaryExpr>(S);
    if (MTE->getStorageDuration() == SD_Automatic) {
      SmallVector<const Expr *, 4> CommaLHS;
      SmallVector<SubobjectAdjustment, 4> Adjustments;
      const Expr *ExtendedObject =
          MTE->getSubExpr()->skipRValueSubobjectAdjustments(CommaLHS,
                                                            Adjustments);
      if (ExtendedObject->getType().isDestructedType()) {
        Scopes.push_back(GotoScope(ParentScope, 0,
                                   diag::note_exits_temporary_dtor,
                                   ExtendedObject->getExprLoc()));
        origParentScope = Scopes.size()-1;
      }
    }
    break;
  }

  case Stmt::CaseStmtClass:
  case Stmt::DefaultStmtClass:
  case Stmt::LabelStmtClass:
    LabelAndGotoScopes[S] = ParentScope;
    break;

  case Stmt::AttributedStmtClass: {
    AttributedStmt *AS = cast<AttributedStmt>(S);
    if (GetMustTailAttr(AS)) {
      LabelAndGotoScopes[AS] = ParentScope;
      MustTailStmts.push_back(AS);
    }
    break;
  }

  default:
    if (auto *ED = dyn_cast<OMPExecutableDirective>(S)) {
      if (!ED->isStandaloneDirective()) {
        unsigned NewParentScope = Scopes.size();
        Scopes.emplace_back(ParentScope,
                            diag::note_omp_protected_structured_block,
                            diag::note_omp_exits_structured_block,
                            ED->getStructuredBlock()->getBeginLoc());
        BuildScopeInformation(ED->getStructuredBlock(), NewParentScope);
        return;
      }
    }
    break;
  }

  for (Stmt *SubStmt : S->children()) {
    if (!SubStmt)
      continue;
    if (StmtsToSkip) {
      --StmtsToSkip;
      continue;
    }

    // Cases, labels, and defaults aren't "scope parents".  It's also
    // important to handle these iteratively instead of recursively in
    // order to avoid blowing out the stack.
    while (true) {
      Stmt *Next;
      if (SwitchCase *SC = dyn_cast<SwitchCase>(SubStmt))
        Next = SC->getSubStmt();
      else if (LabelStmt *LS = dyn_cast<LabelStmt>(SubStmt))
        Next = LS->getSubStmt();
      else
        break;

      LabelAndGotoScopes[SubStmt] = ParentScope;
      SubStmt = Next;
    }

    // Recursively walk the AST.
    BuildScopeInformation(SubStmt, ParentScope);
  }
}

/// VerifyJumps - Verify each element of the Jumps array to see if they are
/// valid, emitting diagnostics if not.
void JumpScopeChecker::VerifyJumps() {
  while (!Jumps.empty()) {
    Stmt *Jump = Jumps.pop_back_val();

    // For a direct goto, check the jump to its label.
    if (GotoStmt *GS = dyn_cast<GotoStmt>(Jump)) {
      // The label may not have a statement if it's coming from inline MS ASM.
      if (GS->getLabel()->getStmt()) {
        CheckJump(GS, GS->getLabel()->getStmt(), GS->getGotoLoc(),
                  diag::err_goto_into_protected_scope,
                  diag::ext_goto_into_protected_scope,
                  diag::warn_cxx98_compat_goto_into_protected_scope);
      }
      CheckGotoStmt(GS);
      continue;
    }

    // We only get indirect gotos here when they have a constant target.
    if (IndirectGotoStmt *IGS = dyn_cast<IndirectGotoStmt>(Jump)) {
      LabelDecl *Target = IGS->getConstantTarget();
      CheckJump(IGS, Target->getStmt(), IGS->getGotoLoc(),
                diag::err_goto_into_protected_scope,
                diag::ext_goto_into_protected_scope,
                diag::warn_cxx98_compat_goto_into_protected_scope);
      continue;
    }

    SwitchStmt *SS = cast<SwitchStmt>(Jump);
    for (SwitchCase *SC = SS->getSwitchCaseList(); SC;
         SC = SC->getNextSwitchCase()) {
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(SC)))
        continue;
      SourceLocation Loc;
      if (CaseStmt *CS = dyn_cast<CaseStmt>(SC))
        Loc = CS->getBeginLoc();
      else if (DefaultStmt *DS = dyn_cast<DefaultStmt>(SC))
        Loc = DS->getBeginLoc();
      else
        Loc = SC->getBeginLoc();
      CheckJump(SS, SC, Loc, diag::err_switch_into_protected_scope, 0,
                diag::warn_cxx98_compat_switch_into_protected_scope);
    }
  }
}

/// VerifyIndirectOrAsmJumps - Verify whether any possible indirect goto or
/// asm goto jump might cross a protection boundary.  Unlike direct jumps,
/// indirect or asm goto jumps count cleanups as protection boundaries:
/// since there's no way to know where the jump is going, we can't implicitly
/// run the right cleanups the way we can with direct jumps.
///
/// Thus, an indirect/asm jump is "trivial" if it bypasses no
/// initializations and no teardowns.  More formally, an indirect/asm jump
/// from A to B is trivial if the path out from A to DCA(A,B) is
/// trivial and the path in from DCA(A,B) to B is trivial, where
/// DCA(A,B) is the deepest common ancestor of A and B.
/// Jump-triviality is transitive but asymmetric.
///
/// A path in is trivial if none of the entered scopes have an InDiag.
/// A path out is trivial if none of the exited scopes have an OutDiag.
///
/// Under these definitions, this function checks that the indirect
/// jump between A and B is trivial for every indirect goto statement A
/// and every label B whose address was taken in the function.
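///
/// For illustration, a hypothetical non-trivial indirect jump this rejects:
///   struct S { ~S(); };
///   void f() {
///     void *t = &&out;
///     {
///       S s;       // scope with a teardown (OutDiag)
///       goto *t;   // error: the path out of this scope is non-trivial
///     }
///   out:;
///   }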
void JumpScopeChecker::VerifyIndirectOrAsmJumps(bool IsAsmGoto) {
  SmallVector<Stmt*, 4> GotoJumps = IsAsmGoto ? AsmJumps : IndirectJumps;
  if (GotoJumps.empty())
    return;
  SmallVector<LabelDecl *, 4> JumpTargets =
      IsAsmGoto ? AsmJumpTargets : IndirectJumpTargets;
  // If there aren't any address-of-label expressions in this function,
  // complain about the first indirect goto.
  if (JumpTargets.empty()) {
    assert(!IsAsmGoto && "only indirect goto can get here");
    S.Diag(GotoJumps[0]->getBeginLoc(),
           diag::err_indirect_goto_without_addrlabel);
    return;
  }

  // Collect a single representative of every scope containing an
  // indirect or asm goto.  For most code bases, this substantially cuts
  // down on the number of jump sites we'll have to consider later.
  typedef std::pair<unsigned, Stmt*> JumpScope;
  SmallVector<JumpScope, 32> JumpScopes;
  {
    llvm::DenseMap<unsigned, Stmt*> JumpScopesMap;
    for (SmallVectorImpl<Stmt *>::iterator I = GotoJumps.begin(),
                                           E = GotoJumps.end();
         I != E; ++I) {
      Stmt *IG = *I;
      if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(IG)))
        continue;
      unsigned IGScope = LabelAndGotoScopes[IG];
      Stmt *&Entry = JumpScopesMap[IGScope];
      if (!Entry) Entry = IG;
    }
    JumpScopes.reserve(JumpScopesMap.size());
    for (llvm::DenseMap<unsigned, Stmt *>::iterator I = JumpScopesMap.begin(),
                                                    E = JumpScopesMap.end();
         I != E; ++I)
      JumpScopes.push_back(*I);
  }

  // Collect a single representative of every scope containing a
  // label whose address was taken somewhere in the function.
  // For most code bases, there will be only one such scope.
  llvm::DenseMap<unsigned, LabelDecl*> TargetScopes;
  for (SmallVectorImpl<LabelDecl *>::iterator I = JumpTargets.begin(),
                                              E = JumpTargets.end();
       I != E; ++I) {
    LabelDecl *TheLabel = *I;
    if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(TheLabel->getStmt())))
      continue;
    unsigned LabelScope = LabelAndGotoScopes[TheLabel->getStmt()];
    LabelDecl *&Target = TargetScopes[LabelScope];
    if (!Target) Target = TheLabel;
  }

  // For each target scope, make sure it's trivially reachable from
  // every scope containing a jump site.
  //
  // A path between scopes always consists of exiting zero or more
  // scopes, then entering zero or more scopes.  We build a set
  // of scopes S from which the target scope can be trivially
  // entered, then verify that every jump scope can be trivially
  // exited to reach a scope in S.
  llvm::BitVector Reachable(Scopes.size(), false);
  for (llvm::DenseMap<unsigned,LabelDecl*>::iterator
         TI = TargetScopes.begin(), TE = TargetScopes.end(); TI != TE; ++TI) {
    unsigned TargetScope = TI->first;
    LabelDecl *TargetLabel = TI->second;

    Reachable.reset();

    // Mark all the enclosing scopes from which you can safely jump
    // into the target scope.  'Min' will end up being the index of
    // the shallowest such scope.
    unsigned Min = TargetScope;
    while (true) {
      Reachable.set(Min);

      // Don't go beyond the outermost scope.
      if (Min == 0) break;

      // Stop if we can't trivially enter the current scope.
      if (Scopes[Min].InDiag) break;

      Min = Scopes[Min].ParentScope;
    }

    // Walk through all the jump sites, checking that they can trivially
    // reach this label scope.
    for (SmallVectorImpl<JumpScope>::iterator
           I = JumpScopes.begin(), E = JumpScopes.end(); I != E; ++I) {
      unsigned Scope = I->first;

      // Walk out the "scope chain" for this scope, looking for a scope
      // we've marked reachable.  For well-formed code this amortizes
      // to O(JumpScopes.size() / Scopes.size()):  we only iterate
      // when we see something unmarked, and in well-formed code we
      // mark everything we iterate past.
      bool IsReachable = false;
      while (true) {
        if (Reachable.test(Scope)) {
          // If we find something reachable, mark all the scopes we just
          // walked through as reachable.
          for (unsigned S = I->first; S != Scope; S = Scopes[S].ParentScope)
            Reachable.set(S);
          IsReachable = true;
          break;
        }

        // Don't walk out if we've reached the top-level scope or we've
        // gotten shallower than the shallowest reachable scope.
        if (Scope == 0 || Scope < Min) break;

        // Don't walk out through an out-diagnostic.
        if (Scopes[Scope].OutDiag) break;

        Scope = Scopes[Scope].ParentScope;
      }

      // Only diagnose if we didn't find something.
      if (IsReachable) continue;

      DiagnoseIndirectOrAsmJump(I->second, I->first, TargetLabel, TargetScope);
    }
  }
}

/// Return true if a particular error+note combination must be downgraded to a
/// warning in Microsoft mode.
static bool IsMicrosoftJumpWarning(unsigned JumpDiag, unsigned InDiagNote) {
  return (JumpDiag == diag::err_goto_into_protected_scope &&
          (InDiagNote == diag::note_protected_by_variable_init ||
           InDiagNote == diag::note_protected_by_variable_nontriv_destructor));
}

/// Return true if a particular note should be downgraded to a compatibility
/// warning in C++11 mode.
static bool IsCXX98CompatWarning(Sema &S, unsigned InDiagNote) {
  return S.getLangOpts().CPlusPlus11 &&
         InDiagNote == diag::note_protected_by_variable_non_pod;
}

/// Produce primary diagnostic for an indirect jump statement.
static void DiagnoseIndirectOrAsmJumpStmt(Sema &S, Stmt *Jump,
                                          LabelDecl *Target, bool &Diagnosed) {
  if (Diagnosed)
    return;
  bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
  S.Diag(Jump->getBeginLoc(), diag::err_indirect_goto_in_protected_scope)
      << IsAsmGoto;
  S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
      << IsAsmGoto;
  Diagnosed = true;
}

/// Produce note diagnostics for a jump into a protected scope.
void JumpScopeChecker::NoteJumpIntoScopes(ArrayRef<unsigned> ToScopes) {
  if (CHECK_PERMISSIVE(ToScopes.empty()))
    return;
  for (unsigned I = 0, E = ToScopes.size(); I != E; ++I)
    if (Scopes[ToScopes[I]].InDiag)
      S.Diag(Scopes[ToScopes[I]].Loc, Scopes[ToScopes[I]].InDiag);
}

/// Diagnose an indirect jump which is known to cross scopes.
void JumpScopeChecker::DiagnoseIndirectOrAsmJump(Stmt *Jump, unsigned JumpScope,
                                                 LabelDecl *Target,
                                                 unsigned TargetScope) {
  if (CHECK_PERMISSIVE(JumpScope == TargetScope))
    return;

  unsigned Common = GetDeepestCommonScope(JumpScope, TargetScope);
  bool Diagnosed = false;

  // Walk out the scope chain until we reach the common ancestor.
  for (unsigned I = JumpScope; I != Common; I = Scopes[I].ParentScope)
    if (Scopes[I].OutDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
    }

  SmallVector<unsigned, 10> ToScopesCXX98Compat;

  // Now walk into the scopes containing the label whose address was taken.
  for (unsigned I = TargetScope; I != Common; I = Scopes[I].ParentScope)
    if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag) {
      DiagnoseIndirectOrAsmJumpStmt(S, Jump, Target, Diagnosed);
      S.Diag(Scopes[I].Loc, Scopes[I].InDiag);
    }

  // Diagnose this jump if it would be ill-formed in C++98.
  if (!Diagnosed && !ToScopesCXX98Compat.empty()) {
    bool IsAsmGoto = isa<GCCAsmStmt>(Jump);
    S.Diag(Jump->getBeginLoc(),
           diag::warn_cxx98_compat_indirect_goto_in_protected_scope)
        << IsAsmGoto;
    S.Diag(Target->getStmt()->getIdentLoc(), diag::note_indirect_goto_target)
        << IsAsmGoto;
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

/// CheckJump - Validate that the specified jump statement is valid: that it is
/// jumping within or out of its current scope, not into a deeper one.
void JumpScopeChecker::CheckJump(Stmt *From, Stmt *To, SourceLocation DiagLoc,
                                 unsigned JumpDiagError,
                                 unsigned JumpDiagWarning,
                                 unsigned JumpDiagCXX98Compat) {
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(From)))
    return;
  if (CHECK_PERMISSIVE(!LabelAndGotoScopes.count(To)))
    return;

  unsigned FromScope = LabelAndGotoScopes[From];
  unsigned ToScope = LabelAndGotoScopes[To];

  // Common case: exactly the same scope, which is fine.
  if (FromScope == ToScope) return;

  // Warn on gotos out of __finally blocks.
  if (isa<GotoStmt>(From) || isa<IndirectGotoStmt>(From)) {
    // If FromScope > ToScope, FromScope is more nested and the jump goes to a
    // less nested scope.  Check if it crosses a __finally along the way.
    for (unsigned I = FromScope; I > ToScope; I = Scopes[I].ParentScope) {
      if (Scopes[I].InDiag == diag::note_protected_by_seh_finally) {
        S.Diag(From->getBeginLoc(), diag::warn_jump_out_of_seh_finally);
        break;
      }
      if (Scopes[I].InDiag == diag::note_omp_protected_structured_block) {
        S.Diag(From->getBeginLoc(), diag::err_goto_into_protected_scope);
        S.Diag(To->getBeginLoc(), diag::note_omp_exits_structured_block);
        break;
      }
    }
  }

  unsigned CommonScope = GetDeepestCommonScope(FromScope, ToScope);

  // It's okay to jump out from a nested scope.
  if (CommonScope == ToScope) return;

  // Pull out (and reverse) any scopes we might need to diagnose skipping.
  SmallVector<unsigned, 10> ToScopesCXX98Compat;
  SmallVector<unsigned, 10> ToScopesError;
  SmallVector<unsigned, 10> ToScopesWarning;
  for (unsigned I = ToScope; I != CommonScope; I = Scopes[I].ParentScope) {
    if (S.getLangOpts().MSVCCompat && JumpDiagWarning != 0 &&
        IsMicrosoftJumpWarning(JumpDiagError, Scopes[I].InDiag))
      ToScopesWarning.push_back(I);
    else if (IsCXX98CompatWarning(S, Scopes[I].InDiag))
      ToScopesCXX98Compat.push_back(I);
    else if (Scopes[I].InDiag)
      ToScopesError.push_back(I);
  }

  // Handle warnings.
  if (!ToScopesWarning.empty()) {
    S.Diag(DiagLoc, JumpDiagWarning);
    NoteJumpIntoScopes(ToScopesWarning);
    assert(isa<LabelStmt>(To));
    LabelStmt *Label = cast<LabelStmt>(To);
    Label->setSideEntry(true);
  }

  // Handle errors.
  if (!ToScopesError.empty()) {
    S.Diag(DiagLoc, JumpDiagError);
    NoteJumpIntoScopes(ToScopesError);
  }

  // Handle -Wc++98-compat warnings if the jump is well-formed.
  if (ToScopesError.empty() && !ToScopesCXX98Compat.empty()) {
    S.Diag(DiagLoc, JumpDiagCXX98Compat);
    NoteJumpIntoScopes(ToScopesCXX98Compat);
  }
}

void JumpScopeChecker::CheckGotoStmt(GotoStmt *GS) {
  if (GS->getLabel()->isMSAsmLabel()) {
    S.Diag(GS->getGotoLoc(), diag::err_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
    S.Diag(GS->getLabel()->getLocation(), diag::note_goto_ms_asm_label)
        << GS->getLabel()->getIdentifier();
  }
}

void JumpScopeChecker::VerifyMustTailStmts() {
  for (AttributedStmt *AS : MustTailStmts) {
    for (unsigned I = LabelAndGotoScopes[AS]; I; I = Scopes[I].ParentScope) {
      if (Scopes[I].OutDiag) {
        S.Diag(AS->getBeginLoc(), diag::err_musttail_scope);
        S.Diag(Scopes[I].Loc, Scopes[I].OutDiag);
      }
    }
  }
}

const Attr *JumpScopeChecker::GetMustTailAttr(AttributedStmt *AS) {
  ArrayRef<const Attr *> Attrs = AS->getAttrs();
  const auto *Iter =
      llvm::find_if(Attrs, [](const Attr *A) { return isa<MustTailAttr>(A); });
  return Iter != Attrs.end() ? *Iter : nullptr;
}

void Sema::DiagnoseInvalidJumps(Stmt *Body) {
  (void)JumpScopeChecker(Body, *this);
}