CodeGenPGO.cpp 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125
  1. //===--- CodeGenPGO.cpp - PGO Instrumentation for LLVM CodeGen --*- C++ -*-===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // Instrumentation-based profile-guided optimization
  10. //
  11. //===----------------------------------------------------------------------===//
  12. #include "CodeGenPGO.h"
  13. #include "CodeGenFunction.h"
  14. #include "CoverageMappingGen.h"
  15. #include "clang/AST/RecursiveASTVisitor.h"
  16. #include "clang/AST/StmtVisitor.h"
  17. #include "llvm/IR/Intrinsics.h"
  18. #include "llvm/IR/MDBuilder.h"
  19. #include "llvm/Support/CommandLine.h"
  20. #include "llvm/Support/Endian.h"
  21. #include "llvm/Support/FileSystem.h"
  22. #include "llvm/Support/MD5.h"
  23. #include <optional>
// Hidden command-line switch (off by default) that turns on value-profiling
// instrumentation in addition to the ordinary region counters.
static llvm::cl::opt<bool>
    EnableValueProfiling("enable-value-profiling",
                         llvm::cl::desc("Enable value profiling"),
                         llvm::cl::Hidden, llvm::cl::init(false));
  28. using namespace clang;
  29. using namespace CodeGen;
  30. void CodeGenPGO::setFuncName(StringRef Name,
  31. llvm::GlobalValue::LinkageTypes Linkage) {
  32. llvm::IndexedInstrProfReader *PGOReader = CGM.getPGOReader();
  33. FuncName = llvm::getPGOFuncName(
  34. Name, Linkage, CGM.getCodeGenOpts().MainFileName,
  35. PGOReader ? PGOReader->getVersion() : llvm::IndexedInstrProf::Version);
  36. // If we're generating a profile, create a variable for the name.
  37. if (CGM.getCodeGenOpts().hasProfileClangInstr())
  38. FuncNameVar = llvm::createPGOFuncNameVar(CGM.getModule(), Linkage, FuncName);
  39. }
  40. void CodeGenPGO::setFuncName(llvm::Function *Fn) {
  41. setFuncName(Fn->getName(), Fn->getLinkage());
  42. // Create PGOFuncName meta data.
  43. llvm::createPGOFuncNameMetadata(*Fn, FuncName);
  44. }
/// The version of the PGO hash algorithm.
///
/// These enumerators are persisted implicitly via the indexed profile format
/// version, so their meaning must never change.
enum PGOHashVersion : unsigned {
  PGO_HASH_V1,
  PGO_HASH_V2,
  PGO_HASH_V3,

  // Keep this set to the latest hash version.
  PGO_HASH_LATEST = PGO_HASH_V3
};
  53. namespace {
/// Stable hasher for PGO region counters.
///
/// PGOHash produces a stable hash of a given function's control flow.
///
/// Changing the output of this hash will invalidate all previously generated
/// profiles -- i.e., don't do it.
///
/// \note  When this hash does eventually change (years?), we still need to
/// support old hashes.  We'll need to pull in the version number from the
/// profile data format and use the matching hash function.
class PGOHash {
  // Accumulator for up to NumTypesPerWord packed HashType values.
  uint64_t Working;
  // Number of HashType values combined so far.
  unsigned Count;
  // Which hash algorithm version to emulate (see finalize()).
  PGOHashVersion HashVersion;
  llvm::MD5 MD5;

  static const int NumBitsPerType = 6;
  static const unsigned NumTypesPerWord = sizeof(uint64_t) * 8 / NumBitsPerType;
  static const unsigned TooBig = 1u << NumBitsPerType;

public:
  /// Hash values for AST nodes.
  ///
  /// Distinct values for AST nodes that have region counters attached.
  ///
  /// These values must be stable.  All new members must be added at the end,
  /// and no members should be removed.  Changing the enumeration value for an
  /// AST node will affect the hash of every function that contains that node.
  enum HashType : unsigned char {
    None = 0,
    LabelStmt = 1,
    WhileStmt,
    DoStmt,
    ForStmt,
    CXXForRangeStmt,
    ObjCForCollectionStmt,
    SwitchStmt,
    CaseStmt,
    DefaultStmt,
    IfStmt,
    CXXTryStmt,
    CXXCatchStmt,
    ConditionalOperator,
    BinaryOperatorLAnd,
    BinaryOperatorLOr,
    BinaryConditionalOperator,
    // The preceding values are available with PGO_HASH_V1.

    EndOfScope,
    IfThenBranch,
    IfElseBranch,
    GotoStmt,
    IndirectGotoStmt,
    BreakStmt,
    ContinueStmt,
    ReturnStmt,
    ThrowExpr,
    UnaryOperatorLNot,
    BinaryOperatorLT,
    BinaryOperatorGT,
    BinaryOperatorLE,
    BinaryOperatorGE,
    BinaryOperatorEQ,
    BinaryOperatorNE,
    // The preceding values are available since PGO_HASH_V2.

    // Keep this last.  It's for the static assert that follows.
    LastHashType
  };
  // Every HashType must fit in NumBitsPerType bits.
  static_assert(LastHashType <= TooBig, "Too many types in HashType");

  PGOHash(PGOHashVersion HashVersion)
      : Working(0), Count(0), HashVersion(HashVersion) {}
  void combine(HashType Type);
  uint64_t finalize();
  PGOHashVersion getHashVersion() const { return HashVersion; }
};
// Out-of-line definitions required (pre-C++17 odr-use rules) for the in-class
// initialized static const members.
const int PGOHash::NumBitsPerType;
const unsigned PGOHash::NumTypesPerWord;
const unsigned PGOHash::TooBig;
  129. /// Get the PGO hash version used in the given indexed profile.
  130. static PGOHashVersion getPGOHashVersion(llvm::IndexedInstrProfReader *PGOReader,
  131. CodeGenModule &CGM) {
  132. if (PGOReader->getVersion() <= 4)
  133. return PGO_HASH_V1;
  134. if (PGOReader->getVersion() <= 5)
  135. return PGO_HASH_V2;
  136. return PGO_HASH_V3;
  137. }
/// A RecursiveASTVisitor that fills a map of statements to PGO counters.
///
/// Both the counter-numbering order and the order in which node types are
/// combined into the hash are part of the stable profile contract; neither
/// may change.
struct MapRegionCounters : public RecursiveASTVisitor<MapRegionCounters> {
  using Base = RecursiveASTVisitor<MapRegionCounters>;

  /// The next counter value to assign.
  unsigned NextCounter;
  /// The function hash.
  PGOHash Hash;
  /// The map of statements to counters.
  llvm::DenseMap<const Stmt *, unsigned> &CounterMap;
  /// The profile version.
  uint64_t ProfileVersion;

  MapRegionCounters(PGOHashVersion HashVersion, uint64_t ProfileVersion,
                    llvm::DenseMap<const Stmt *, unsigned> &CounterMap)
      : NextCounter(0), Hash(HashVersion), CounterMap(CounterMap),
        ProfileVersion(ProfileVersion) {}

  // Blocks and lambdas are handled as separate functions, so we need not
  // traverse them in the parent context.
  bool TraverseBlockExpr(BlockExpr *BE) { return true; }
  bool TraverseLambdaExpr(LambdaExpr *LE) {
    // Traverse the captures, but not the body.
    for (auto C : zip(LE->captures(), LE->capture_inits()))
      TraverseLambdaCapture(LE, &std::get<0>(C), std::get<1>(C));
    return true;
  }
  bool TraverseCapturedStmt(CapturedStmt *CS) { return true; }

  bool VisitDecl(const Decl *D) {
    // Each declaration kind that can carry a body gets a counter for its
    // body; this serves as the function-entry counter.
    switch (D->getKind()) {
    default:
      break;
    case Decl::Function:
    case Decl::CXXMethod:
    case Decl::CXXConstructor:
    case Decl::CXXDestructor:
    case Decl::CXXConversion:
    case Decl::ObjCMethod:
    case Decl::Block:
    case Decl::Captured:
      CounterMap[D->getBody()] = NextCounter++;
      break;
    }
    return true;
  }

  /// If \p S gets a fresh counter, update the counter mappings. Return the
  /// V1 hash of \p S.
  PGOHash::HashType updateCounterMappings(Stmt *S) {
    auto Type = getHashType(PGO_HASH_V1, S);
    if (Type != PGOHash::None)
      CounterMap[S] = NextCounter++;
    return Type;
  }

  /// The RHS of all logical operators gets a fresh counter in order to count
  /// how many times the RHS evaluates to true or false, depending on the
  /// semantics of the operator. This is only valid for ">= v7" of the profile
  /// version so that we facilitate backward compatibility.
  bool VisitBinaryOperator(BinaryOperator *S) {
    if (ProfileVersion >= llvm::IndexedInstrProf::Version7)
      if (S->isLogicalOp() &&
          CodeGenFunction::isInstrumentedCondition(S->getRHS()))
        CounterMap[S->getRHS()] = NextCounter++;
    return Base::VisitBinaryOperator(S);
  }

  /// Include \p S in the function hash.
  bool VisitStmt(Stmt *S) {
    // Counter numbering always uses the V1 classification so that counters
    // line up with older profiles; the hash may use a newer classification.
    auto Type = updateCounterMappings(S);
    if (Hash.getHashVersion() != PGO_HASH_V1)
      Type = getHashType(Hash.getHashVersion(), S);
    if (Type != PGOHash::None)
      Hash.combine(Type);
    return true;
  }

  bool TraverseIfStmt(IfStmt *If) {
    // If we used the V1 hash, use the default traversal.
    if (Hash.getHashVersion() == PGO_HASH_V1)
      return Base::TraverseIfStmt(If);

    // Otherwise, keep track of which branch we're in while traversing.
    VisitStmt(If);
    for (Stmt *CS : If->children()) {
      if (!CS)
        continue;
      if (CS == If->getThen())
        Hash.combine(PGOHash::IfThenBranch);
      else if (CS == If->getElse())
        Hash.combine(PGOHash::IfElseBranch);
      TraverseStmt(CS);
    }
    Hash.combine(PGOHash::EndOfScope);
    return true;
  }

// If the statement type \p N is nestable, and its nesting impacts profile
// stability, define a custom traversal which tracks the end of the statement
// in the hash (provided we're not using the V1 hash).
#define DEFINE_NESTABLE_TRAVERSAL(N)                                           \
  bool Traverse##N(N *S) {                                                     \
    Base::Traverse##N(S);                                                      \
    if (Hash.getHashVersion() != PGO_HASH_V1)                                  \
      Hash.combine(PGOHash::EndOfScope);                                       \
    return true;                                                               \
  }

  DEFINE_NESTABLE_TRAVERSAL(WhileStmt)
  DEFINE_NESTABLE_TRAVERSAL(DoStmt)
  DEFINE_NESTABLE_TRAVERSAL(ForStmt)
  DEFINE_NESTABLE_TRAVERSAL(CXXForRangeStmt)
  DEFINE_NESTABLE_TRAVERSAL(ObjCForCollectionStmt)
  DEFINE_NESTABLE_TRAVERSAL(CXXTryStmt)
  DEFINE_NESTABLE_TRAVERSAL(CXXCatchStmt)

  /// Get version \p HashVersion of the PGO hash for \p S.
  PGOHash::HashType getHashType(PGOHashVersion HashVersion, const Stmt *S) {
    switch (S->getStmtClass()) {
    default:
      break;
    case Stmt::LabelStmtClass:
      return PGOHash::LabelStmt;
    case Stmt::WhileStmtClass:
      return PGOHash::WhileStmt;
    case Stmt::DoStmtClass:
      return PGOHash::DoStmt;
    case Stmt::ForStmtClass:
      return PGOHash::ForStmt;
    case Stmt::CXXForRangeStmtClass:
      return PGOHash::CXXForRangeStmt;
    case Stmt::ObjCForCollectionStmtClass:
      return PGOHash::ObjCForCollectionStmt;
    case Stmt::SwitchStmtClass:
      return PGOHash::SwitchStmt;
    case Stmt::CaseStmtClass:
      return PGOHash::CaseStmt;
    case Stmt::DefaultStmtClass:
      return PGOHash::DefaultStmt;
    case Stmt::IfStmtClass:
      return PGOHash::IfStmt;
    case Stmt::CXXTryStmtClass:
      return PGOHash::CXXTryStmt;
    case Stmt::CXXCatchStmtClass:
      return PGOHash::CXXCatchStmt;
    case Stmt::ConditionalOperatorClass:
      return PGOHash::ConditionalOperator;
    case Stmt::BinaryConditionalOperatorClass:
      return PGOHash::BinaryConditionalOperator;
    case Stmt::BinaryOperatorClass: {
      const BinaryOperator *BO = cast<BinaryOperator>(S);
      if (BO->getOpcode() == BO_LAnd)
        return PGOHash::BinaryOperatorLAnd;
      if (BO->getOpcode() == BO_LOr)
        return PGOHash::BinaryOperatorLOr;
      // Comparison operators only contribute to the hash from V2 onward.
      if (HashVersion >= PGO_HASH_V2) {
        switch (BO->getOpcode()) {
        default:
          break;
        case BO_LT:
          return PGOHash::BinaryOperatorLT;
        case BO_GT:
          return PGOHash::BinaryOperatorGT;
        case BO_LE:
          return PGOHash::BinaryOperatorLE;
        case BO_GE:
          return PGOHash::BinaryOperatorGE;
        case BO_EQ:
          return PGOHash::BinaryOperatorEQ;
        case BO_NE:
          return PGOHash::BinaryOperatorNE;
        }
      }
      break;
    }
    }

    // Node kinds that only exist in the V2+ classification.
    if (HashVersion >= PGO_HASH_V2) {
      switch (S->getStmtClass()) {
      default:
        break;
      case Stmt::GotoStmtClass:
        return PGOHash::GotoStmt;
      case Stmt::IndirectGotoStmtClass:
        return PGOHash::IndirectGotoStmt;
      case Stmt::BreakStmtClass:
        return PGOHash::BreakStmt;
      case Stmt::ContinueStmtClass:
        return PGOHash::ContinueStmt;
      case Stmt::ReturnStmtClass:
        return PGOHash::ReturnStmt;
      case Stmt::CXXThrowExprClass:
        return PGOHash::ThrowExpr;
      case Stmt::UnaryOperatorClass: {
        const UnaryOperator *UO = cast<UnaryOperator>(S);
        if (UO->getOpcode() == UO_LNot)
          return PGOHash::UnaryOperatorLNot;
        break;
      }
      }
    }

    return PGOHash::None;
  }
};
/// A StmtVisitor that propagates the raw counts through the AST and
/// records the count at statements where the value may change.
///
/// Counters are only attached to a subset of statements; this visitor derives
/// the counts for everything else by edge arithmetic (sums and differences of
/// the instrumented counts), so the order and exact formulas here must match
/// the counter placement done at instrumentation time.
struct ComputeRegionCounts : public ConstStmtVisitor<ComputeRegionCounts> {
  /// PGO state.
  CodeGenPGO &PGO;

  /// A flag that is set when the current count should be recorded on the
  /// next statement, such as at the exit of a loop.
  bool RecordNextStmtCount;

  /// The count at the current location in the traversal.
  uint64_t CurrentCount;

  /// The map of statements to count values.
  llvm::DenseMap<const Stmt *, uint64_t> &CountMap;

  /// BreakContinueStack - Keep counts of breaks and continues inside loops.
  struct BreakContinue {
    uint64_t BreakCount;
    uint64_t ContinueCount;
    BreakContinue() : BreakCount(0), ContinueCount(0) {}
  };
  SmallVector<BreakContinue, 8> BreakContinueStack;

  ComputeRegionCounts(llvm::DenseMap<const Stmt *, uint64_t> &CountMap,
                      CodeGenPGO &PGO)
      : PGO(PGO), RecordNextStmtCount(false), CountMap(CountMap) {}

  // If a previous statement requested it (e.g. after a jump), record the
  // current count on \p S and clear the request.
  void RecordStmtCount(const Stmt *S) {
    if (RecordNextStmtCount) {
      CountMap[S] = CurrentCount;
      RecordNextStmtCount = false;
    }
  }

  /// Set and return the current count.
  uint64_t setCount(uint64_t Count) {
    CurrentCount = Count;
    return Count;
  }

  void VisitStmt(const Stmt *S) {
    RecordStmtCount(S);
    for (const Stmt *Child : S->children())
      if (Child)
        this->Visit(Child);
  }

  void VisitFunctionDecl(const FunctionDecl *D) {
    // Counter tracks entry to the function body.
    uint64_t BodyCount = setCount(PGO.getRegionCount(D->getBody()));
    CountMap[D->getBody()] = BodyCount;
    Visit(D->getBody());
  }

  // Skip lambda expressions. We visit these as FunctionDecls when we're
  // generating them and aren't interested in the body when generating a
  // parent context.
  void VisitLambdaExpr(const LambdaExpr *LE) {}

  void VisitCapturedDecl(const CapturedDecl *D) {
    // Counter tracks entry to the capture body.
    uint64_t BodyCount = setCount(PGO.getRegionCount(D->getBody()));
    CountMap[D->getBody()] = BodyCount;
    Visit(D->getBody());
  }

  void VisitObjCMethodDecl(const ObjCMethodDecl *D) {
    // Counter tracks entry to the method body.
    uint64_t BodyCount = setCount(PGO.getRegionCount(D->getBody()));
    CountMap[D->getBody()] = BodyCount;
    Visit(D->getBody());
  }

  void VisitBlockDecl(const BlockDecl *D) {
    // Counter tracks entry to the block body.
    uint64_t BodyCount = setCount(PGO.getRegionCount(D->getBody()));
    CountMap[D->getBody()] = BodyCount;
    Visit(D->getBody());
  }

  void VisitReturnStmt(const ReturnStmt *S) {
    RecordStmtCount(S);
    if (S->getRetValue())
      Visit(S->getRetValue());
    // Control does not continue past a return; any following statement is
    // only reachable via some other edge.
    CurrentCount = 0;
    RecordNextStmtCount = true;
  }

  void VisitCXXThrowExpr(const CXXThrowExpr *E) {
    RecordStmtCount(E);
    if (E->getSubExpr())
      Visit(E->getSubExpr());
    CurrentCount = 0;
    RecordNextStmtCount = true;
  }

  void VisitGotoStmt(const GotoStmt *S) {
    RecordStmtCount(S);
    CurrentCount = 0;
    RecordNextStmtCount = true;
  }

  void VisitLabelStmt(const LabelStmt *S) {
    RecordNextStmtCount = false;
    // Counter tracks the block following the label.
    uint64_t BlockCount = setCount(PGO.getRegionCount(S));
    CountMap[S] = BlockCount;
    Visit(S->getSubStmt());
  }

  void VisitBreakStmt(const BreakStmt *S) {
    RecordStmtCount(S);
    assert(!BreakContinueStack.empty() && "break not in a loop or switch!");
    BreakContinueStack.back().BreakCount += CurrentCount;
    CurrentCount = 0;
    RecordNextStmtCount = true;
  }

  void VisitContinueStmt(const ContinueStmt *S) {
    RecordStmtCount(S);
    assert(!BreakContinueStack.empty() && "continue stmt not in a loop!");
    BreakContinueStack.back().ContinueCount += CurrentCount;
    CurrentCount = 0;
    RecordNextStmtCount = true;
  }

  void VisitWhileStmt(const WhileStmt *S) {
    RecordStmtCount(S);
    uint64_t ParentCount = CurrentCount;

    BreakContinueStack.push_back(BreakContinue());
    // Visit the body region first so the break/continue adjustments can be
    // included when visiting the condition.
    uint64_t BodyCount = setCount(PGO.getRegionCount(S));
    CountMap[S->getBody()] = CurrentCount;
    Visit(S->getBody());
    uint64_t BackedgeCount = CurrentCount;

    // ...then go back and propagate counts through the condition. The count
    // at the start of the condition is the sum of the incoming edges,
    // the backedge from the end of the loop body, and the edges from
    // continue statements.
    BreakContinue BC = BreakContinueStack.pop_back_val();
    uint64_t CondCount =
        setCount(ParentCount + BackedgeCount + BC.ContinueCount);
    CountMap[S->getCond()] = CondCount;
    Visit(S->getCond());
    // Exit count: breaks plus the condition evaluations that did not enter
    // the body.
    setCount(BC.BreakCount + CondCount - BodyCount);
    RecordNextStmtCount = true;
  }

  void VisitDoStmt(const DoStmt *S) {
    RecordStmtCount(S);
    uint64_t LoopCount = PGO.getRegionCount(S);

    BreakContinueStack.push_back(BreakContinue());
    // The count doesn't include the fallthrough from the parent scope. Add it.
    uint64_t BodyCount = setCount(LoopCount + CurrentCount);
    CountMap[S->getBody()] = BodyCount;
    Visit(S->getBody());
    uint64_t BackedgeCount = CurrentCount;

    BreakContinue BC = BreakContinueStack.pop_back_val();
    // The count at the start of the condition is equal to the count at the
    // end of the body, plus any continues.
    uint64_t CondCount = setCount(BackedgeCount + BC.ContinueCount);
    CountMap[S->getCond()] = CondCount;
    Visit(S->getCond());
    setCount(BC.BreakCount + CondCount - LoopCount);
    RecordNextStmtCount = true;
  }

  void VisitForStmt(const ForStmt *S) {
    RecordStmtCount(S);
    if (S->getInit())
      Visit(S->getInit());

    uint64_t ParentCount = CurrentCount;

    BreakContinueStack.push_back(BreakContinue());
    // Visit the body region first. (This is basically the same as a while
    // loop; see further comments in VisitWhileStmt.)
    uint64_t BodyCount = setCount(PGO.getRegionCount(S));
    CountMap[S->getBody()] = BodyCount;
    Visit(S->getBody());
    uint64_t BackedgeCount = CurrentCount;
    BreakContinue BC = BreakContinueStack.pop_back_val();

    // The increment is essentially part of the body but it needs to include
    // the count for all the continue statements.
    if (S->getInc()) {
      uint64_t IncCount = setCount(BackedgeCount + BC.ContinueCount);
      CountMap[S->getInc()] = IncCount;
      Visit(S->getInc());
    }

    // ...then go back and propagate counts through the condition.
    uint64_t CondCount =
        setCount(ParentCount + BackedgeCount + BC.ContinueCount);
    if (S->getCond()) {
      CountMap[S->getCond()] = CondCount;
      Visit(S->getCond());
    }
    setCount(BC.BreakCount + CondCount - BodyCount);
    RecordNextStmtCount = true;
  }

  void VisitCXXForRangeStmt(const CXXForRangeStmt *S) {
    RecordStmtCount(S);
    if (S->getInit())
      Visit(S->getInit());
    // The loop-var/range/begin/end statements execute once, before the loop.
    Visit(S->getLoopVarStmt());
    Visit(S->getRangeStmt());
    Visit(S->getBeginStmt());
    Visit(S->getEndStmt());

    uint64_t ParentCount = CurrentCount;
    BreakContinueStack.push_back(BreakContinue());
    // Visit the body region first. (This is basically the same as a while
    // loop; see further comments in VisitWhileStmt.)
    uint64_t BodyCount = setCount(PGO.getRegionCount(S));
    CountMap[S->getBody()] = BodyCount;
    Visit(S->getBody());
    uint64_t BackedgeCount = CurrentCount;
    BreakContinue BC = BreakContinueStack.pop_back_val();

    // The increment is essentially part of the body but it needs to include
    // the count for all the continue statements.
    uint64_t IncCount = setCount(BackedgeCount + BC.ContinueCount);
    CountMap[S->getInc()] = IncCount;
    Visit(S->getInc());

    // ...then go back and propagate counts through the condition.
    uint64_t CondCount =
        setCount(ParentCount + BackedgeCount + BC.ContinueCount);
    CountMap[S->getCond()] = CondCount;
    Visit(S->getCond());
    setCount(BC.BreakCount + CondCount - BodyCount);
    RecordNextStmtCount = true;
  }

  void VisitObjCForCollectionStmt(const ObjCForCollectionStmt *S) {
    RecordStmtCount(S);
    Visit(S->getElement());
    uint64_t ParentCount = CurrentCount;
    BreakContinueStack.push_back(BreakContinue());
    // Counter tracks the body of the loop.
    uint64_t BodyCount = setCount(PGO.getRegionCount(S));
    CountMap[S->getBody()] = BodyCount;
    Visit(S->getBody());
    uint64_t BackedgeCount = CurrentCount;
    BreakContinue BC = BreakContinueStack.pop_back_val();

    setCount(BC.BreakCount + ParentCount + BackedgeCount + BC.ContinueCount -
             BodyCount);
    RecordNextStmtCount = true;
  }

  void VisitSwitchStmt(const SwitchStmt *S) {
    RecordStmtCount(S);
    if (S->getInit())
      Visit(S->getInit());
    Visit(S->getCond());
    // The body is only reachable through case/default labels, whose counts
    // are set in VisitSwitchCase; zero the fallthrough count here.
    CurrentCount = 0;
    BreakContinueStack.push_back(BreakContinue());
    Visit(S->getBody());
    // If the switch is inside a loop, add the continue counts.
    BreakContinue BC = BreakContinueStack.pop_back_val();
    if (!BreakContinueStack.empty())
      BreakContinueStack.back().ContinueCount += BC.ContinueCount;
    // Counter tracks the exit block of the switch.
    setCount(PGO.getRegionCount(S));
    RecordNextStmtCount = true;
  }

  void VisitSwitchCase(const SwitchCase *S) {
    RecordNextStmtCount = false;
    // Counter for this particular case. This counts only jumps from the
    // switch header and does not include fallthrough from the case before
    // this one.
    uint64_t CaseCount = PGO.getRegionCount(S);
    setCount(CurrentCount + CaseCount);
    // We need the count without fallthrough in the mapping, so it's more useful
    // for branch probabilities.
    CountMap[S] = CaseCount;
    RecordNextStmtCount = true;
    Visit(S->getSubStmt());
  }

  void VisitIfStmt(const IfStmt *S) {
    RecordStmtCount(S);

    // An `if consteval` statement has exactly one compile-time-selected arm;
    // only that arm contributes runtime counts.
    if (S->isConsteval()) {
      const Stmt *Stm = S->isNegatedConsteval() ? S->getThen() : S->getElse();
      if (Stm)
        Visit(Stm);
      return;
    }

    uint64_t ParentCount = CurrentCount;
    if (S->getInit())
      Visit(S->getInit());
    Visit(S->getCond());

    // Counter tracks the "then" part of an if statement. The count for
    // the "else" part, if it exists, will be calculated from this counter.
    uint64_t ThenCount = setCount(PGO.getRegionCount(S));
    CountMap[S->getThen()] = ThenCount;
    Visit(S->getThen());
    uint64_t OutCount = CurrentCount;

    uint64_t ElseCount = ParentCount - ThenCount;
    if (S->getElse()) {
      setCount(ElseCount);
      CountMap[S->getElse()] = ElseCount;
      Visit(S->getElse());
      OutCount += CurrentCount;
    } else
      OutCount += ElseCount;
    setCount(OutCount);
    RecordNextStmtCount = true;
  }

  void VisitCXXTryStmt(const CXXTryStmt *S) {
    RecordStmtCount(S);
    Visit(S->getTryBlock());
    for (unsigned I = 0, E = S->getNumHandlers(); I < E; ++I)
      Visit(S->getHandler(I));
    // Counter tracks the continuation block of the try statement.
    setCount(PGO.getRegionCount(S));
    RecordNextStmtCount = true;
  }

  void VisitCXXCatchStmt(const CXXCatchStmt *S) {
    RecordNextStmtCount = false;
    // Counter tracks the catch statement's handler block.
    uint64_t CatchCount = setCount(PGO.getRegionCount(S));
    CountMap[S] = CatchCount;
    Visit(S->getHandlerBlock());
  }

  void VisitAbstractConditionalOperator(const AbstractConditionalOperator *E) {
    RecordStmtCount(E);
    uint64_t ParentCount = CurrentCount;
    Visit(E->getCond());

    // Counter tracks the "true" part of a conditional operator. The
    // count in the "false" part will be calculated from this counter.
    uint64_t TrueCount = setCount(PGO.getRegionCount(E));
    CountMap[E->getTrueExpr()] = TrueCount;
    Visit(E->getTrueExpr());
    uint64_t OutCount = CurrentCount;

    uint64_t FalseCount = setCount(ParentCount - TrueCount);
    CountMap[E->getFalseExpr()] = FalseCount;
    Visit(E->getFalseExpr());
    OutCount += CurrentCount;

    setCount(OutCount);
    RecordNextStmtCount = true;
  }

  void VisitBinLAnd(const BinaryOperator *E) {
    RecordStmtCount(E);
    uint64_t ParentCount = CurrentCount;
    Visit(E->getLHS());
    // Counter tracks the right hand side of a logical and operator.
    uint64_t RHSCount = setCount(PGO.getRegionCount(E));
    CountMap[E->getRHS()] = RHSCount;
    Visit(E->getRHS());
    setCount(ParentCount + RHSCount - CurrentCount);
    RecordNextStmtCount = true;
  }

  void VisitBinLOr(const BinaryOperator *E) {
    RecordStmtCount(E);
    uint64_t ParentCount = CurrentCount;
    Visit(E->getLHS());
    // Counter tracks the right hand side of a logical or operator.
    uint64_t RHSCount = setCount(PGO.getRegionCount(E));
    CountMap[E->getRHS()] = RHSCount;
    Visit(E->getRHS());
    setCount(ParentCount + RHSCount - CurrentCount);
    RecordNextStmtCount = true;
  }
};
  666. } // end anonymous namespace
// Fold one HashType into the running hash. Values are packed six bits at a
// time into Working; whenever a full word accumulates, it is flushed into the
// MD5 state. The bit layout and flush cadence are part of the stable hash.
void PGOHash::combine(HashType Type) {
  // Check that we never combine 0 and only have six bits.
  assert(Type && "Hash is invalid: unexpected type 0");
  assert(unsigned(Type) < TooBig && "Hash is invalid: too many types");

  // Pass through MD5 if enough work has built up.
  if (Count && Count % NumTypesPerWord == 0) {
    using namespace llvm::support;
    // Hash the word in little-endian byte order so the result is the same on
    // all hosts.
    uint64_t Swapped = endian::byte_swap<uint64_t, little>(Working);
    MD5.update(llvm::ArrayRef((uint8_t *)&Swapped, sizeof(Swapped)));
    Working = 0;
  }

  // Accumulate the current type.
  ++Count;
  Working = Working << NumBitsPerType | Type;
}
// Produce the final 64-bit hash value from the accumulated state.
uint64_t PGOHash::finalize() {
  // Use Working as the hash directly if we never used MD5.
  if (Count <= NumTypesPerWord)
    // No need to byte swap here, since none of the math was endian-dependent.
    // This number will be byte-swapped as required on endianness transitions,
    // so we will see the same value on the other side.
    return Working;

  // Check for remaining work in Working.
  if (Working) {
    // Keep the buggy behavior from v1 and v2 for backward-compatibility. This
    // is buggy because it converts a uint64_t into an array of uint8_t.
    if (HashVersion < PGO_HASH_V3) {
      MD5.update({(uint8_t)Working});
    } else {
      using namespace llvm::support;
      uint64_t Swapped = endian::byte_swap<uint64_t, little>(Working);
      MD5.update(llvm::ArrayRef((uint8_t *)&Swapped, sizeof(Swapped)));
    }
  }

  // Finalize the MD5 and return the hash.
  llvm::MD5::MD5Result Result;
  MD5.final(Result);
  // Only the low 64 bits of the digest are used as the function hash.
  return Result.low();
}
// Decide whether this function should be instrumented / profiled, and if so
// build its region-counter map, emit its coverage mapping, and (when reading
// a profile) load its counts. Ordered early-outs filter out declarations
// that must not be counted.
void CodeGenPGO::assignRegionCounters(GlobalDecl GD, llvm::Function *Fn) {
  const Decl *D = GD.getDecl();
  if (!D->hasBody())
    return;

  // Skip CUDA/HIP kernel launch stub functions.
  if (CGM.getLangOpts().CUDA && !CGM.getLangOpts().CUDAIsDevice &&
      D->hasAttr<CUDAGlobalAttr>())
    return;

  // Nothing to do unless we are instrumenting or have profile data to apply.
  bool InstrumentRegions = CGM.getCodeGenOpts().hasProfileClangInstr();
  llvm::IndexedInstrProfReader *PGOReader = CGM.getPGOReader();
  if (!InstrumentRegions && !PGOReader)
    return;
  if (D->isImplicit())
    return;

  // Constructors and destructors may be represented by several functions in IR.
  // If so, instrument only base variant, others are implemented by delegation
  // to the base one, it would be counted twice otherwise.
  if (CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    if (const auto *CCD = dyn_cast<CXXConstructorDecl>(D))
      if (GD.getCtorType() != Ctor_Base &&
          CodeGenFunction::IsConstructorDelegationValid(CCD))
        return;
  }
  if (isa<CXXDestructorDecl>(D) && GD.getDtorType() != Dtor_Base)
    return;

  CGM.ClearUnusedCoverageMapping(D);

  // Respect per-function opt-outs (noprofile / skipprofile attributes).
  if (Fn->hasFnAttribute(llvm::Attribute::NoProfile))
    return;
  if (Fn->hasFnAttribute(llvm::Attribute::SkipProfile))
    return;

  setFuncName(Fn);
  mapRegionCounters(D);
  if (CGM.getCodeGenOpts().CoverageMapping)
    emitCounterRegionMapping(D);

  // When a profile is available, pull in this function's counts and use them
  // to compute per-statement counts and function-level attributes.
  if (PGOReader) {
    SourceManager &SM = CGM.getContext().getSourceManager();
    loadRegionCounts(PGOReader, SM.isInMainFile(D->getLocation()));
    computeRegionCounts(D);
    applyFunctionAttributes(PGOReader, Fn);
  }
}
// Walk the declaration's body, assigning a counter index to each counted
// statement and computing the function hash.
void CodeGenPGO::mapRegionCounters(const Decl *D) {
  // Use the latest hash version when inserting instrumentation, but use the
  // version in the indexed profile if we're reading PGO data.
  PGOHashVersion HashVersion = PGO_HASH_LATEST;
  uint64_t ProfileVersion = llvm::IndexedInstrProf::Version;
  if (auto *PGOReader = CGM.getPGOReader()) {
    HashVersion = getPGOHashVersion(PGOReader, CGM);
    ProfileVersion = PGOReader->getVersion();
  }

  // Rebuild the statement -> counter-index map from scratch for this decl.
  RegionCounterMap.reset(new llvm::DenseMap<const Stmt *, unsigned>);
  MapRegionCounters Walker(HashVersion, ProfileVersion, *RegionCounterMap);
  // Traversal is const_cast'ed because RecursiveASTVisitor takes non-const
  // decls; the walker does not mutate the AST.
  if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(D))
    Walker.TraverseDecl(const_cast<FunctionDecl *>(FD));
  else if (const ObjCMethodDecl *MD = dyn_cast_or_null<ObjCMethodDecl>(D))
    Walker.TraverseDecl(const_cast<ObjCMethodDecl *>(MD));
  else if (const BlockDecl *BD = dyn_cast_or_null<BlockDecl>(D))
    Walker.TraverseDecl(const_cast<BlockDecl *>(BD));
  else if (const CapturedDecl *CD = dyn_cast_or_null<CapturedDecl>(D))
    Walker.TraverseDecl(const_cast<CapturedDecl *>(CD));
  assert(Walker.NextCounter > 0 && "no entry counter mapped for decl");
  NumRegionCounters = Walker.NextCounter;
  FunctionHash = Walker.Hash.finalize();
}
  770. bool CodeGenPGO::skipRegionMappingForDecl(const Decl *D) {
  771. if (!D->getBody())
  772. return true;
  773. // Skip host-only functions in the CUDA device compilation and device-only
  774. // functions in the host compilation. Just roughly filter them out based on
  775. // the function attributes. If there are effectively host-only or device-only
  776. // ones, their coverage mapping may still be generated.
  777. if (CGM.getLangOpts().CUDA &&
  778. ((CGM.getLangOpts().CUDAIsDevice && !D->hasAttr<CUDADeviceAttr>() &&
  779. !D->hasAttr<CUDAGlobalAttr>()) ||
  780. (!CGM.getLangOpts().CUDAIsDevice &&
  781. (D->hasAttr<CUDAGlobalAttr>() ||
  782. (!D->hasAttr<CUDAHostAttr>() && D->hasAttr<CUDADeviceAttr>())))))
  783. return true;
  784. // Don't map the functions in system headers.
  785. const auto &SM = CGM.getContext().getSourceManager();
  786. auto Loc = D->getBody()->getBeginLoc();
  787. return SM.isInSystemHeader(Loc);
  788. }
  789. void CodeGenPGO::emitCounterRegionMapping(const Decl *D) {
  790. if (skipRegionMappingForDecl(D))
  791. return;
  792. std::string CoverageMapping;
  793. llvm::raw_string_ostream OS(CoverageMapping);
  794. CoverageMappingGen MappingGen(*CGM.getCoverageMapping(),
  795. CGM.getContext().getSourceManager(),
  796. CGM.getLangOpts(), RegionCounterMap.get());
  797. MappingGen.emitCounterMapping(D, OS);
  798. OS.flush();
  799. if (CoverageMapping.empty())
  800. return;
  801. CGM.getCoverageMapping()->addFunctionMappingRecord(
  802. FuncNameVar, FuncName, FunctionHash, CoverageMapping);
  803. }
  804. void
  805. CodeGenPGO::emitEmptyCounterMapping(const Decl *D, StringRef Name,
  806. llvm::GlobalValue::LinkageTypes Linkage) {
  807. if (skipRegionMappingForDecl(D))
  808. return;
  809. std::string CoverageMapping;
  810. llvm::raw_string_ostream OS(CoverageMapping);
  811. CoverageMappingGen MappingGen(*CGM.getCoverageMapping(),
  812. CGM.getContext().getSourceManager(),
  813. CGM.getLangOpts());
  814. MappingGen.emitEmptyMapping(D, OS);
  815. OS.flush();
  816. if (CoverageMapping.empty())
  817. return;
  818. setFuncName(Name, Linkage);
  819. CGM.getCoverageMapping()->addFunctionMappingRecord(
  820. FuncNameVar, FuncName, FunctionHash, CoverageMapping, false);
  821. }
  822. void CodeGenPGO::computeRegionCounts(const Decl *D) {
  823. StmtCountMap.reset(new llvm::DenseMap<const Stmt *, uint64_t>);
  824. ComputeRegionCounts Walker(*StmtCountMap, *this);
  825. if (const FunctionDecl *FD = dyn_cast_or_null<FunctionDecl>(D))
  826. Walker.VisitFunctionDecl(FD);
  827. else if (const ObjCMethodDecl *MD = dyn_cast_or_null<ObjCMethodDecl>(D))
  828. Walker.VisitObjCMethodDecl(MD);
  829. else if (const BlockDecl *BD = dyn_cast_or_null<BlockDecl>(D))
  830. Walker.VisitBlockDecl(BD);
  831. else if (const CapturedDecl *CD = dyn_cast_or_null<CapturedDecl>(D))
  832. Walker.VisitCapturedDecl(const_cast<CapturedDecl *>(CD));
  833. }
  834. void
  835. CodeGenPGO::applyFunctionAttributes(llvm::IndexedInstrProfReader *PGOReader,
  836. llvm::Function *Fn) {
  837. if (!haveRegionCounts())
  838. return;
  839. uint64_t FunctionCount = getRegionCount(nullptr);
  840. Fn->setEntryCount(FunctionCount);
  841. }
// Emit a call to the instrprof increment intrinsic for statement S, bumping
// its region counter by 1 (or by StepV when a step value is supplied).
void CodeGenPGO::emitCounterIncrement(CGBuilderTy &Builder, const Stmt *S,
                                      llvm::Value *StepV) {
  // Only clang-level instrumentation emits increments, and only when counters
  // were actually mapped for this function.
  if (!CGM.getCodeGenOpts().hasProfileClangInstr() || !RegionCounterMap)
    return;
  // No insertion point means this code is unreachable; don't emit anything.
  if (!Builder.GetInsertBlock())
    return;
  unsigned Counter = (*RegionCounterMap)[S];
  auto *I8PtrTy = llvm::Type::getInt8PtrTy(CGM.getLLVMContext());
  // Arguments shared by both increment intrinsics: function name variable,
  // function hash, total counter count, and this counter's index. StepV (the
  // fifth slot) is only consumed by the "step" variant.
  llvm::Value *Args[] = {llvm::ConstantExpr::getBitCast(FuncNameVar, I8PtrTy),
                         Builder.getInt64(FunctionHash),
                         Builder.getInt32(NumRegionCounters),
                         Builder.getInt32(Counter), StepV};
  if (!StepV)
    // Plain increment: pass only the first four arguments, dropping the null
    // StepV slot.
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::instrprof_increment),
                       ArrayRef(Args, 4));
  else
    // Increment by a runtime-computed step value.
    Builder.CreateCall(
        CGM.getIntrinsic(llvm::Intrinsic::instrprof_increment_step),
        ArrayRef(Args));
}
  862. void CodeGenPGO::setValueProfilingFlag(llvm::Module &M) {
  863. if (CGM.getCodeGenOpts().hasProfileClangInstr())
  864. M.addModuleFlag(llvm::Module::Warning, "EnableValueProfiling",
  865. uint32_t(EnableValueProfiling));
  866. }
// This method either inserts a call to the profile run-time during
// instrumentation or puts profile data into metadata for PGO use.
void CodeGenPGO::valueProfile(CGBuilderTy &Builder, uint32_t ValueKind,
                              llvm::Instruction *ValueSite, llvm::Value *ValuePtr) {
  if (!EnableValueProfiling)
    return;

  // Need a value, a site, and a live insertion point to do anything.
  if (!ValuePtr || !ValueSite || !Builder.GetInsertBlock())
    return;

  // Constants carry no profiling value; skip them.
  if (isa<llvm::Constant>(ValuePtr))
    return;

  bool InstrumentValueSites = CGM.getCodeGenOpts().hasProfileClangInstr();
  if (InstrumentValueSites && RegionCounterMap) {
    // Instrumentation path: emit an instrprof_value_profile call right at the
    // value site, then restore the builder's previous insertion point.
    auto BuilderInsertPoint = Builder.saveIP();
    Builder.SetInsertPoint(ValueSite);
    llvm::Value *Args[5] = {
        llvm::ConstantExpr::getBitCast(FuncNameVar, Builder.getInt8PtrTy()),
        Builder.getInt64(FunctionHash),
        Builder.CreatePtrToInt(ValuePtr, Builder.getInt64Ty()),
        Builder.getInt32(ValueKind),
        // Site indices are assigned in emission order, per value kind.
        Builder.getInt32(NumValueSites[ValueKind]++)
    };
    Builder.CreateCall(
        CGM.getIntrinsic(llvm::Intrinsic::instrprof_value_profile), Args);
    Builder.restoreIP(BuilderInsertPoint);
    return;
  }

  // PGO-use path: annotate the value site with data from the loaded profile.
  llvm::IndexedInstrProfReader *PGOReader = CGM.getPGOReader();
  if (PGOReader && haveRegionCounts()) {
    // We record the top most called three functions at each call site.
    // Profile metadata contains "VP" string identifying this metadata
    // as value profiling data, then a uint32_t value for the value profiling
    // kind, a uint64_t value for the total number of times the call is
    // executed, followed by the function hash and execution count (uint64_t)
    // pairs for each function.
    if (NumValueSites[ValueKind] >= ProfRecord->getNumValueSites(ValueKind))
      return;

    llvm::annotateValueSite(CGM.getModule(), *ValueSite, *ProfRecord,
                            (llvm::InstrProfValueKind)ValueKind,
                            NumValueSites[ValueKind]);

    NumValueSites[ValueKind]++;
  }
}
// Look up this function's record in the indexed profile and cache its
// counts. Lookup failures are folded into PGO statistics rather than
// reported as hard errors.
void CodeGenPGO::loadRegionCounts(llvm::IndexedInstrProfReader *PGOReader,
                                  bool IsInMainFile) {
  CGM.getPGOStats().addVisited(IsInMainFile);
  RegionCounts.clear();
  llvm::Expected<llvm::InstrProfRecord> RecordExpected =
      PGOReader->getInstrProfRecord(FuncName, FunctionHash);
  if (auto E = RecordExpected.takeError()) {
    // Classify the failure for statistics; the Expected error must be
    // consumed either way.
    auto IPE = llvm::InstrProfError::take(std::move(E));
    if (IPE == llvm::instrprof_error::unknown_function)
      // The profile has no entry for this function at all.
      CGM.getPGOStats().addMissing(IsInMainFile);
    else if (IPE == llvm::instrprof_error::hash_mismatch)
      // The function changed since the profile was collected.
      CGM.getPGOStats().addMismatched(IsInMainFile);
    else if (IPE == llvm::instrprof_error::malformed)
      // TODO: Consider a more specific warning for this case.
      CGM.getPGOStats().addMismatched(IsInMainFile);
    return;
  }
  // Keep the whole record (needed for value profiling) and expose its
  // counter array for region-count queries.
  ProfRecord =
      std::make_unique<llvm::InstrProfRecord>(std::move(RecordExpected.get()));
  RegionCounts = ProfRecord->Counts;
}
  930. /// Calculate what to divide by to scale weights.
  931. ///
  932. /// Given the maximum weight, calculate a divisor that will scale all the
  933. /// weights to strictly less than UINT32_MAX.
  934. static uint64_t calculateWeightScale(uint64_t MaxWeight) {
  935. return MaxWeight < UINT32_MAX ? 1 : MaxWeight / UINT32_MAX + 1;
  936. }
  937. /// Scale an individual branch weight (and add 1).
  938. ///
  939. /// Scale a 64-bit weight down to 32-bits using \c Scale.
  940. ///
  941. /// According to Laplace's Rule of Succession, it is better to compute the
  942. /// weight based on the count plus 1, so universally add 1 to the value.
  943. ///
  944. /// \pre \c Scale was calculated by \a calculateWeightScale() with a weight no
  945. /// greater than \c Weight.
  946. static uint32_t scaleBranchWeight(uint64_t Weight, uint64_t Scale) {
  947. assert(Scale && "scale by 0?");
  948. uint64_t Scaled = Weight / Scale + 1;
  949. assert(Scaled <= UINT32_MAX && "overflow 32-bits");
  950. return Scaled;
  951. }
  952. llvm::MDNode *CodeGenFunction::createProfileWeights(uint64_t TrueCount,
  953. uint64_t FalseCount) const {
  954. // Check for empty weights.
  955. if (!TrueCount && !FalseCount)
  956. return nullptr;
  957. // Calculate how to scale down to 32-bits.
  958. uint64_t Scale = calculateWeightScale(std::max(TrueCount, FalseCount));
  959. llvm::MDBuilder MDHelper(CGM.getLLVMContext());
  960. return MDHelper.createBranchWeights(scaleBranchWeight(TrueCount, Scale),
  961. scaleBranchWeight(FalseCount, Scale));
  962. }
  963. llvm::MDNode *
  964. CodeGenFunction::createProfileWeights(ArrayRef<uint64_t> Weights) const {
  965. // We need at least two elements to create meaningful weights.
  966. if (Weights.size() < 2)
  967. return nullptr;
  968. // Check for empty weights.
  969. uint64_t MaxWeight = *std::max_element(Weights.begin(), Weights.end());
  970. if (MaxWeight == 0)
  971. return nullptr;
  972. // Calculate how to scale down to 32-bits.
  973. uint64_t Scale = calculateWeightScale(MaxWeight);
  974. SmallVector<uint32_t, 16> ScaledWeights;
  975. ScaledWeights.reserve(Weights.size());
  976. for (uint64_t W : Weights)
  977. ScaledWeights.push_back(scaleBranchWeight(W, Scale));
  978. llvm::MDBuilder MDHelper(CGM.getLLVMContext());
  979. return MDHelper.createBranchWeights(ScaledWeights);
  980. }
  981. llvm::MDNode *
  982. CodeGenFunction::createProfileWeightsForLoop(const Stmt *Cond,
  983. uint64_t LoopCount) const {
  984. if (!PGO.haveRegionCounts())
  985. return nullptr;
  986. std::optional<uint64_t> CondCount = PGO.getStmtCount(Cond);
  987. if (!CondCount || *CondCount == 0)
  988. return nullptr;
  989. return createProfileWeights(LoopCount,
  990. std::max(*CondCount, LoopCount) - LoopCount);
  991. }