//===- HotColdSplitting.cpp -- Outline Cold Regions -------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// The goal of hot/cold splitting is to improve the memory locality of code.
/// The splitting pass does this by identifying cold blocks and moving them into
/// separate functions.
///
/// When the splitting pass finds a cold block (referred to as "the sink"), it
/// grows a maximal cold region around that block. The maximal region contains
/// all blocks (post-)dominated by the sink [*]. In theory, these blocks are as
/// cold as the sink. Once a region is found, it's split out of the original
/// function provided it's profitable to do so.
///
/// [*] In practice, there is some added complexity because some blocks are not
/// safe to extract.
///
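/// As an illustrative sketch (not taken from a real test case), in a function
/// such as
///
///   void process(Payload *P) {
///     if (!P) {
///       log_failure();   // 'log_failure' is attributed 'cold'
///       abort();
///     }
///     // ... hot work ...
///   }
///
/// the error-handling path is identified as cold and, when the cost model
/// deems it profitable, outlined into a new function (typically named along
/// the lines of "process.cold.1") that is marked cold and optimized for size,
/// leaving only a call to it behind.
///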
/// TODO: Use the PM to get domtrees, and preserve BFI/BPI.
/// TODO: Reorder outlined functions.
///
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/IPO/HotColdSplitting.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/BlockFrequencyInfo.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/ProfileSummaryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/IPO.h"
#include "llvm/Transforms/Utils/CodeExtractor.h"
#include <algorithm>
#include <cassert>
#include <limits>
#include <string>

#define DEBUG_TYPE "hotcoldsplit"

STATISTIC(NumColdRegionsFound, "Number of cold regions found.");
STATISTIC(NumColdRegionsOutlined, "Number of cold regions outlined.");

using namespace llvm;

static cl::opt<bool> EnableStaticAnalysis("hot-cold-static-analysis",
                                          cl::init(true), cl::Hidden);

static cl::opt<int>
    SplittingThreshold("hotcoldsplit-threshold", cl::init(2), cl::Hidden,
                       cl::desc("Base penalty for splitting cold code (as a "
                                "multiple of TCC_Basic)"));

static cl::opt<bool> EnableColdSection(
    "enable-cold-section", cl::init(false), cl::Hidden,
    cl::desc("Enable placement of extracted cold functions"
             " into a separate section after hot-cold splitting."));

static cl::opt<std::string>
    ColdSectionName("hotcoldsplit-cold-section-name", cl::init("__llvm_cold"),
                    cl::Hidden,
                    cl::desc("Name for the section containing cold functions "
                             "extracted by hot-cold splitting."));

static cl::opt<int> MaxParametersForSplit(
    "hotcoldsplit-max-params", cl::init(4), cl::Hidden,
    cl::desc("Maximum number of parameters for a split function"));
namespace {
// Same as blockEndsInUnreachable in CodeGen/BranchFolding.cpp. Do not modify
// this function unless you modify the MBB version as well.
//
/// A no successor, non-return block probably ends in unreachable and is cold.
/// Also consider a block that ends in an indirect branch to be a return block,
/// since many targets use plain indirect branches to return.
bool blockEndsInUnreachable(const BasicBlock &BB) {
  if (!succ_empty(&BB))
    return false;
  if (BB.empty())
    return true;
  const Instruction *I = BB.getTerminator();
  return !(isa<ReturnInst>(I) || isa<IndirectBrInst>(I));
}

bool unlikelyExecuted(BasicBlock &BB) {
  // Exception handling blocks are unlikely executed.
  if (BB.isEHPad() || isa<ResumeInst>(BB.getTerminator()))
    return true;

  // The block is cold if it calls/invokes a cold function. However, do not
  // mark sanitizer traps as cold.
  for (Instruction &I : BB)
    if (auto *CB = dyn_cast<CallBase>(&I))
      if (CB->hasFnAttr(Attribute::Cold) &&
          !CB->getMetadata(LLVMContext::MD_nosanitize))
        return true;

  // The block is cold if it has an unreachable terminator, unless it's
  // preceded by a call to a (possibly warm) noreturn call (e.g. longjmp).
  if (blockEndsInUnreachable(BB)) {
    if (auto *CI =
            dyn_cast_or_null<CallInst>(BB.getTerminator()->getPrevNode()))
      if (CI->hasFnAttr(Attribute::NoReturn))
        return false;
    return true;
  }

  return false;
}

/// Check whether it's safe to outline \p BB.
static bool mayExtractBlock(const BasicBlock &BB) {
  // EH pads are unsafe to outline because doing so breaks EH type tables. It
  // follows that invoke instructions cannot be extracted, because CodeExtractor
  // requires unwind destinations to be within the extraction region.
  //
  // Resumes that are not reachable from a cleanup landing pad are considered to
  // be unreachable. It's not safe to split them out either.
  if (BB.hasAddressTaken() || BB.isEHPad())
    return false;
  auto Term = BB.getTerminator();
  return !isa<InvokeInst>(Term) && !isa<ResumeInst>(Term);
}

/// Mark \p F cold. Based on this assumption, also optimize it for minimum size.
/// If \p UpdateEntryCount is true (set when this is a new split function and
/// module has profile data), set entry count to 0 to ensure treated as cold.
/// Return true if the function is changed.
static bool markFunctionCold(Function &F, bool UpdateEntryCount = false) {
  assert(!F.hasOptNone() && "Can't mark this cold");
  bool Changed = false;
  if (!F.hasFnAttribute(Attribute::Cold)) {
    F.addFnAttr(Attribute::Cold);
    Changed = true;
  }
  if (!F.hasFnAttribute(Attribute::MinSize)) {
    F.addFnAttr(Attribute::MinSize);
    Changed = true;
  }
  if (UpdateEntryCount) {
    // Set the entry count to 0 to ensure it is placed in the unlikely text
    // section when function sections are enabled.
    F.setEntryCount(0);
    Changed = true;
  }
  return Changed;
}

class HotColdSplittingLegacyPass : public ModulePass {
public:
  static char ID;
  HotColdSplittingLegacyPass() : ModulePass(ID) {
    initializeHotColdSplittingLegacyPassPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<BlockFrequencyInfoWrapperPass>();
    AU.addRequired<ProfileSummaryInfoWrapperPass>();
    AU.addRequired<TargetTransformInfoWrapperPass>();
    AU.addUsedIfAvailable<AssumptionCacheTracker>();
  }

  bool runOnModule(Module &M) override;
};

} // end anonymous namespace

/// Check whether \p F is inherently cold.
bool HotColdSplitting::isFunctionCold(const Function &F) const {
  if (F.hasFnAttribute(Attribute::Cold))
    return true;

  if (F.getCallingConv() == CallingConv::Cold)
    return true;

  if (PSI->isFunctionEntryCold(&F))
    return true;

  return false;
}

// Returns false if the function should not be considered for hot-cold split
// optimization.
bool HotColdSplitting::shouldOutlineFrom(const Function &F) const {
  if (F.hasFnAttribute(Attribute::AlwaysInline))
    return false;

  if (F.hasFnAttribute(Attribute::NoInline))
    return false;

  // A function marked `noreturn` may contain unreachable terminators: these
  // should not be considered cold, as the function may be a trampoline.
  if (F.hasFnAttribute(Attribute::NoReturn))
    return false;

  if (F.hasFnAttribute(Attribute::SanitizeAddress) ||
      F.hasFnAttribute(Attribute::SanitizeHWAddress) ||
      F.hasFnAttribute(Attribute::SanitizeThread) ||
      F.hasFnAttribute(Attribute::SanitizeMemory))
    return false;

  return true;
}

/// Get the benefit score of outlining \p Region.
static InstructionCost getOutliningBenefit(ArrayRef<BasicBlock *> Region,
                                           TargetTransformInfo &TTI) {
  // Sum up the code size costs of non-terminator instructions. Tight coupling
  // with \ref getOutliningPenalty is needed to model the costs of terminators.
  InstructionCost Benefit = 0;
  for (BasicBlock *BB : Region)
    for (Instruction &I : BB->instructionsWithoutDebug())
      if (&I != BB->getTerminator())
        Benefit +=
            TTI.getInstructionCost(&I, TargetTransformInfo::TCK_CodeSize);
  return Benefit;
}

/// Get the penalty score for outlining \p Region.
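/// As a brief summary of the logic below: the penalty starts at the base
/// SplittingThreshold, adds a per-parameter cost for materializing arguments
/// to the split function, adds a per-output cost for the alloca/store/reload
/// used to pass each output (and each split exit phi) back to the caller,
/// subtracts a small bonus when no block in the region returns, and adds a
/// cost for the switch the caller needs when the region has more than one
/// successor outside of it. For example (assuming the default threshold of 2
/// and TCC_Basic == 1), a returning region with 2 inputs, 1 output, and a
/// single outside successor costs roughly 2 + 2*3 + 3*1 = 11.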
static int getOutliningPenalty(ArrayRef<BasicBlock *> Region,
                               unsigned NumInputs, unsigned NumOutputs) {
  int Penalty = SplittingThreshold;
  LLVM_DEBUG(dbgs() << "Applying penalty for splitting: " << Penalty << "\n");

  // If the splitting threshold is set at or below zero, skip the usual
  // profitability check.
  if (SplittingThreshold <= 0)
    return Penalty;

  // Find the number of distinct exit blocks for the region. Use a conservative
  // check to determine whether control returns from the region.
  bool NoBlocksReturn = true;
  SmallPtrSet<BasicBlock *, 2> SuccsOutsideRegion;
  for (BasicBlock *BB : Region) {
    // If a block has no successors, only assume it does not return if it's
    // unreachable.
    if (succ_empty(BB)) {
      NoBlocksReturn &= isa<UnreachableInst>(BB->getTerminator());
      continue;
    }

    for (BasicBlock *SuccBB : successors(BB)) {
      if (!is_contained(Region, SuccBB)) {
        NoBlocksReturn = false;
        SuccsOutsideRegion.insert(SuccBB);
      }
    }
  }

  // Count the number of phis in exit blocks with >= 2 incoming values from the
  // outlining region. These phis are split (\ref severSplitPHINodesOfExits),
  // and new outputs are created to supply the split phis. CodeExtractor can't
  // report these new outputs until extraction begins, but it's important to
  // factor the cost of the outputs into the cost calculation.
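  //
  // As an illustration, if an exit block contains 'phi [%a, %cold1],
  // [%b, %cold2]' with both %cold1 and %cold2 inside the region, the phi is
  // split and its merged value reaches the caller as one extra output.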
  unsigned NumSplitExitPhis = 0;
  for (BasicBlock *ExitBB : SuccsOutsideRegion) {
    for (PHINode &PN : ExitBB->phis()) {
      // Find all incoming values from the outlining region.
      int NumIncomingVals = 0;
      for (unsigned i = 0; i < PN.getNumIncomingValues(); ++i)
        if (llvm::is_contained(Region, PN.getIncomingBlock(i))) {
          ++NumIncomingVals;
          if (NumIncomingVals > 1) {
            ++NumSplitExitPhis;
            break;
          }
        }
    }
  }

  // Apply a penalty for calling the split function. Factor in the cost of
  // materializing all of the parameters.
  int NumOutputsAndSplitPhis = NumOutputs + NumSplitExitPhis;
  int NumParams = NumInputs + NumOutputsAndSplitPhis;
  if (NumParams > MaxParametersForSplit) {
    LLVM_DEBUG(dbgs() << NumInputs << " inputs and " << NumOutputsAndSplitPhis
                      << " outputs exceeds parameter limit ("
                      << MaxParametersForSplit << ")\n");
    return std::numeric_limits<int>::max();
  }
  const int CostForArgMaterialization = 2 * TargetTransformInfo::TCC_Basic;
  LLVM_DEBUG(dbgs() << "Applying penalty for: " << NumParams << " params\n");
  Penalty += CostForArgMaterialization * NumParams;

  // Apply the typical code size cost for an output alloca and its associated
  // reload in the caller. Also penalize the associated store in the callee.
  LLVM_DEBUG(dbgs() << "Applying penalty for: " << NumOutputsAndSplitPhis
                    << " outputs/split phis\n");
  const int CostForRegionOutput = 3 * TargetTransformInfo::TCC_Basic;
  Penalty += CostForRegionOutput * NumOutputsAndSplitPhis;

  // Apply a `noreturn` bonus.
  if (NoBlocksReturn) {
    LLVM_DEBUG(dbgs() << "Applying bonus for: " << Region.size()
                      << " non-returning terminators\n");
    Penalty -= Region.size();
  }

  // Apply a penalty for having more than one successor outside of the region.
  // This penalty accounts for the switch needed in the caller.
  if (SuccsOutsideRegion.size() > 1) {
    LLVM_DEBUG(dbgs() << "Applying penalty for: " << SuccsOutsideRegion.size()
                      << " non-region successors\n");
    Penalty += (SuccsOutsideRegion.size() - 1) * TargetTransformInfo::TCC_Basic;
  }

  return Penalty;
}

Function *HotColdSplitting::extractColdRegion(
    const BlockSequence &Region, const CodeExtractorAnalysisCache &CEAC,
    DominatorTree &DT, BlockFrequencyInfo *BFI, TargetTransformInfo &TTI,
    OptimizationRemarkEmitter &ORE, AssumptionCache *AC, unsigned Count) {
  assert(!Region.empty());

  // TODO: Pass BFI and BPI to update profile information.
  CodeExtractor CE(Region, &DT, /* AggregateArgs */ false, /* BFI */ nullptr,
                   /* BPI */ nullptr, AC, /* AllowVarArgs */ false,
                   /* AllowAlloca */ false, /* AllocaBlock */ nullptr,
                   /* Suffix */ "cold." + std::to_string(Count));

  // Perform a simple cost/benefit analysis to decide whether or not to permit
  // splitting.
  SetVector<Value *> Inputs, Outputs, Sinks;
  CE.findInputsOutputs(Inputs, Outputs, Sinks);
  InstructionCost OutliningBenefit = getOutliningBenefit(Region, TTI);
  int OutliningPenalty =
      getOutliningPenalty(Region, Inputs.size(), Outputs.size());
  LLVM_DEBUG(dbgs() << "Split profitability: benefit = " << OutliningBenefit
                    << ", penalty = " << OutliningPenalty << "\n");
  if (!OutliningBenefit.isValid() || OutliningBenefit <= OutliningPenalty)
    return nullptr;

  Function *OrigF = Region[0]->getParent();
  if (Function *OutF = CE.extractCodeRegion(CEAC)) {
    User *U = *OutF->user_begin();
    CallInst *CI = cast<CallInst>(U);
    NumColdRegionsOutlined++;
    if (TTI.useColdCCForColdCall(*OutF)) {
      OutF->setCallingConv(CallingConv::Cold);
      CI->setCallingConv(CallingConv::Cold);
    }
    CI->setIsNoInline();

    if (EnableColdSection)
      OutF->setSection(ColdSectionName);
    else {
      if (OrigF->hasSection())
        OutF->setSection(OrigF->getSection());
    }

    markFunctionCold(*OutF, BFI != nullptr);

    LLVM_DEBUG(llvm::dbgs() << "Outlined Region: " << *OutF);
    ORE.emit([&]() {
      return OptimizationRemark(DEBUG_TYPE, "HotColdSplit",
                                &*Region[0]->begin())
             << ore::NV("Original", OrigF) << " split cold code into "
             << ore::NV("Split", OutF);
    });
    return OutF;
  }

  ORE.emit([&]() {
    return OptimizationRemarkMissed(DEBUG_TYPE, "ExtractFailed",
                                    &*Region[0]->begin())
           << "Failed to extract region at block "
           << ore::NV("Block", Region.front());
  });
  return nullptr;
}

/// A pair of (basic block, score).
using BlockTy = std::pair<BasicBlock *, unsigned>;

namespace {
/// A maximal outlining region. This contains all blocks post-dominated by a
/// sink block, the sink block itself, and all blocks dominated by the sink.
/// If sink-predecessors and sink-successors cannot be extracted in one region,
/// the static constructor returns a list of suitable extraction regions.
class OutliningRegion {
  /// A list of (block, score) pairs. A block's score is non-zero iff it's a
  /// viable sub-region entry point. Blocks with higher scores are better entry
  /// points (i.e. they are more distant ancestors of the sink block).
  SmallVector<BlockTy, 0> Blocks = {};

  /// The suggested entry point into the region. If the region has multiple
  /// entry points, all blocks within the region may not be reachable from this
  /// entry point.
  BasicBlock *SuggestedEntryPoint = nullptr;

  /// Whether the entire function is cold.
  bool EntireFunctionCold = false;

  /// If \p BB is a viable entry point, return \p Score. Return 0 otherwise.
  static unsigned getEntryPointScore(BasicBlock &BB, unsigned Score) {
    return mayExtractBlock(BB) ? Score : 0;
  }

  /// These scores should be lower than the score for predecessor blocks,
  /// because regions starting at predecessor blocks are typically larger.
  static constexpr unsigned ScoreForSuccBlock = 1;
  static constexpr unsigned ScoreForSinkBlock = 1;

  OutliningRegion(const OutliningRegion &) = delete;
  OutliningRegion &operator=(const OutliningRegion &) = delete;

public:
  OutliningRegion() = default;
  OutliningRegion(OutliningRegion &&) = default;
  OutliningRegion &operator=(OutliningRegion &&) = default;

  static std::vector<OutliningRegion> create(BasicBlock &SinkBB,
                                             const DominatorTree &DT,
                                             const PostDominatorTree &PDT) {
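    // In outline: walk up the inverse CFG from SinkBB to collect extractable
    // ancestors that the sink post-dominates, then walk down the CFG to
    // collect extractable successors that the sink dominates. If the sink
    // itself cannot be extracted, its successors go into a second, separate
    // region.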
    std::vector<OutliningRegion> Regions;
    SmallPtrSet<BasicBlock *, 4> RegionBlocks;

    Regions.emplace_back();
    OutliningRegion *ColdRegion = &Regions.back();

    auto addBlockToRegion = [&](BasicBlock *BB, unsigned Score) {
      RegionBlocks.insert(BB);
      ColdRegion->Blocks.emplace_back(BB, Score);
    };

    // The ancestor farthest-away from SinkBB, and also post-dominated by it.
    unsigned SinkScore = getEntryPointScore(SinkBB, ScoreForSinkBlock);
    ColdRegion->SuggestedEntryPoint = (SinkScore > 0) ? &SinkBB : nullptr;
    unsigned BestScore = SinkScore;

    // Visit SinkBB's ancestors using inverse DFS.
    auto PredIt = ++idf_begin(&SinkBB);
    auto PredEnd = idf_end(&SinkBB);
    while (PredIt != PredEnd) {
      BasicBlock &PredBB = **PredIt;
      bool SinkPostDom = PDT.dominates(&SinkBB, &PredBB);

      // If the predecessor is cold and has no predecessors, the entire
      // function must be cold.
      if (SinkPostDom && pred_empty(&PredBB)) {
        ColdRegion->EntireFunctionCold = true;
        return Regions;
      }

      // If SinkBB does not post-dominate a predecessor, do not mark the
      // predecessor (or any of its predecessors) cold.
      if (!SinkPostDom || !mayExtractBlock(PredBB)) {
        PredIt.skipChildren();
        continue;
      }

      // Keep track of the post-dominated ancestor farthest away from the sink.
      // The path length is always >= 2, ensuring that predecessor blocks are
      // considered as entry points before the sink block.
      unsigned PredScore = getEntryPointScore(PredBB, PredIt.getPathLength());
      if (PredScore > BestScore) {
        ColdRegion->SuggestedEntryPoint = &PredBB;
        BestScore = PredScore;
      }

      addBlockToRegion(&PredBB, PredScore);
      ++PredIt;
    }

    // If the sink can be added to the cold region, do so. It's considered as
    // an entry point before any sink-successor blocks.
    //
    // Otherwise, split cold sink-successor blocks using a separate region.
    // This satisfies the requirement that all extraction blocks other than the
    // first have predecessors within the extraction region.
    if (mayExtractBlock(SinkBB)) {
      addBlockToRegion(&SinkBB, SinkScore);
      if (pred_empty(&SinkBB)) {
        ColdRegion->EntireFunctionCold = true;
        return Regions;
      }
    } else {
      Regions.emplace_back();
      ColdRegion = &Regions.back();
      BestScore = 0;
    }

    // Find all successors of SinkBB dominated by SinkBB using DFS.
    auto SuccIt = ++df_begin(&SinkBB);
    auto SuccEnd = df_end(&SinkBB);
    while (SuccIt != SuccEnd) {
      BasicBlock &SuccBB = **SuccIt;
      bool SinkDom = DT.dominates(&SinkBB, &SuccBB);

      // Don't allow the backwards & forwards DFSes to mark the same block.
      bool DuplicateBlock = RegionBlocks.count(&SuccBB);

      // If SinkBB does not dominate a successor, do not mark the successor (or
      // any of its successors) cold.
      if (DuplicateBlock || !SinkDom || !mayExtractBlock(SuccBB)) {
        SuccIt.skipChildren();
        continue;
      }

      unsigned SuccScore = getEntryPointScore(SuccBB, ScoreForSuccBlock);
      if (SuccScore > BestScore) {
        ColdRegion->SuggestedEntryPoint = &SuccBB;
        BestScore = SuccScore;
      }

      addBlockToRegion(&SuccBB, SuccScore);
      ++SuccIt;
    }

    return Regions;
  }

  /// Whether this region has nothing to extract.
  bool empty() const { return !SuggestedEntryPoint; }

  /// The blocks in this region.
  ArrayRef<std::pair<BasicBlock *, unsigned>> blocks() const { return Blocks; }

  /// Whether the entire function containing this region is cold.
  bool isEntireFunctionCold() const { return EntireFunctionCold; }

  /// Remove a sub-region from this region and return it as a block sequence.
  BlockSequence takeSingleEntrySubRegion(DominatorTree &DT) {
    assert(!empty() && !isEntireFunctionCold() && "Nothing to extract");

    // Remove blocks dominated by the suggested entry point from this region.
    // During the removal, identify the next best entry point into the region.
    // Ensure that the first extracted block is the suggested entry point.
    BlockSequence SubRegion = {SuggestedEntryPoint};
    BasicBlock *NextEntryPoint = nullptr;
    unsigned NextScore = 0;
    auto RegionEndIt = Blocks.end();
    auto RegionStartIt = remove_if(Blocks, [&](const BlockTy &Block) {
      BasicBlock *BB = Block.first;
      unsigned Score = Block.second;
      bool InSubRegion =
          BB == SuggestedEntryPoint || DT.dominates(SuggestedEntryPoint, BB);
      if (!InSubRegion && Score > NextScore) {
        NextEntryPoint = BB;
        NextScore = Score;
      }
      if (InSubRegion && BB != SuggestedEntryPoint)
        SubRegion.push_back(BB);
      return InSubRegion;
    });
    Blocks.erase(RegionStartIt, RegionEndIt);

    // Update the suggested entry point.
    SuggestedEntryPoint = NextEntryPoint;

    return SubRegion;
  }
};
} // namespace

bool HotColdSplitting::outlineColdRegions(Function &F, bool HasProfileSummary) {
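  // This proceeds in two phases: first, walk the function in RPO and collect
  // non-overlapping cold regions into a worklist; second, pop each region and
  // outline its single-entry sub-regions one at a time.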
  bool Changed = false;

  // The set of cold blocks.
  SmallPtrSet<BasicBlock *, 4> ColdBlocks;

  // The worklist of non-intersecting regions left to outline.
  SmallVector<OutliningRegion, 2> OutliningWorklist;

  // Set up an RPO traversal. Experimentally, this performs better (outlines
  // more) than a PO traversal, because we prevent region overlap by keeping
  // the first region to contain a block.
  ReversePostOrderTraversal<Function *> RPOT(&F);

  // Calculate domtrees lazily. This reduces compile-time significantly.
  std::unique_ptr<DominatorTree> DT;
  std::unique_ptr<PostDominatorTree> PDT;

  // Calculate BFI lazily (it's only used to query ProfileSummaryInfo). This
  // reduces compile-time significantly. TODO: When we *do* use BFI, we should
  // be able to salvage its domtrees instead of recomputing them.
  BlockFrequencyInfo *BFI = nullptr;
  if (HasProfileSummary)
    BFI = GetBFI(F);

  TargetTransformInfo &TTI = GetTTI(F);
  OptimizationRemarkEmitter &ORE = (*GetORE)(F);
  AssumptionCache *AC = LookupAC(F);

  // Find all cold regions.
  for (BasicBlock *BB : RPOT) {
    // This block is already part of some outlining region.
    if (ColdBlocks.count(BB))
      continue;

    bool Cold = (BFI && PSI->isColdBlock(BB, BFI)) ||
                (EnableStaticAnalysis && unlikelyExecuted(*BB));
    if (!Cold)
      continue;

    LLVM_DEBUG({
      dbgs() << "Found a cold block:\n";
      BB->dump();
    });

    if (!DT)
      DT = std::make_unique<DominatorTree>(F);
    if (!PDT)
      PDT = std::make_unique<PostDominatorTree>(F);

    auto Regions = OutliningRegion::create(*BB, *DT, *PDT);
    for (OutliningRegion &Region : Regions) {
      if (Region.empty())
        continue;

      if (Region.isEntireFunctionCold()) {
        LLVM_DEBUG(dbgs() << "Entire function is cold\n");
        return markFunctionCold(F);
      }

      // If this outlining region intersects with another, drop the new region.
      //
      // TODO: It's theoretically possible to outline more by only keeping the
      // largest region which contains a block, but the extra bookkeeping to do
      // this is tricky/expensive.
      bool RegionsOverlap = any_of(Region.blocks(), [&](const BlockTy &Block) {
        return !ColdBlocks.insert(Block.first).second;
      });
      if (RegionsOverlap)
        continue;

      OutliningWorklist.emplace_back(std::move(Region));
      ++NumColdRegionsFound;
    }
  }

  if (OutliningWorklist.empty())
    return Changed;

  // Outline single-entry cold regions, splitting up larger regions as needed.
  unsigned OutlinedFunctionID = 1;
  // Cache and recycle the CodeExtractor analysis to avoid O(n^2) compile-time.
  CodeExtractorAnalysisCache CEAC(F);
  do {
    OutliningRegion Region = OutliningWorklist.pop_back_val();
    assert(!Region.empty() && "Empty outlining region in worklist");
    do {
      BlockSequence SubRegion = Region.takeSingleEntrySubRegion(*DT);
      LLVM_DEBUG({
        dbgs() << "Hot/cold splitting attempting to outline these blocks:\n";
        for (BasicBlock *BB : SubRegion)
          BB->dump();
      });

      Function *Outlined = extractColdRegion(SubRegion, CEAC, *DT, BFI, TTI,
                                             ORE, AC, OutlinedFunctionID);
      if (Outlined) {
        ++OutlinedFunctionID;
        Changed = true;
      }
    } while (!Region.empty());
  } while (!OutliningWorklist.empty());

  return Changed;
}

bool HotColdSplitting::run(Module &M) {
  bool Changed = false;
  bool HasProfileSummary = (M.getProfileSummary(/* IsCS */ false) != nullptr);
  for (Function &F : M) {
    // Do not touch declarations.
    if (F.isDeclaration())
      continue;

    // Do not modify `optnone` functions.
    if (F.hasOptNone())
      continue;

    // Detect inherently cold functions and mark them as such.
    if (isFunctionCold(F)) {
      Changed |= markFunctionCold(F);
      continue;
    }

    if (!shouldOutlineFrom(F)) {
      LLVM_DEBUG(llvm::dbgs() << "Skipping " << F.getName() << "\n");
      continue;
    }

    LLVM_DEBUG(llvm::dbgs() << "Outlining in " << F.getName() << "\n");
    Changed |= outlineColdRegions(F, HasProfileSummary);
  }
  return Changed;
}

bool HotColdSplittingLegacyPass::runOnModule(Module &M) {
  if (skipModule(M))
    return false;
  ProfileSummaryInfo *PSI =
      &getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
  auto GTTI = [this](Function &F) -> TargetTransformInfo & {
    return this->getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
  };
  auto GBFI = [this](Function &F) {
    return &this->getAnalysis<BlockFrequencyInfoWrapperPass>(F).getBFI();
  };
  std::unique_ptr<OptimizationRemarkEmitter> ORE;
  std::function<OptimizationRemarkEmitter &(Function &)> GetORE =
      [&ORE](Function &F) -> OptimizationRemarkEmitter & {
    ORE.reset(new OptimizationRemarkEmitter(&F));
    return *ORE;
  };
  auto LookupAC = [this](Function &F) -> AssumptionCache * {
    if (auto *ACT = getAnalysisIfAvailable<AssumptionCacheTracker>())
      return ACT->lookupAssumptionCache(F);
    return nullptr;
  };

  return HotColdSplitting(PSI, GBFI, GTTI, &GetORE, LookupAC).run(M);
}

PreservedAnalyses
HotColdSplittingPass::run(Module &M, ModuleAnalysisManager &AM) {
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();

  auto LookupAC = [&FAM](Function &F) -> AssumptionCache * {
    return FAM.getCachedResult<AssumptionAnalysis>(F);
  };

  auto GBFI = [&FAM](Function &F) {
    return &FAM.getResult<BlockFrequencyAnalysis>(F);
  };

  std::function<TargetTransformInfo &(Function &)> GTTI =
      [&FAM](Function &F) -> TargetTransformInfo & {
    return FAM.getResult<TargetIRAnalysis>(F);
  };

  std::unique_ptr<OptimizationRemarkEmitter> ORE;
  std::function<OptimizationRemarkEmitter &(Function &)> GetORE =
      [&ORE](Function &F) -> OptimizationRemarkEmitter & {
    ORE.reset(new OptimizationRemarkEmitter(&F));
    return *ORE;
  };

  ProfileSummaryInfo *PSI = &AM.getResult<ProfileSummaryAnalysis>(M);

  if (HotColdSplitting(PSI, GBFI, GTTI, &GetORE, LookupAC).run(M))
    return PreservedAnalyses::none();
  return PreservedAnalyses::all();
}

char HotColdSplittingLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(HotColdSplittingLegacyPass, "hotcoldsplit",
                      "Hot Cold Splitting", false, false)
INITIALIZE_PASS_DEPENDENCY(ProfileSummaryInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(BlockFrequencyInfoWrapperPass)
INITIALIZE_PASS_END(HotColdSplittingLegacyPass, "hotcoldsplit",
                    "Hot Cold Splitting", false, false)

ModulePass *llvm::createHotColdSplittingPass() {
  return new HotColdSplittingLegacyPass();
}