ScopDetection.cpp 70 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
7177817791780178117821783178417851786178717881789179017911792179317941795179617971798179918001801180218031804180518061807180818091810181118121813181418151816181718181819182018211822182318241825182618271828182918301831183218331834183518361837183818391840184118421843184418451846184718481849185018511852185318541855185618571858185918601861186218631864186518661867186818691870187118721873187418751876187718781879188018811882188318841885188618871888188918901891189218931894189518961897189818991900190119021903190419051906190719081909191019111912191319141915191619171918191919201921192219231924192519261927192819291930193119321933193419351936193719381939194019411942194319441945194619471948194919501951195219531954195519561957195819591960196119621963196419651966196719681969197019711972197319741975197619771978197919801981198219831984198519861987198819891990199119921993199419951996199719981999200020012002200320042005200620072008200920102011201220132014201520162017201820192020202120222023202420252026202720282029203020312032203320342035203620372038203920402041204220432044
  1. //===- ScopDetection.cpp - Detect Scops -----------------------------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // Detect the maximal Scops of a function.
  10. //
  11. // A static control part (Scop) is a subgraph of the control flow graph (CFG)
  12. // that only has statically known control flow and can therefore be described
  13. // within the polyhedral model.
  14. //
  15. // Every Scop fulfills these restrictions:
  16. //
  17. // * It is a single entry single exit region
  18. //
  19. // * Only affine linear bounds in the loops
  20. //
  21. // Every natural loop in a Scop must have a number of loop iterations that can
  22. // be described as an affine linear function in surrounding loop iterators or
  23. // parameters. (A parameter is a scalar that does not change its value during
  24. // execution of the Scop).
  25. //
  26. // * Only comparisons of affine linear expressions in conditions
  27. //
  28. // * All loops and conditions perfectly nested
  29. //
  30. // The control flow needs to be structured such that it could be written using
  31. // just 'for' and 'if' statements, without the need for any 'goto', 'break' or
  32. // 'continue'.
  33. //
  34. // * Side effect free functions call
  35. //
  36. // Function calls and intrinsics that do not have side effects (readnone)
  37. // or memory intrinsics (memset, memcpy, memmove) are allowed.
  38. //
  39. // The Scop detection finds the largest Scops by checking if the largest
  40. // region is a Scop. If this is not the case, its canonical subregions are
  41. // checked until a region is a Scop. It is now tried to extend this Scop by
  42. // creating a larger non canonical region.
  43. //
  44. //===----------------------------------------------------------------------===//
  45. #include "polly/ScopDetection.h"
  46. #include "polly/LinkAllPasses.h"
  47. #include "polly/Options.h"
  48. #include "polly/ScopDetectionDiagnostic.h"
  49. #include "polly/Support/SCEVValidator.h"
  50. #include "polly/Support/ScopHelper.h"
  51. #include "polly/Support/ScopLocation.h"
  52. #include "llvm/ADT/SmallPtrSet.h"
  53. #include "llvm/ADT/Statistic.h"
  54. #include "llvm/Analysis/AliasAnalysis.h"
  55. #include "llvm/Analysis/Delinearization.h"
  56. #include "llvm/Analysis/Loads.h"
  57. #include "llvm/Analysis/LoopInfo.h"
  58. #include "llvm/Analysis/OptimizationRemarkEmitter.h"
  59. #include "llvm/Analysis/RegionInfo.h"
  60. #include "llvm/Analysis/ScalarEvolution.h"
  61. #include "llvm/Analysis/ScalarEvolutionExpressions.h"
  62. #include "llvm/IR/BasicBlock.h"
  63. #include "llvm/IR/DebugLoc.h"
  64. #include "llvm/IR/DerivedTypes.h"
  65. #include "llvm/IR/DiagnosticInfo.h"
  66. #include "llvm/IR/DiagnosticPrinter.h"
  67. #include "llvm/IR/Dominators.h"
  68. #include "llvm/IR/Function.h"
  69. #include "llvm/IR/InstrTypes.h"
  70. #include "llvm/IR/Instruction.h"
  71. #include "llvm/IR/Instructions.h"
  72. #include "llvm/IR/IntrinsicInst.h"
  73. #include "llvm/IR/Metadata.h"
  74. #include "llvm/IR/Module.h"
  75. #include "llvm/IR/PassManager.h"
  76. #include "llvm/IR/Value.h"
  77. #include "llvm/InitializePasses.h"
  78. #include "llvm/Pass.h"
  79. #include "llvm/Support/Debug.h"
  80. #include "llvm/Support/Regex.h"
  81. #include "llvm/Support/raw_ostream.h"
  82. #include <algorithm>
  83. #include <cassert>
  84. #include <memory>
  85. #include <stack>
  86. #include <string>
  87. #include <utility>
  88. #include <vector>
  89. using namespace llvm;
  90. using namespace polly;
  91. #define DEBUG_TYPE "polly-detect"
//===----------------------------------------------------------------------===//
// Command-line options controlling scop detection. Options of the form
// cl::opt<bool, true> write through to an externally visible bool in the
// polly namespace so other components can query the setting directly.

// This option is set to a very high value, as analyzing such loops increases
// compile time on several cases. For experiments that enable this option,
// a value of around 40 has been working to avoid run-time regressions with
// Polly while still exposing interesting optimization opportunities.
static cl::opt<int> ProfitabilityMinPerLoopInstructions(
    "polly-detect-profitability-min-per-loop-insts",
    cl::desc("The minimal number of per-loop instructions before a single loop "
             "region is considered profitable"),
    cl::Hidden, cl::ValueRequired, cl::init(100000000), cl::cat(PollyCategory));

// External storage for -polly-process-unprofitable.
bool polly::PollyProcessUnprofitable;

static cl::opt<bool, true> XPollyProcessUnprofitable(
    "polly-process-unprofitable",
    cl::desc(
        "Process scops that are unlikely to benefit from Polly optimizations."),
    cl::location(PollyProcessUnprofitable), cl::init(false), cl::ZeroOrMore,
    cl::cat(PollyCategory));

// Regex whitelist: if non-empty, detection only runs on matching functions.
static cl::list<std::string> OnlyFunctions(
    "polly-only-func",
    cl::desc("Only run on functions that match a regex. "
             "Multiple regexes can be comma separated. "
             "Scop detection will run on all functions that match "
             "ANY of the regexes provided."),
    cl::ZeroOrMore, cl::CommaSeparated, cl::cat(PollyCategory));

// Regex blacklist: functions matching any entry are skipped entirely.
static cl::list<std::string> IgnoredFunctions(
    "polly-ignore-func",
    cl::desc("Ignore functions that match a regex. "
             "Multiple regexes can be comma separated. "
             "Scop detection will ignore all functions that match "
             "ANY of the regexes provided."),
    cl::ZeroOrMore, cl::CommaSeparated, cl::cat(PollyCategory));

// External storage for -polly-detect-full-functions.
bool polly::PollyAllowFullFunction;

static cl::opt<bool, true>
    XAllowFullFunction("polly-detect-full-functions",
                       cl::desc("Allow the detection of full functions"),
                       cl::location(polly::PollyAllowFullFunction),
                       cl::init(false), cl::cat(PollyCategory));

static cl::opt<std::string> OnlyRegion(
    "polly-only-region",
    cl::desc("Only run on certain regions (The provided identifier must "
             "appear in the name of the region's entry block"),
    cl::value_desc("identifier"), cl::ValueRequired, cl::init(""),
    cl::cat(PollyCategory));

static cl::opt<bool>
    IgnoreAliasing("polly-ignore-aliasing",
                   cl::desc("Ignore possible aliasing of the array bases"),
                   cl::Hidden, cl::init(false), cl::ZeroOrMore,
                   cl::cat(PollyCategory));

// External storage for -polly-allow-unsigned-operations.
bool polly::PollyAllowUnsignedOperations;

static cl::opt<bool, true> XPollyAllowUnsignedOperations(
    "polly-allow-unsigned-operations",
    cl::desc("Allow unsigned operations such as comparisons or zero-extends."),
    cl::location(PollyAllowUnsignedOperations), cl::Hidden, cl::ZeroOrMore,
    cl::init(true), cl::cat(PollyCategory));

// External storage for -polly-use-runtime-alias-checks.
bool polly::PollyUseRuntimeAliasChecks;

static cl::opt<bool, true> XPollyUseRuntimeAliasChecks(
    "polly-use-runtime-alias-checks",
    cl::desc("Use runtime alias checks to resolve possible aliasing."),
    cl::location(PollyUseRuntimeAliasChecks), cl::Hidden, cl::ZeroOrMore,
    cl::init(true), cl::cat(PollyCategory));

static cl::opt<bool>
    ReportLevel("polly-report",
                cl::desc("Print information about the activities of Polly"),
                cl::init(false), cl::ZeroOrMore, cl::cat(PollyCategory));

static cl::opt<bool> AllowDifferentTypes(
    "polly-allow-differing-element-types",
    cl::desc("Allow different element types for array accesses"), cl::Hidden,
    cl::init(true), cl::ZeroOrMore, cl::cat(PollyCategory));

static cl::opt<bool>
    AllowNonAffine("polly-allow-nonaffine",
                   cl::desc("Allow non affine access functions in arrays"),
                   cl::Hidden, cl::init(false), cl::ZeroOrMore,
                   cl::cat(PollyCategory));

static cl::opt<bool>
    AllowModrefCall("polly-allow-modref-calls",
                    cl::desc("Allow functions with known modref behavior"),
                    cl::Hidden, cl::init(false), cl::ZeroOrMore,
                    cl::cat(PollyCategory));

// Over-approximation of non-affine branches is on by default ...
static cl::opt<bool> AllowNonAffineSubRegions(
    "polly-allow-nonaffine-branches",
    cl::desc("Allow non affine conditions for branches"), cl::Hidden,
    cl::init(true), cl::ZeroOrMore, cl::cat(PollyCategory));

// ... while over-approximation of non-affine loops is opt-in.
static cl::opt<bool>
    AllowNonAffineSubLoops("polly-allow-nonaffine-loops",
                           cl::desc("Allow non affine conditions for loops"),
                           cl::Hidden, cl::init(false), cl::ZeroOrMore,
                           cl::cat(PollyCategory));

static cl::opt<bool, true>
    TrackFailures("polly-detect-track-failures",
                  cl::desc("Track failure strings in detecting scop regions"),
                  cl::location(PollyTrackFailures), cl::Hidden, cl::ZeroOrMore,
                  cl::init(true), cl::cat(PollyCategory));

static cl::opt<bool> KeepGoing("polly-detect-keep-going",
                               cl::desc("Do not fail on the first error."),
                               cl::Hidden, cl::ZeroOrMore, cl::init(false),
                               cl::cat(PollyCategory));

static cl::opt<bool, true>
    PollyDelinearizeX("polly-delinearize",
                      cl::desc("Delinearize array access functions"),
                      cl::location(PollyDelinearize), cl::Hidden,
                      cl::ZeroOrMore, cl::init(true), cl::cat(PollyCategory));

static cl::opt<bool>
    VerifyScops("polly-detect-verify",
                cl::desc("Verify the detected SCoPs after each transformation"),
                cl::Hidden, cl::init(false), cl::ZeroOrMore,
                cl::cat(PollyCategory));

// External storage for -polly-invariant-load-hoisting.
bool polly::PollyInvariantLoadHoisting;

static cl::opt<bool, true> XPollyInvariantLoadHoisting(
    "polly-invariant-load-hoisting", cl::desc("Hoist invariant loads."),
    cl::location(PollyInvariantLoadHoisting), cl::Hidden, cl::ZeroOrMore,
    cl::init(false), cl::cat(PollyCategory));

static cl::opt<bool> PollyAllowErrorBlocks(
    "polly-allow-error-blocks",
    cl::desc("Allow to speculate on the execution of 'error blocks'."),
    cl::Hidden, cl::init(true), cl::ZeroOrMore, cl::cat(PollyCategory));

/// The minimal trip count under which loops are considered unprofitable.
static const unsigned MIN_LOOP_TRIP_COUNT = 8;

// Defaults for the externally visible flags declared in the Polly headers.
bool polly::PollyTrackFailures = false;
bool polly::PollyDelinearize = false;
StringRef polly::PollySkipFnAttr = "polly.skip.fn";
//===----------------------------------------------------------------------===//
// Statistics.
//
// Two parallel sets of counters are kept: one for all detected scops and one
// restricted to scops that also pass the profitability heuristic. They are
// updated by updateLoopCountStatistic() below.

STATISTIC(NumScopRegions, "Number of scops");
STATISTIC(NumLoopsInScop, "Number of loops in scops");
STATISTIC(NumScopsDepthZero, "Number of scops with maximal loop depth 0");
STATISTIC(NumScopsDepthOne, "Number of scops with maximal loop depth 1");
STATISTIC(NumScopsDepthTwo, "Number of scops with maximal loop depth 2");
STATISTIC(NumScopsDepthThree, "Number of scops with maximal loop depth 3");
STATISTIC(NumScopsDepthFour, "Number of scops with maximal loop depth 4");
STATISTIC(NumScopsDepthFive, "Number of scops with maximal loop depth 5");
STATISTIC(NumScopsDepthLarger,
          "Number of scops with maximal loop depth 6 and larger");
STATISTIC(NumProfScopRegions, "Number of scops (profitable scops only)");
STATISTIC(NumLoopsInProfScop,
          "Number of loops in scops (profitable scops only)");
STATISTIC(NumLoopsOverall, "Number of total loops");
STATISTIC(NumProfScopsDepthZero,
          "Number of scops with maximal loop depth 0 (profitable scops only)");
STATISTIC(NumProfScopsDepthOne,
          "Number of scops with maximal loop depth 1 (profitable scops only)");
STATISTIC(NumProfScopsDepthTwo,
          "Number of scops with maximal loop depth 2 (profitable scops only)");
STATISTIC(NumProfScopsDepthThree,
          "Number of scops with maximal loop depth 3 (profitable scops only)");
STATISTIC(NumProfScopsDepthFour,
          "Number of scops with maximal loop depth 4 (profitable scops only)");
STATISTIC(NumProfScopsDepthFive,
          "Number of scops with maximal loop depth 5 (profitable scops only)");
STATISTIC(NumProfScopsDepthLarger,
          "Number of scops with maximal loop depth 6 and larger "
          "(profitable scops only)");
STATISTIC(MaxNumLoopsInScop, "Maximal number of loops in scops");
STATISTIC(MaxNumLoopsInProfScop,
          "Maximal number of loops in scops (profitable scops only)");

// Folds the per-scop loop statistics above into the matching counters;
// defined later in this file.
static void updateLoopCountStatistic(ScopDetection::LoopStats Stats,
                                     bool OnlyProfitable);
  247. namespace {
  248. class DiagnosticScopFound : public DiagnosticInfo {
  249. private:
  250. static int PluginDiagnosticKind;
  251. Function &F;
  252. std::string FileName;
  253. unsigned EntryLine, ExitLine;
  254. public:
  255. DiagnosticScopFound(Function &F, std::string FileName, unsigned EntryLine,
  256. unsigned ExitLine)
  257. : DiagnosticInfo(PluginDiagnosticKind, DS_Note), F(F), FileName(FileName),
  258. EntryLine(EntryLine), ExitLine(ExitLine) {}
  259. void print(DiagnosticPrinter &DP) const override;
  260. static bool classof(const DiagnosticInfo *DI) {
  261. return DI->getKind() == PluginDiagnosticKind;
  262. }
  263. };
  264. } // namespace
// Out-of-line definition of the diagnostic kind; the value is drawn once from
// LLVM's pool of plugin diagnostic kinds and identifies this diagnostic in
// classof().
int DiagnosticScopFound::PluginDiagnosticKind =
    getNextAvailablePluginDiagnosticKind();
  267. void DiagnosticScopFound::print(DiagnosticPrinter &DP) const {
  268. DP << "Polly detected an optimizable loop region (scop) in function '" << F
  269. << "'\n";
  270. if (FileName.empty()) {
  271. DP << "Scop location is unknown. Compile with debug info "
  272. "(-g) to get more precise information. ";
  273. return;
  274. }
  275. DP << FileName << ":" << EntryLine << ": Start of scop\n";
  276. DP << FileName << ":" << ExitLine << ": End of scop";
  277. }
  278. /// Check if a string matches any regex in a list of regexes.
  279. /// @param Str the input string to match against.
  280. /// @param RegexList a list of strings that are regular expressions.
  281. static bool doesStringMatchAnyRegex(StringRef Str,
  282. const cl::list<std::string> &RegexList) {
  283. for (auto RegexStr : RegexList) {
  284. Regex R(RegexStr);
  285. std::string Err;
  286. if (!R.isValid(Err))
  287. report_fatal_error(Twine("invalid regex given as input to polly: ") + Err,
  288. true);
  289. if (R.match(Str))
  290. return true;
  291. }
  292. return false;
  293. }
//===----------------------------------------------------------------------===//
// ScopDetection.

// Construct the detection pass state; all analyses are borrowed by reference
// and must outlive this object. Detection itself runs later via detect().
ScopDetection::ScopDetection(const DominatorTree &DT, ScalarEvolution &SE,
                             LoopInfo &LI, RegionInfo &RI, AliasAnalysis &AA,
                             OptimizationRemarkEmitter &ORE)
    : DT(DT), SE(SE), LI(LI), RI(RI), AA(AA), ORE(ORE) {}
// Run scop detection on @p F: apply the function filters, find all maximal
// valid regions, prune the unprofitable ones, and update statistics and
// remarks. Must be called at most once per ScopDetection instance.
void ScopDetection::detect(Function &F) {
  assert(ValidRegions.empty() && "Detection must run only once");

  // Without loops there is nothing profitable to detect (unless the user
  // explicitly asked for unprofitable scops).
  if (!PollyProcessUnprofitable && LI.empty())
    return;

  Region *TopRegion = RI.getTopLevelRegion();

  // Honor the -polly-only-func / -polly-ignore-func regex filters.
  if (!OnlyFunctions.empty() &&
      !doesStringMatchAnyRegex(F.getName(), OnlyFunctions))
    return;

  if (doesStringMatchAnyRegex(F.getName(), IgnoredFunctions))
    return;

  if (!isValidFunction(F))
    return;

  // Recursively find all maximal valid regions; results land in ValidRegions
  // and DetectionContextMap.
  findScops(*TopRegion);
  NumScopRegions += ValidRegions.size();

  // Prune non-profitable regions.
  for (auto &DIt : DetectionContextMap) {
    DetectionContext &DC = *DIt.getSecond().get();
    // Skip contexts of rejected candidates and of regions that findScops
    // already removed from the valid set.
    if (DC.Log.hasErrors())
      continue;
    if (!ValidRegions.count(&DC.CurRegion))
      continue;
    LoopStats Stats = countBeneficialLoops(&DC.CurRegion, SE, LI, 0);
    updateLoopCountStatistic(Stats, false /* OnlyProfitable */);
    if (isProfitableRegion(DC)) {
      updateLoopCountStatistic(Stats, true /* OnlyProfitable */);
      continue;
    }

    // Unprofitable: drop the region from the valid set.
    ValidRegions.remove(&DC.CurRegion);
  }

  NumProfScopRegions += ValidRegions.size();
  NumLoopsOverall += countBeneficialLoops(TopRegion, SE, LI, 0).NumLoops;

  // Only makes sense when we tracked errors.
  if (PollyTrackFailures)
    emitMissedRemarks(F);

  if (ReportLevel)
    printLocations(F);

  assert(ValidRegions.size() <= DetectionContextMap.size() &&
         "Cached more results than valid regions");
}
  339. template <class RR, typename... Args>
  340. inline bool ScopDetection::invalid(DetectionContext &Context, bool Assert,
  341. Args &&...Arguments) const {
  342. if (!Context.Verifying) {
  343. RejectLog &Log = Context.Log;
  344. std::shared_ptr<RR> RejectReason = std::make_shared<RR>(Arguments...);
  345. if (PollyTrackFailures)
  346. Log.report(RejectReason);
  347. LLVM_DEBUG(dbgs() << RejectReason->getMessage());
  348. LLVM_DEBUG(dbgs() << "\n");
  349. } else {
  350. assert(!Assert && "Verification of detected scop failed");
  351. }
  352. return false;
  353. }
// Return true if @p R is a maximal valid region. With @p Verify set, the
// cached detection result is discarded and the region is re-validated from
// scratch.
bool ScopDetection::isMaxRegionInScop(const Region &R, bool Verify) {
  if (!ValidRegions.count(&R))
    return false;

  if (Verify) {
    BBPair P = getBBPairForRegion(&R);
    std::unique_ptr<DetectionContext> &Entry = DetectionContextMap[P];

    // Free previous DetectionContext for the region and create and verify a new
    // one. Be sure that the DetectionContext is not still used by a ScopInfo.
    // Due to changes by CodeGeneration of another Scop, the Region object and
    // the BBPair might not match anymore.
    Entry = std::make_unique<DetectionContext>(const_cast<Region &>(R), AA,
                                               /*Verifying=*/false);

    return isValidRegion(*Entry.get());
  }

  return true;
}
  370. std::string ScopDetection::regionIsInvalidBecause(const Region *R) const {
  371. // Get the first error we found. Even in keep-going mode, this is the first
  372. // reason that caused the candidate to be rejected.
  373. auto *Log = lookupRejectionLog(R);
  374. // This can happen when we marked a region invalid, but didn't track
  375. // an error for it.
  376. if (!Log || !Log->hasErrors())
  377. return "";
  378. RejectReasonPtr RR = *Log->begin();
  379. return RR->getMessage();
  380. }
  381. bool ScopDetection::addOverApproximatedRegion(Region *AR,
  382. DetectionContext &Context) const {
  383. // If we already know about Ar we can exit.
  384. if (!Context.NonAffineSubRegionSet.insert(AR))
  385. return true;
  386. // All loops in the region have to be overapproximated too if there
  387. // are accesses that depend on the iteration count.
  388. for (BasicBlock *BB : AR->blocks()) {
  389. Loop *L = LI.getLoopFor(BB);
  390. if (AR->contains(L))
  391. Context.BoxedLoopsSet.insert(L);
  392. }
  393. return (AllowNonAffineSubLoops || Context.BoxedLoopsSet.empty());
  394. }
// Check that every load in @p RequiredILS may be hoisted as a required
// invariant load; on success they are merged into Context.RequiredILS.
// Returns false if hoisting is disabled, a load is not hoistable, or a load
// sits inside (but not at the entry of) an over-approximated subregion
// without being unconditionally safe to execute.
bool ScopDetection::onlyValidRequiredInvariantLoads(
    InvariantLoadsSetTy &RequiredILS, DetectionContext &Context) const {
  Region &CurRegion = Context.CurRegion;
  const DataLayout &DL = CurRegion.getEntry()->getModule()->getDataLayout();

  // Any required invariant load needs load hoisting to be enabled.
  if (!PollyInvariantLoadHoisting && !RequiredILS.empty())
    return false;

  for (LoadInst *Load : RequiredILS) {
    // If we already know a load has been accepted as required invariant, we
    // already run the validation below once and consequently don't need to
    // run it again. Hence, we return early. For certain test cases (e.g.,
    // COSMO) this avoids us spending 50% of scop-detection time in this
    // very function (and its children).
    if (Context.RequiredILS.count(Load))
      continue;
    if (!isHoistableLoad(Load, CurRegion, LI, SE, DT, Context.RequiredILS))
      return false;

    for (auto NonAffineRegion : Context.NonAffineSubRegionSet) {
      // A load that is always safe to execute may be hoisted regardless of
      // where it sits in the over-approximated region.
      if (isSafeToLoadUnconditionally(Load->getPointerOperand(),
                                      Load->getType(), Load->getAlign(), DL))
        continue;

      // Otherwise the load must not be conditionally executed inside the
      // over-approximated region.
      if (NonAffineRegion->contains(Load) &&
          Load->getParent() != NonAffineRegion->getEntry())
        return false;
    }
  }

  // All loads validated; remember them so future calls can return early.
  Context.RequiredILS.insert(RequiredILS.begin(), RequiredILS.end());

  return true;
}
  423. bool ScopDetection::involvesMultiplePtrs(const SCEV *S0, const SCEV *S1,
  424. Loop *Scope) const {
  425. SetVector<Value *> Values;
  426. findValues(S0, SE, Values);
  427. if (S1)
  428. findValues(S1, SE, Values);
  429. SmallPtrSet<Value *, 8> PtrVals;
  430. for (auto *V : Values) {
  431. if (auto *P2I = dyn_cast<PtrToIntInst>(V))
  432. V = P2I->getOperand(0);
  433. if (!V->getType()->isPointerTy())
  434. continue;
  435. auto *PtrSCEV = SE.getSCEVAtScope(V, Scope);
  436. if (isa<SCEVConstant>(PtrSCEV))
  437. continue;
  438. auto *BasePtr = dyn_cast<SCEVUnknown>(SE.getPointerBase(PtrSCEV));
  439. if (!BasePtr)
  440. return true;
  441. auto *BasePtrVal = BasePtr->getValue();
  442. if (PtrVals.insert(BasePtrVal).second) {
  443. for (auto *PtrVal : PtrVals)
  444. if (PtrVal != BasePtrVal && !AA.isNoAlias(PtrVal, BasePtrVal))
  445. return true;
  446. }
  447. }
  448. return false;
  449. }
  450. bool ScopDetection::isAffine(const SCEV *S, Loop *Scope,
  451. DetectionContext &Context) const {
  452. InvariantLoadsSetTy AccessILS;
  453. if (!isAffineExpr(&Context.CurRegion, Scope, S, SE, &AccessILS))
  454. return false;
  455. if (!onlyValidRequiredInvariantLoads(AccessILS, Context))
  456. return false;
  457. return true;
  458. }
// Check whether the switch terminator of @p BB has a condition Polly can
// model: the condition must be affine, or the surrounding region may be
// over-approximated (non-loop branches only).
bool ScopDetection::isValidSwitch(BasicBlock &BB, SwitchInst *SI,
                                  Value *Condition, bool IsLoopBranch,
                                  DetectionContext &Context) const {
  Loop *L = LI.getLoopFor(&BB);
  const SCEV *ConditionSCEV = SE.getSCEVAtScope(Condition, L);

  // A switch as loop latch cannot be modeled.
  if (IsLoopBranch && L->isLoopLatch(&BB))
    return false;

  // Check for invalid usage of different pointers in one expression.
  if (involvesMultiplePtrs(ConditionSCEV, nullptr, L))
    return false;

  if (isAffine(ConditionSCEV, L, Context))
    return true;

  // Non-affine condition: try to over-approximate the enclosing subregion.
  if (AllowNonAffineSubRegions &&
      addOverApproximatedRegion(RI.getRegionFor(&BB), Context))
    return true;

  return invalid<ReportNonAffBranch>(Context, /*Assert=*/true, &BB,
                                     ConditionSCEV, ConditionSCEV, SI);
}
// Check whether the branch terminator of @p BB has a condition Polly can
// model. Handles constants, and/or combinations (recursively), error-block
// PHIs, invariant loads, and affine integer comparisons; otherwise falls
// back to over-approximation where permitted.
bool ScopDetection::isValidBranch(BasicBlock &BB, BranchInst *BI,
                                  Value *Condition, bool IsLoopBranch,
                                  DetectionContext &Context) {
  // Constant integer conditions are always affine.
  if (isa<ConstantInt>(Condition))
    return true;

  // 'and'/'or' of conditions is valid iff both operands are valid.
  if (BinaryOperator *BinOp = dyn_cast<BinaryOperator>(Condition)) {
    auto Opcode = BinOp->getOpcode();
    if (Opcode == Instruction::And || Opcode == Instruction::Or) {
      Value *Op0 = BinOp->getOperand(0);
      Value *Op1 = BinOp->getOperand(1);
      return isValidBranch(BB, BI, Op0, IsLoopBranch, Context) &&
             isValidBranch(BB, BI, Op1, IsLoopBranch, Context);
    }
  }

  // A PHI that evaluates to a unique boolean constant on all non-error paths
  // is effectively constant.
  if (auto PHI = dyn_cast<PHINode>(Condition)) {
    auto *Unique = dyn_cast_or_null<ConstantInt>(
        getUniqueNonErrorValue(PHI, &Context.CurRegion, this));
    if (Unique && (Unique->isZero() || Unique->isOne()))
      return true;
  }

  // A loaded value inside the region can serve as condition if it is treated
  // as a required invariant load (non-loop branches only).
  if (auto Load = dyn_cast<LoadInst>(Condition))
    if (!IsLoopBranch && Context.CurRegion.contains(Load)) {
      Context.RequiredILS.insert(Load);
      return true;
    }

  // Non constant conditions of branches need to be ICmpInst.
  if (!isa<ICmpInst>(Condition)) {
    if (!IsLoopBranch && AllowNonAffineSubRegions &&
        addOverApproximatedRegion(RI.getRegionFor(&BB), Context))
      return true;
    return invalid<ReportInvalidCond>(Context, /*Assert=*/true, BI, &BB);
  }

  ICmpInst *ICmp = cast<ICmpInst>(Condition);

  // Are both operands of the ICmp affine?
  if (isa<UndefValue>(ICmp->getOperand(0)) ||
      isa<UndefValue>(ICmp->getOperand(1)))
    return invalid<ReportUndefOperand>(Context, /*Assert=*/true, &BB, ICmp);

  Loop *L = LI.getLoopFor(&BB);
  const SCEV *LHS = SE.getSCEVAtScope(ICmp->getOperand(0), L);
  const SCEV *RHS = SE.getSCEVAtScope(ICmp->getOperand(1), L);

  // Try to resolve PHI-carried values to a unique incoming expression.
  LHS = tryForwardThroughPHI(LHS, Context.CurRegion, SE, this);
  RHS = tryForwardThroughPHI(RHS, Context.CurRegion, SE, this);

  // If unsigned operations are not allowed try to approximate the region.
  if (ICmp->isUnsigned() && !PollyAllowUnsignedOperations)
    return !IsLoopBranch && AllowNonAffineSubRegions &&
           addOverApproximatedRegion(RI.getRegionFor(&BB), Context);

  // Check for invalid usage of different pointers in one expression.
  if (ICmp->isEquality() && involvesMultiplePtrs(LHS, nullptr, L) &&
      involvesMultiplePtrs(RHS, nullptr, L))
    return false;

  // Check for invalid usage of different pointers in a relational comparison.
  if (ICmp->isRelational() && involvesMultiplePtrs(LHS, RHS, L))
    return false;

  if (isAffine(LHS, L, Context) && isAffine(RHS, L, Context))
    return true;

  // Non-affine comparison: over-approximate where allowed; loop branches
  // cannot be over-approximated and simply fail.
  if (!IsLoopBranch && AllowNonAffineSubRegions &&
      addOverApproximatedRegion(RI.getRegionFor(&BB), Context))
    return true;

  if (IsLoopBranch)
    return false;

  return invalid<ReportNonAffBranch>(Context, /*Assert=*/true, &BB, LHS, RHS,
                                     ICmp);
}
  541. bool ScopDetection::isValidCFG(BasicBlock &BB, bool IsLoopBranch,
  542. bool AllowUnreachable,
  543. DetectionContext &Context) {
  544. Region &CurRegion = Context.CurRegion;
  545. Instruction *TI = BB.getTerminator();
  546. if (AllowUnreachable && isa<UnreachableInst>(TI))
  547. return true;
  548. // Return instructions are only valid if the region is the top level region.
  549. if (isa<ReturnInst>(TI) && CurRegion.isTopLevelRegion())
  550. return true;
  551. Value *Condition = getConditionFromTerminator(TI);
  552. if (!Condition)
  553. return invalid<ReportInvalidTerminator>(Context, /*Assert=*/true, &BB);
  554. // UndefValue is not allowed as condition.
  555. if (isa<UndefValue>(Condition))
  556. return invalid<ReportUndefCond>(Context, /*Assert=*/true, TI, &BB);
  557. if (BranchInst *BI = dyn_cast<BranchInst>(TI))
  558. return isValidBranch(BB, BI, Condition, IsLoopBranch, Context);
  559. SwitchInst *SI = dyn_cast<SwitchInst>(TI);
  560. assert(SI && "Terminator was neither branch nor switch");
  561. return isValidSwitch(BB, SI, Condition, IsLoopBranch, Context);
  562. }
/// Check whether the call instruction @p CI may appear inside a SCoP.
///
/// Accepts calls that provably do not access memory, supported intrinsics,
/// and — when AllowModrefCall is set — direct calls whose mod/ref behavior
/// is restricted enough to be overapproximated. Indirect and unknown calls
/// are rejected.
bool ScopDetection::isValidCallInst(CallInst &CI,
                                    DetectionContext &Context) const {
  // Calls that never return end control flow in a way we cannot model.
  if (CI.doesNotReturn())
    return false;

  // Calls without memory effects are always safe.
  if (CI.doesNotAccessMemory())
    return true;

  // Memory intrinsics (memset/memcpy/memmove) get their own modeling check.
  if (auto *II = dyn_cast<IntrinsicInst>(&CI))
    if (isValidIntrinsicInst(*II, Context))
      return true;

  Function *CalledFunction = CI.getCalledFunction();

  // Indirect calls are not supported.
  if (CalledFunction == nullptr)
    return false;

  // Debug-support calls are allowed verbatim.
  if (isDebugCall(&CI)) {
    LLVM_DEBUG(dbgs() << "Allow call to debug function: "
                      << CalledFunction->getName() << '\n');
    return true;
  }

  if (AllowModrefCall) {
    switch (AA.getModRefBehavior(CalledFunction)) {
    case FMRB_UnknownModRefBehavior:
      return false;
    case FMRB_DoesNotAccessMemory:
    case FMRB_OnlyReadsMemory:
    case FMRB_OnlyReadsInaccessibleMem:
    case FMRB_OnlyReadsInaccessibleOrArgMem:
      // Read-only behavior: model the call as a single unknown access.
      // Implicitly disable delinearization since we have an unknown
      // accesses with an unknown access function.
      Context.HasUnknownAccess = true;
      // Explicitly use addUnknown so we don't put a loop-variant
      // pointer into the alias set.
      Context.AST.addUnknown(&CI);
      return true;
    case FMRB_OnlyReadsArgumentPointees:
    case FMRB_OnlyAccessesArgumentPointees:
    case FMRB_OnlyWritesArgumentPointees:
      // Only argument pointees are touched: every pointer argument must have
      // a base address known to ScalarEvolution.
      for (const auto &Arg : CI.args()) {
        if (!Arg->getType()->isPointerTy())
          continue;

        // Bail if a pointer argument has a base address not known to
        // ScalarEvolution. Note that a zero pointer is acceptable.
        auto *ArgSCEV = SE.getSCEVAtScope(Arg, LI.getLoopFor(CI.getParent()));
        if (ArgSCEV->isZero())
          continue;

        auto *BP = dyn_cast<SCEVUnknown>(SE.getPointerBase(ArgSCEV));
        if (!BP)
          return false;

        // Implicitly disable delinearization since we have an unknown
        // accesses with an unknown access function.
        Context.HasUnknownAccess = true;
      }

      // Explicitly use addUnknown so we don't put a loop-variant
      // pointer into the alias set.
      Context.AST.addUnknown(&CI);
      return true;
    case FMRB_OnlyWritesMemory:
    case FMRB_OnlyWritesInaccessibleMem:
    case FMRB_OnlyWritesInaccessibleOrArgMem:
    case FMRB_OnlyAccessesInaccessibleMem:
    case FMRB_OnlyAccessesInaccessibleOrArgMem:
      // Writing behaviors cannot be safely overapproximated here.
      return false;
    }
  }

  return false;
}
/// Check whether intrinsic @p II can be modeled inside a SCoP.
///
/// Ignored intrinsics are trivially valid. memmove/memcpy/memset are valid
/// when their pointer operands form valid accesses and their length is
/// affine. All other intrinsics are rejected.
bool ScopDetection::isValidIntrinsicInst(IntrinsicInst &II,
                                         DetectionContext &Context) const {
  if (isIgnoredIntrinsic(&II))
    return true;

  // The closest loop surrounding the call instruction.
  Loop *L = LI.getLoopFor(II.getParent());

  // The access function and base pointer for memory intrinsics.
  const SCEV *AF;
  const SCEVUnknown *BP;

  switch (II.getIntrinsicID()) {
  // Memory intrinsics that can be represented are supported.
  case Intrinsic::memmove:
  case Intrinsic::memcpy:
    // Validate the source pointer, then fall through to also validate the
    // destination pointer shared with memset.
    AF = SE.getSCEVAtScope(cast<MemTransferInst>(II).getSource(), L);
    if (!AF->isZero()) {
      BP = dyn_cast<SCEVUnknown>(SE.getPointerBase(AF));
      // Bail if the source pointer is not valid.
      if (!isValidAccess(&II, AF, BP, Context))
        return false;
    }
    LLVM_FALLTHROUGH;
  case Intrinsic::memset:
    AF = SE.getSCEVAtScope(cast<MemIntrinsic>(II).getDest(), L);
    if (!AF->isZero()) {
      BP = dyn_cast<SCEVUnknown>(SE.getPointerBase(AF));
      // Bail if the destination pointer is not valid.
      if (!isValidAccess(&II, AF, BP, Context))
        return false;
    }

    // Bail if the length is not affine.
    if (!isAffine(SE.getSCEVAtScope(cast<MemIntrinsic>(II).getLength(), L), L,
                  Context))
      return false;

    return true;
  default:
    break;
  }

  // Any other intrinsic is not supported.
  return false;
}
  667. bool ScopDetection::isInvariant(Value &Val, const Region &Reg,
  668. DetectionContext &Ctx) const {
  669. // A reference to function argument or constant value is invariant.
  670. if (isa<Argument>(Val) || isa<Constant>(Val))
  671. return true;
  672. Instruction *I = dyn_cast<Instruction>(&Val);
  673. if (!I)
  674. return false;
  675. if (!Reg.contains(I))
  676. return true;
  677. // Loads within the SCoP may read arbitrary values, need to hoist them. If it
  678. // is not hoistable, it will be rejected later, but here we assume it is and
  679. // that makes the value invariant.
  680. if (auto LI = dyn_cast<LoadInst>(I)) {
  681. Ctx.RequiredILS.insert(LI);
  682. return true;
  683. }
  684. return false;
  685. }
  686. namespace {
  687. /// Remove smax of smax(0, size) expressions from a SCEV expression and
  688. /// register the '...' components.
  689. ///
  690. /// Array access expressions as they are generated by GFortran contain smax(0,
  691. /// size) expressions that confuse the 'normal' delinearization algorithm.
  692. /// However, if we extract such expressions before the normal delinearization
  693. /// takes place they can actually help to identify array size expressions in
  694. /// Fortran accesses. For the subsequently following delinearization the smax(0,
  695. /// size) component can be replaced by just 'size'. This is correct as we will
  696. /// always add and verify the assumption that for all subscript expressions
  697. /// 'exp' the inequality 0 <= exp < size holds. Hence, we will also verify
  698. /// that 0 <= size, which means smax(0, size) == size.
  699. class SCEVRemoveMax : public SCEVRewriteVisitor<SCEVRemoveMax> {
  700. public:
  701. SCEVRemoveMax(ScalarEvolution &SE, std::vector<const SCEV *> *Terms)
  702. : SCEVRewriteVisitor(SE), Terms(Terms) {}
  703. static const SCEV *rewrite(const SCEV *Scev, ScalarEvolution &SE,
  704. std::vector<const SCEV *> *Terms = nullptr) {
  705. SCEVRemoveMax Rewriter(SE, Terms);
  706. return Rewriter.visit(Scev);
  707. }
  708. const SCEV *visitSMaxExpr(const SCEVSMaxExpr *Expr) {
  709. if ((Expr->getNumOperands() == 2) && Expr->getOperand(0)->isZero()) {
  710. auto Res = visit(Expr->getOperand(1));
  711. if (Terms)
  712. (*Terms).push_back(Res);
  713. return Res;
  714. }
  715. return Expr;
  716. }
  717. private:
  718. std::vector<const SCEV *> *Terms;
  719. };
  720. } // namespace
/// Collect candidate array-size terms for delinearizing all accesses that
/// share the base pointer @p BasePointer.
SmallVector<const SCEV *, 4>
ScopDetection::getDelinearizationTerms(DetectionContext &Context,
                                       const SCEVUnknown *BasePointer) const {
  SmallVector<const SCEV *, 4> Terms;
  for (const auto &Pair : Context.Accesses[BasePointer]) {
    // Prefer the smax(0, size) components stripped by SCEVRemoveMax; if any
    // are found for this access, they are the size candidates.
    std::vector<const SCEV *> MaxTerms;
    SCEVRemoveMax::rewrite(Pair.second, SE, &MaxTerms);
    if (!MaxTerms.empty()) {
      Terms.insert(Terms.begin(), MaxTerms.begin(), MaxTerms.end());
      continue;
    }
    // In case the outermost expression is a plain add, we check if any of its
    // terms has the form 4 * %inst * %param * %param ..., aka a term that
    // contains a product between a parameter and an instruction that is
    // inside the scop. Such instructions, if allowed at all, are instructions
    // SCEV can not represent, but Polly is still looking through. As a
    // result, these instructions can depend on induction variables and are
    // most likely no array sizes. However, terms that are multiplied with
    // them are likely candidates for array sizes.
    if (auto *AF = dyn_cast<SCEVAddExpr>(Pair.second)) {
      for (auto Op : AF->operands()) {
        if (auto *AF2 = dyn_cast<SCEVAddRecExpr>(Op))
          collectParametricTerms(SE, AF2, Terms);
        if (auto *AF2 = dyn_cast<SCEVMulExpr>(Op)) {
          SmallVector<const SCEV *, 0> Operands;

          // Keep constants and values defined outside the region; drop
          // in-region instructions, which may vary with induction variables.
          for (auto *MulOp : AF2->operands()) {
            if (auto *Const = dyn_cast<SCEVConstant>(MulOp))
              Operands.push_back(Const);
            if (auto *Unknown = dyn_cast<SCEVUnknown>(MulOp)) {
              if (auto *Inst = dyn_cast<Instruction>(Unknown->getValue())) {
                if (!Context.CurRegion.contains(Inst))
                  Operands.push_back(MulOp);
              } else {
                Operands.push_back(MulOp);
              }
            }
          }
          if (Operands.size())
            Terms.push_back(SE.getMulExpr(Operands));
        }
      }
    }
    // Fall back to the generic parametric-term collection when nothing was
    // gathered for this access so far.
    if (Terms.empty())
      collectParametricTerms(SE, Pair.second, Terms);
  }
  return Terms;
}
/// Check that the delinearized dimension sizes in @p Sizes are usable:
/// affine, hoistable, and free of in-region scalar dependences. On a
/// non-affine size, @p Sizes is cleared and the accesses themselves are
/// re-checked for plain affineness.
bool ScopDetection::hasValidArraySizes(DetectionContext &Context,
                                       SmallVectorImpl<const SCEV *> &Sizes,
                                       const SCEVUnknown *BasePointer,
                                       Loop *Scope) const {
  // If no sizes were found, all sizes are trivially valid. We allow this case
  // to make it possible to pass known-affine accesses to the delinearization to
  // try to recover some interesting multi-dimensional accesses, but to still
  // allow the already known to be affine access in case the delinearization
  // fails. In such situations, the delinearization will just return a Sizes
  // array of size zero.
  if (Sizes.size() == 0)
    return true;

  Value *BaseValue = BasePointer->getValue();
  Region &CurRegion = Context.CurRegion;
  for (const SCEV *DelinearizedSize : Sizes) {
    // Don't pass down the scope to isAfffine; array dimensions must be
    // invariant across the entire scop.
    if (!isAffine(DelinearizedSize, nullptr, Context)) {
      // A non-affine size invalidates the whole derived shape; clear and
      // fall through to the per-access affine check below.
      Sizes.clear();
      break;
    }
    if (auto *Unknown = dyn_cast<SCEVUnknown>(DelinearizedSize)) {
      auto *V = dyn_cast<Value>(Unknown->getValue());
      if (auto *Load = dyn_cast<LoadInst>(V)) {
        // Sizes read by a hoistable in-region load become required
        // invariant loads.
        if (Context.CurRegion.contains(Load) &&
            isHoistableLoad(Load, CurRegion, LI, SE, DT, Context.RequiredILS))
          Context.RequiredILS.insert(Load);
        continue;
      }
    }
    // A size that depends on scalars computed inside the region cannot be
    // evaluated at SCoP entry.
    if (hasScalarDepsInsideRegion(DelinearizedSize, &CurRegion, Scope, false,
                                  Context.RequiredILS))
      return invalid<ReportNonAffineAccess>(
          Context, /*Assert=*/true, DelinearizedSize,
          Context.Accesses[BasePointer].front().first, BaseValue);
  }

  // No array shape derived.
  if (Sizes.empty()) {
    if (AllowNonAffine)
      return true;

    // Report each non-affine access individually (and keep scanning when
    // KeepGoing is set) before rejecting.
    for (const auto &Pair : Context.Accesses[BasePointer]) {
      const Instruction *Insn = Pair.first;
      const SCEV *AF = Pair.second;

      if (!isAffine(AF, Scope, Context)) {
        invalid<ReportNonAffineAccess>(Context, /*Assert=*/true, AF, Insn,
                                       BaseValue);
        if (!KeepGoing)
          return false;
      }
    }
    return false;
  }
  return true;
}
// We first store the resulting memory accesses in TempMemoryAccesses. Only
// if the access functions for all memory accesses have been successfully
// delinearized we continue. Otherwise, we either report a failure or, if
// non-affine accesses are allowed, we drop the information. In case the
// information is dropped the memory accesses need to be overapproximated
// when translated to a polyhedral representation.
bool ScopDetection::computeAccessFunctions(
    DetectionContext &Context, const SCEVUnknown *BasePointer,
    std::shared_ptr<ArrayShape> Shape) const {
  Value *BaseValue = BasePointer->getValue();
  bool BasePtrHasNonAffine = false;
  MapInsnToMemAcc TempMemoryAccesses;
  for (const auto &Pair : Context.Accesses[BasePointer]) {
    const Instruction *Insn = Pair.first;
    auto *AF = Pair.second;
    // Strip smax(0, size) wrappers before computing subscripts.
    AF = SCEVRemoveMax::rewrite(AF, SE);
    bool IsNonAffine = false;
    TempMemoryAccesses.insert(std::make_pair(Insn, MemAcc(Insn, Shape)));
    MemAcc *Acc = &TempMemoryAccesses.find(Insn)->second;
    auto *Scope = LI.getLoopFor(Insn->getParent());

    if (!AF) {
      // No rewritten access function: fall back to the original expression
      // as the single subscript if it is affine.
      if (isAffine(Pair.second, Scope, Context))
        Acc->DelinearizedSubscripts.push_back(Pair.second);
      else
        IsNonAffine = true;
    } else {
      if (Shape->DelinearizedSizes.size() == 0) {
        // No shape derived: treat the whole access function as one subscript.
        Acc->DelinearizedSubscripts.push_back(AF);
      } else {
        // Split the access function into per-dimension subscripts; an empty
        // result means delinearization failed for this access.
        llvm::computeAccessFunctions(SE, AF, Acc->DelinearizedSubscripts,
                                     Shape->DelinearizedSizes);
        if (Acc->DelinearizedSubscripts.size() == 0)
          IsNonAffine = true;
      }
      // Every resulting subscript must itself be affine.
      for (const SCEV *S : Acc->DelinearizedSubscripts)
        if (!isAffine(S, Scope, Context))
          IsNonAffine = true;
    }

    // (Possibly) report non affine access
    if (IsNonAffine) {
      BasePtrHasNonAffine = true;
      if (!AllowNonAffine)
        invalid<ReportNonAffineAccess>(Context, /*Assert=*/true, Pair.second,
                                       Insn, BaseValue);
      if (!KeepGoing && !AllowNonAffine)
        return false;
    }
  }

  // Commit the delinearized accesses only when all of them worked out.
  if (!BasePtrHasNonAffine)
    Context.InsnToMemAcc.insert(TempMemoryAccesses.begin(),
                                TempMemoryAccesses.end());

  return true;
}
  875. bool ScopDetection::hasBaseAffineAccesses(DetectionContext &Context,
  876. const SCEVUnknown *BasePointer,
  877. Loop *Scope) const {
  878. auto Shape = std::shared_ptr<ArrayShape>(new ArrayShape(BasePointer));
  879. auto Terms = getDelinearizationTerms(Context, BasePointer);
  880. findArrayDimensions(SE, Terms, Shape->DelinearizedSizes,
  881. Context.ElementSize[BasePointer]);
  882. if (!hasValidArraySizes(Context, Shape->DelinearizedSizes, BasePointer,
  883. Scope))
  884. return false;
  885. return computeAccessFunctions(Context, BasePointer, Shape);
  886. }
  887. bool ScopDetection::hasAffineMemoryAccesses(DetectionContext &Context) const {
  888. // TODO: If we have an unknown access and other non-affine accesses we do
  889. // not try to delinearize them for now.
  890. if (Context.HasUnknownAccess && !Context.NonAffineAccesses.empty())
  891. return AllowNonAffine;
  892. for (auto &Pair : Context.NonAffineAccesses) {
  893. auto *BasePointer = Pair.first;
  894. auto *Scope = Pair.second;
  895. if (!hasBaseAffineAccesses(Context, BasePointer, Scope)) {
  896. if (KeepGoing)
  897. continue;
  898. else
  899. return false;
  900. }
  901. }
  902. return true;
  903. }
/// Check whether the memory access described by base pointer @p BP and
/// access function @p AF at instruction @p Inst can be modeled: the base
/// must be a known, invariant pointer, the offset affine (or recoverable via
/// delinearization), and aliasing either ignorable or checkable at runtime.
bool ScopDetection::isValidAccess(Instruction *Inst, const SCEV *AF,
                                  const SCEVUnknown *BP,
                                  DetectionContext &Context) const {
  if (!BP)
    return invalid<ReportNoBasePtr>(Context, /*Assert=*/true, Inst);

  auto *BV = BP->getValue();
  if (isa<UndefValue>(BV))
    return invalid<ReportUndefBasePtr>(Context, /*Assert=*/true, Inst);

  // FIXME: Think about allowing IntToPtrInst
  if (IntToPtrInst *Inst = dyn_cast<IntToPtrInst>(BV))
    return invalid<ReportIntToPtr>(Context, /*Assert=*/true, Inst);

  // Check that the base address of the access is invariant in the current
  // region.
  if (!isInvariant(*BV, Context.CurRegion, Context))
    return invalid<ReportVariantBasePtr>(Context, /*Assert=*/true, BV, Inst);

  // From here on, AF is the offset relative to the base pointer.
  AF = SE.getMinusSCEV(AF, BP);

  const SCEV *Size;
  if (!isa<MemIntrinsic>(Inst)) {
    Size = SE.getElementSize(Inst);
  } else {
    // Memory intrinsics are modeled with a byte-sized element type
    // (8 bits).
    auto *SizeTy =
        SE.getEffectiveSCEVType(PointerType::getInt8PtrTy(SE.getContext()));
    Size = SE.getConstant(SizeTy, 8);
  }

  if (Context.ElementSize[BP]) {
    // Accesses of differing element size through the same base are either
    // rejected or reconciled to the smaller size.
    if (!AllowDifferentTypes && Context.ElementSize[BP] != Size)
      return invalid<ReportDifferentArrayElementSize>(Context, /*Assert=*/true,
                                                      Inst, BV);

    Context.ElementSize[BP] = SE.getSMinExpr(Size, Context.ElementSize[BP]);
  } else {
    Context.ElementSize[BP] = Size;
  }

  // An access whose offset depends on a boxed (overapproximated) loop cannot
  // be treated as affine.
  bool IsVariantInNonAffineLoop = false;
  SetVector<const Loop *> Loops;
  findLoops(AF, Loops);
  for (const Loop *L : Loops)
    if (Context.BoxedLoopsSet.count(L))
      IsVariantInNonAffineLoop = true;

  auto *Scope = LI.getLoopFor(Inst->getParent());
  bool IsAffine = !IsVariantInNonAffineLoop && isAffine(AF, Scope, Context);
  // Do not try to delinearize memory intrinsics and force them to be affine.
  if (isa<MemIntrinsic>(Inst) && !IsAffine) {
    return invalid<ReportNonAffineAccess>(Context, /*Assert=*/true, AF, Inst,
                                          BV);
  } else if (PollyDelinearize && !IsVariantInNonAffineLoop) {
    // Record the access for delinearization; non-affine ones are revisited
    // per base pointer in hasAffineMemoryAccesses.
    Context.Accesses[BP].push_back({Inst, AF});

    if (!IsAffine)
      Context.NonAffineAccesses.insert(
          std::make_pair(BP, LI.getLoopFor(Inst->getParent())));
  } else if (!AllowNonAffine && !IsAffine) {
    return invalid<ReportNonAffineAccess>(Context, /*Assert=*/true, AF, Inst,
                                          BV);
  }

  if (IgnoreAliasing)
    return true;

  // Check if the base pointer of the memory access does alias with
  // any other pointer. This cannot be handled at the moment.
  AAMDNodes AATags = Inst->getAAMetadata();
  AliasSet &AS = Context.AST.getAliasSetFor(
      MemoryLocation::getBeforeOrAfter(BP->getValue(), AATags));

  if (!AS.isMustAlias()) {
    if (PollyUseRuntimeAliasChecks) {
      bool CanBuildRunTimeCheck = true;
      // The run-time alias check places code that involves the base pointer at
      // the beginning of the SCoP. This breaks if the base pointer is defined
      // inside the scop. Hence, we can only create a run-time check if we are
      // sure the base pointer is not an instruction defined inside the scop.
      // However, we can ignore loads that will be hoisted.

      InvariantLoadsSetTy VariantLS, InvariantLS;
      // In order to detect loads which are dependent on other invariant loads
      // as invariant, we use fixed-point iteration method here i.e we iterate
      // over the alias set for arbitrary number of times until it is safe to
      // assume that all the invariant loads have been detected
      while (true) {
        // Snapshot the set sizes; the fixed point is reached when a full
        // sweep changes neither set.
        const unsigned int VariantSize = VariantLS.size(),
                           InvariantSize = InvariantLS.size();

        for (const auto &Ptr : AS) {
          Instruction *Inst = dyn_cast<Instruction>(Ptr.getValue());
          if (Inst && Context.CurRegion.contains(Inst)) {
            auto *Load = dyn_cast<LoadInst>(Inst);
            if (Load && InvariantLS.count(Load))
              continue;
            if (Load && isHoistableLoad(Load, Context.CurRegion, LI, SE, DT,
                                        InvariantLS)) {
              // A load may become hoistable in a later sweep; move it from
              // the variant to the invariant set.
              if (VariantLS.count(Load))
                VariantLS.remove(Load);
              Context.RequiredILS.insert(Load);
              InvariantLS.insert(Load);
            } else {
              CanBuildRunTimeCheck = false;
              // NOTE(review): when Inst is an in-region non-load instruction,
              // Load is nullptr here and nullptr is inserted into VariantLS —
              // presumably harmless for the size-based fixed-point test, but
              // worth confirming.
              VariantLS.insert(Load);
            }
          }
        }

        if (InvariantSize == InvariantLS.size() &&
            VariantSize == VariantLS.size())
          break;
      }

      if (CanBuildRunTimeCheck)
        return true;
    }
    return invalid<ReportAlias>(Context, /*Assert=*/true, Inst, AS);
  }

  return true;
}
  1009. bool ScopDetection::isValidMemoryAccess(MemAccInst Inst,
  1010. DetectionContext &Context) const {
  1011. Value *Ptr = Inst.getPointerOperand();
  1012. Loop *L = LI.getLoopFor(Inst->getParent());
  1013. const SCEV *AccessFunction = SE.getSCEVAtScope(Ptr, L);
  1014. const SCEVUnknown *BasePointer;
  1015. BasePointer = dyn_cast<SCEVUnknown>(SE.getPointerBase(AccessFunction));
  1016. return isValidAccess(Inst, AccessFunction, BasePointer, Context);
  1017. }
/// Check whether instruction @p Inst can be part of a SCoP: its operands
/// from error blocks must only feed terminators (via PHIs), calls and memory
/// accesses must be modelable, and exception-handling or alloca
/// instructions are rejected.
bool ScopDetection::isValidInstruction(Instruction &Inst,
                                       DetectionContext &Context) {
  for (auto &Op : Inst.operands()) {
    auto *OpInst = dyn_cast<Instruction>(&Op);

    if (!OpInst)
      continue;

    // Values defined in error blocks are only available along the error
    // path; a PHI merging them is tolerated when all its users are
    // terminators.
    if (isErrorBlock(*OpInst->getParent(), Context.CurRegion)) {
      auto *PHI = dyn_cast<PHINode>(OpInst);
      if (PHI) {
        for (User *U : PHI->users()) {
          auto *UI = dyn_cast<Instruction>(U);
          if (!UI || !UI->isTerminator())
            return false;
        }
      } else {
        return false;
      }
    }
  }

  // Exception-handling constructs cannot be modeled.
  if (isa<LandingPadInst>(&Inst) || isa<ResumeInst>(&Inst))
    return false;

  // We only check the call instruction but not invoke instruction.
  if (CallInst *CI = dyn_cast<CallInst>(&Inst)) {
    if (isValidCallInst(*CI, Context))
      return true;

    return invalid<ReportFuncCall>(Context, /*Assert=*/true, &Inst);
  }

  if (!Inst.mayReadOrWriteMemory()) {
    // Non-memory instructions are fine, except allocas, which create
    // SCoP-local memory we cannot model.
    if (!isa<AllocaInst>(Inst))
      return true;

    return invalid<ReportAlloca>(Context, /*Assert=*/true, &Inst);
  }

  // Check the access function.
  if (auto MemInst = MemAccInst::dyn_cast(Inst)) {
    Context.hasStores |= isa<StoreInst>(MemInst);
    Context.hasLoads |= isa<LoadInst>(MemInst);
    // Volatile or atomic accesses are not supported.
    if (!MemInst.isSimple())
      return invalid<ReportNonSimpleMemoryAccess>(Context, /*Assert=*/true,
                                                  &Inst);

    return isValidMemoryAccess(MemInst, Context);
  }

  // We do not know this instruction, therefore we assume it is invalid.
  return invalid<ReportUnknownInst>(Context, /*Assert=*/true, &Inst);
}
  1062. /// Check whether @p L has exiting blocks.
  1063. ///
  1064. /// @param L The loop of interest
  1065. ///
  1066. /// @return True if the loop has exiting blocks, false otherwise.
  1067. static bool hasExitingBlocks(Loop *L) {
  1068. SmallVector<BasicBlock *, 4> ExitingBlocks;
  1069. L->getExitingBlocks(ExitingBlocks);
  1070. return !ExitingBlocks.empty();
  1071. }
  1072. bool ScopDetection::canUseISLTripCount(Loop *L, DetectionContext &Context) {
  1073. // Ensure the loop has valid exiting blocks as well as latches, otherwise we
  1074. // need to overapproximate it as a boxed loop.
  1075. SmallVector<BasicBlock *, 4> LoopControlBlocks;
  1076. L->getExitingBlocks(LoopControlBlocks);
  1077. L->getLoopLatches(LoopControlBlocks);
  1078. for (BasicBlock *ControlBB : LoopControlBlocks) {
  1079. if (!isValidCFG(*ControlBB, true, false, Context))
  1080. return false;
  1081. }
  1082. // We can use ISL to compute the trip count of L.
  1083. return true;
  1084. }
/// Check whether loop @p L can be modeled: it must have an exit, a single
/// exit block, and a trip count computable by ISL (or be overapproximable
/// as a non-affine subregion).
bool ScopDetection::isValidLoop(Loop *L, DetectionContext &Context) {
  // Loops that contain part but not all of the blocks of a region cannot be
  // handled by the schedule generation. Such loop constructs can happen
  // because a region can contain BBs that have no path to the exit block
  // (Infinite loops, UnreachableInst), but such blocks are never part of a
  // loop.
  //
  // _______________
  // | Loop Header | <-----------.
  // ---------------             |
  //        |                    |
  // _______________       ______________
  // | RegionEntry |-----> | RegionExit |----->
  // ---------------       --------------
  //        |
  // _______________
  // | EndlessLoop | <--.
  // ---------------    |
  //        |           |
  //       \------------/
  //
  // In the example above, the loop (LoopHeader,RegionEntry,RegionExit) is
  // neither entirely contained in the region RegionEntry->RegionExit
  // (containing RegionEntry,EndlessLoop) nor is the region entirely contained
  // in the loop.
  // The block EndlessLoop is contained in the region because Region::contains
  // tests whether it is not dominated by RegionExit. This is probably to not
  // having to query the PostdominatorTree. Instead of an endless loop, a dead
  // end can also be formed by an UnreachableInst. This case is already caught
  // by isErrorBlock(). We hence only have to reject endless loops here.
  if (!hasExitingBlocks(L))
    return invalid<ReportLoopHasNoExit>(Context, /*Assert=*/true, L);

  // The algorithm for domain construction assumes that loops has only a single
  // exit block (and hence corresponds to a subregion). Note that we cannot use
  // L->getExitBlock() because it does not check whether all exiting edges point
  // to the same BB.
  SmallVector<BasicBlock *, 4> ExitBlocks;
  L->getExitBlocks(ExitBlocks);
  // ExitBlocks is non-empty here: hasExitingBlocks above guarantees at least
  // one exiting edge.
  BasicBlock *TheExitBlock = ExitBlocks[0];
  for (BasicBlock *ExitBB : ExitBlocks) {
    if (TheExitBlock != ExitBB)
      return invalid<ReportLoopHasMultipleExits>(Context, /*Assert=*/true, L);
  }

  if (canUseISLTripCount(L, Context))
    return true;

  // Trip count not computable: try to overapproximate the smallest region
  // containing both the loop and the header's region as non-affine.
  if (AllowNonAffineSubLoops && AllowNonAffineSubRegions) {
    Region *R = RI.getRegionFor(L->getHeader());
    while (R != &Context.CurRegion && !R->contains(L))
      R = R->getParent();

    if (addOverApproximatedRegion(R, Context))
      return true;
  }

  const SCEV *LoopCount = SE.getBackedgeTakenCount(L);
  return invalid<ReportLoopBound>(Context, /*Assert=*/true, L, LoopCount);
}
  1140. /// Return the number of loops in @p L (incl. @p L) that have a trip
  1141. /// count that is not known to be less than @MinProfitableTrips.
  1142. ScopDetection::LoopStats
  1143. ScopDetection::countBeneficialSubLoops(Loop *L, ScalarEvolution &SE,
  1144. unsigned MinProfitableTrips) {
  1145. auto *TripCount = SE.getBackedgeTakenCount(L);
  1146. int NumLoops = 1;
  1147. int MaxLoopDepth = 1;
  1148. if (MinProfitableTrips > 0)
  1149. if (auto *TripCountC = dyn_cast<SCEVConstant>(TripCount))
  1150. if (TripCountC->getType()->getScalarSizeInBits() <= 64)
  1151. if (TripCountC->getValue()->getZExtValue() <= MinProfitableTrips)
  1152. NumLoops -= 1;
  1153. for (auto &SubLoop : *L) {
  1154. LoopStats Stats = countBeneficialSubLoops(SubLoop, SE, MinProfitableTrips);
  1155. NumLoops += Stats.NumLoops;
  1156. MaxLoopDepth = std::max(MaxLoopDepth, Stats.MaxDepth + 1);
  1157. }
  1158. return {NumLoops, MaxLoopDepth};
  1159. }
/// Aggregate beneficial-loop statistics over all loops contained in region
/// @p R, starting from the loops immediately inside it.
ScopDetection::LoopStats
ScopDetection::countBeneficialLoops(Region *R, ScalarEvolution &SE,
                                    LoopInfo &LI, unsigned MinProfitableTrips) {
  int LoopNum = 0;
  int MaxLoopDepth = 0;

  auto L = LI.getLoopFor(R->getEntry());

  // If L is fully contained in R, move to first loop surrounding R. Otherwise,
  // L is either nullptr or already surrounding R.
  if (L && R->contains(L)) {
    L = R->outermostLoopInRegion(L);
    L = L->getParentLoop();
  }

  // With a surrounding loop, iterate its subloops; otherwise iterate the
  // function's top-level loops.
  auto SubLoops =
      L ? L->getSubLoopsVector() : std::vector<Loop *>(LI.begin(), LI.end());

  // Only count subloops that lie inside R.
  for (auto &SubLoop : SubLoops)
    if (R->contains(SubLoop)) {
      LoopStats Stats =
          countBeneficialSubLoops(SubLoop, SE, MinProfitableTrips);
      LoopNum += Stats.NumLoops;
      MaxLoopDepth = std::max(MaxLoopDepth, Stats.MaxDepth);
    }

  return {LoopNum, MaxLoopDepth};
}
/// Decide whether @p BB should be treated as an "error block" of region
/// @p R: a rarely executed block (e.g. ending in unreachable, or containing
/// a call that may not return or has unmodeled memory effects) that Polly
/// may assume is not executed.
static bool isErrorBlockImpl(BasicBlock &BB, const Region &R, LoopInfo &LI,
                             const DominatorTree &DT) {
  if (isa<UnreachableInst>(BB.getTerminator()))
    return true;

  // Loop headers are part of regular control flow, never error blocks.
  if (LI.isLoopHeader(&BB))
    return false;

  // Don't consider something outside the SCoP as error block. It will precede
  // the code versioning runtime check.
  if (!R.contains(&BB))
    return false;

  // Basic blocks that are always executed are not considered error blocks,
  // as their execution can not be a rare event.
  bool DominatesAllPredecessors = true;
  if (R.isTopLevelRegion()) {
    // Top-level region: BB must dominate every returning block of the
    // function to be considered always-executed.
    for (BasicBlock &I : *R.getEntry()->getParent()) {
      if (isa<ReturnInst>(I.getTerminator()) && !DT.dominates(&BB, &I)) {
        DominatesAllPredecessors = false;
        break;
      }
    }
  } else {
    // Otherwise BB must dominate every in-region predecessor of the exit.
    for (auto Pred : predecessors(R.getExit())) {
      if (R.contains(Pred) && !DT.dominates(&BB, Pred)) {
        DominatesAllPredecessors = false;
        break;
      }
    }
  }

  if (DominatesAllPredecessors)
    return false;

  // A non-dominating block is an error block when it contains a call with
  // effects we cannot model.
  for (Instruction &Inst : BB)
    if (CallInst *CI = dyn_cast<CallInst>(&Inst)) {
      if (isDebugCall(CI))
        continue;

      if (isIgnoredIntrinsic(CI))
        continue;

      // memset, memcpy and memmove are modeled intrinsics.
      if (isa<MemSetInst>(CI) || isa<MemTransferInst>(CI))
        continue;

      if (!CI->doesNotAccessMemory())
        return true;
      if (CI->doesNotReturn())
        return true;
    }

  return false;
}
  1229. bool ScopDetection::isErrorBlock(llvm::BasicBlock &BB, const llvm::Region &R) {
  1230. if (!PollyAllowErrorBlocks)
  1231. return false;
  1232. auto It = ErrorBlockCache.insert({std::make_pair(&BB, &R), false});
  1233. if (!It.second)
  1234. return It.first->getSecond();
  1235. bool Result = isErrorBlockImpl(BB, R, LI, DT);
  1236. It.first->second = Result;
  1237. return Result;
  1238. }
// Expand region R as far as it still forms a valid SCoP candidate.
//
// Repeatedly replaces the candidate with its next enclosing expansion
// (Region::getExpandedRegion) as long as the expanded candidate passes
// allBlocksValid without collecting rejection-log errors. Returns the largest
// valid expansion found -- ownership is handed to the caller via release() --
// or nullptr if no expansion was valid.
Region *ScopDetection::expandRegion(Region &R) {
  // Initial no valid region was found (greater than R)
  std::unique_ptr<Region> LastValidRegion;
  auto ExpandedRegion = std::unique_ptr<Region>(R.getExpandedRegion());

  LLVM_DEBUG(dbgs() << "\tExpanding " << R.getNameStr() << "\n");

  while (ExpandedRegion) {
    // Each candidate gets a fresh DetectionContext keyed by its
    // (entry, exit) block pair; the map owns the context.
    BBPair P = getBBPairForRegion(ExpandedRegion.get());
    std::unique_ptr<DetectionContext> &Entry = DetectionContextMap[P];
    Entry = std::make_unique<DetectionContext>(*ExpandedRegion, AA,
                                               /*Verifying=*/false);
    DetectionContext &Context = *Entry.get();

    LLVM_DEBUG(dbgs() << "\t\tTrying " << ExpandedRegion->getNameStr() << "\n");
    // Only expand when we did not collect errors.

    if (!Context.Log.hasErrors()) {
      // If the exit is valid check all blocks
      //  - if true, a valid region was found => store it + keep expanding
      //  - if false, .tbd. => stop (should this really end the loop?)
      if (!allBlocksValid(Context) || Context.Log.hasErrors()) {
        removeCachedResults(*ExpandedRegion);
        DetectionContextMap.erase(P);
        break;
      }

      // Store this region, because it is the greatest valid (encountered so
      // far).
      if (LastValidRegion) {
        removeCachedResults(*LastValidRegion);
        // NOTE(review): P is the (entry, exit) pair of the *new*
        // ExpandedRegion, not of LastValidRegion. This erases the context
        // that was just created and validated above, while the superseded
        // region's context stays in the map. It looks like the intent was
        // DetectionContextMap.erase(getBBPairForRegion(LastValidRegion.get()))
        // -- confirm before changing.
        DetectionContextMap.erase(P);
      }
      LastValidRegion = std::move(ExpandedRegion);

      // Create and test the next greater region (if any)
      ExpandedRegion =
          std::unique_ptr<Region>(LastValidRegion->getExpandedRegion());

    } else {
      // Create and test the next greater region (if any)
      removeCachedResults(*ExpandedRegion);
      DetectionContextMap.erase(P);
      ExpandedRegion =
          std::unique_ptr<Region>(ExpandedRegion->getExpandedRegion());
    }
  }

  LLVM_DEBUG({
    if (LastValidRegion)
      dbgs() << "\tto " << LastValidRegion->getNameStr() << "\n";
    else
      dbgs() << "\tExpanding " << R.getNameStr() << " failed\n";
  });

  // Caller takes ownership of the winning expansion (may be nullptr).
  return LastValidRegion.release();
}
  1287. static bool regionWithoutLoops(Region &R, LoopInfo &LI) {
  1288. for (const BasicBlock *BB : R.blocks())
  1289. if (R.contains(LI.getLoopFor(BB)))
  1290. return false;
  1291. return true;
  1292. }
  1293. void ScopDetection::removeCachedResultsRecursively(const Region &R) {
  1294. for (auto &SubRegion : R) {
  1295. if (ValidRegions.count(SubRegion.get())) {
  1296. removeCachedResults(*SubRegion.get());
  1297. } else
  1298. removeCachedResultsRecursively(*SubRegion);
  1299. }
  1300. }
// Forget that R was detected as a valid SCoP candidate.
void ScopDetection::removeCachedResults(const Region &R) {
  ValidRegions.remove(&R);
}
// Detect SCoPs in region R and, if R itself is rejected, in its subregions.
//
// A region that validates is inserted into ValidRegions and the recursion
// stops there. Otherwise each subregion is tried, and afterwards every valid
// child is speculatively expanded (expandRegion) to also catch non-canonical
// regions that do not appear in the region tree.
void ScopDetection::findScops(Region &R) {
  // Create (and own, via the map) a fresh detection context for R.
  std::unique_ptr<DetectionContext> &Entry =
      DetectionContextMap[getBBPairForRegion(&R)];
  Entry = std::make_unique<DetectionContext>(R, AA, /*Verifying=*/false);
  DetectionContext &Context = *Entry.get();

  bool RegionIsValid = false;
  // Loop-free regions are rejected as unprofitable up front unless
  // PollyProcessUnprofitable is set.
  if (!PollyProcessUnprofitable && regionWithoutLoops(R, LI))
    invalid<ReportUnprofitable>(Context, /*Assert=*/true, &R);
  else
    RegionIsValid = isValidRegion(Context);

  bool HasErrors = !RegionIsValid || Context.Log.size() > 0;

  if (HasErrors) {
    removeCachedResults(R);
  } else {
    // R is a SCoP; do not look into its children.
    ValidRegions.insert(&R);
    return;
  }

  for (auto &SubRegion : R)
    findScops(*SubRegion);

  // Try to expand regions.
  //
  // As the region tree normally only contains canonical regions, non canonical
  // regions that form a Scop are not found. Therefore, those non canonical
  // regions are checked by expanding the canonical ones.

  // Snapshot the children first: expansion below mutates R's subregion list
  // via addSubRegion.
  std::vector<Region *> ToExpand;

  for (auto &SubRegion : R)
    ToExpand.push_back(SubRegion.get());

  for (Region *CurrentRegion : ToExpand) {
    // Skip invalid regions. Regions may become invalid, if they are element of
    // an already expanded region.
    if (!ValidRegions.count(CurrentRegion))
      continue;

    // Skip regions that had errors.
    bool HadErrors = lookupRejectionLog(CurrentRegion)->hasErrors();
    if (HadErrors)
      continue;

    Region *ExpandedR = expandRegion(*CurrentRegion);

    if (!ExpandedR)
      continue;

    // The expansion supersedes CurrentRegion: splice it into the region tree,
    // mark it valid, and drop all cached results it subsumes.
    R.addSubRegion(ExpandedR, true);
    ValidRegions.insert(ExpandedR);
    removeCachedResults(*CurrentRegion);
    removeCachedResultsRecursively(*ExpandedR);
  }
}
// Validate every basic block of Context.CurRegion.
//
// Checks, in order: (1) loops headed inside the region are either fully
// contained and valid, or -- if only partially contained -- keep none of
// their latches inside the region; (2) each block has valid control flow and
// (for non-error blocks) valid instructions; (3) all memory accesses
// collected so far are affine. When KeepGoing is set, individual failures
// are recorded in the log but do not abort the scan early.
bool ScopDetection::allBlocksValid(DetectionContext &Context) {
  Region &CurRegion = Context.CurRegion;

  for (const BasicBlock *BB : CurRegion.blocks()) {
    Loop *L = LI.getLoopFor(BB);
    // Only inspect each loop once, at its header block.
    if (L && L->getHeader() == BB) {
      if (CurRegion.contains(L)) {
        if (!isValidLoop(L, Context) && !KeepGoing)
          return false;
      } else {
        // Loop extends beyond the region: reject if any latch is inside.
        SmallVector<BasicBlock *, 1> Latches;
        L->getLoopLatches(Latches);
        for (BasicBlock *Latch : Latches)
          if (CurRegion.contains(Latch))
            return invalid<ReportLoopOnlySomeLatches>(Context, /*Assert=*/true,
                                                      L);
      }
    }
  }

  for (BasicBlock *BB : CurRegion.blocks()) {
    bool IsErrorBlock = isErrorBlock(*BB, CurRegion);

    // Also check exception blocks (and possibly register them as non-affine
    // regions). Even though exception blocks are not modeled, we use them
    // to forward-propagate domain constraints during ScopInfo construction.
    if (!isValidCFG(*BB, false, IsErrorBlock, Context) && !KeepGoing)
      return false;

    if (IsErrorBlock)
      continue;

    // --BB->end() excludes the block's final (terminator) instruction,
    // which was already handled by isValidCFG above.
    for (BasicBlock::iterator I = BB->begin(), E = --BB->end(); I != E; ++I)
      if (!isValidInstruction(*I, Context) && !KeepGoing)
        return false;
  }

  if (!hasAffineMemoryAccesses(Context))
    return false;

  return true;
}
  1384. bool ScopDetection::hasSufficientCompute(DetectionContext &Context,
  1385. int NumLoops) const {
  1386. int InstCount = 0;
  1387. if (NumLoops == 0)
  1388. return false;
  1389. for (auto *BB : Context.CurRegion.blocks())
  1390. if (Context.CurRegion.contains(LI.getLoopFor(BB)))
  1391. InstCount += BB->size();
  1392. InstCount = InstCount / NumLoops;
  1393. return InstCount >= ProfitabilityMinPerLoopInstructions;
  1394. }
  1395. bool ScopDetection::hasPossiblyDistributableLoop(
  1396. DetectionContext &Context) const {
  1397. for (auto *BB : Context.CurRegion.blocks()) {
  1398. auto *L = LI.getLoopFor(BB);
  1399. if (!Context.CurRegion.contains(L))
  1400. continue;
  1401. if (Context.BoxedLoopsSet.count(L))
  1402. continue;
  1403. unsigned StmtsWithStoresInLoops = 0;
  1404. for (auto *LBB : L->blocks()) {
  1405. bool MemStore = false;
  1406. for (auto &I : *LBB)
  1407. MemStore |= isa<StoreInst>(&I);
  1408. StmtsWithStoresInLoops += MemStore;
  1409. }
  1410. return (StmtsWithStoresInLoops > 1);
  1411. }
  1412. return false;
  1413. }
  1414. bool ScopDetection::isProfitableRegion(DetectionContext &Context) const {
  1415. Region &CurRegion = Context.CurRegion;
  1416. if (PollyProcessUnprofitable)
  1417. return true;
  1418. // We can probably not do a lot on scops that only write or only read
  1419. // data.
  1420. if (!Context.hasStores || !Context.hasLoads)
  1421. return invalid<ReportUnprofitable>(Context, /*Assert=*/true, &CurRegion);
  1422. int NumLoops =
  1423. countBeneficialLoops(&CurRegion, SE, LI, MIN_LOOP_TRIP_COUNT).NumLoops;
  1424. int NumAffineLoops = NumLoops - Context.BoxedLoopsSet.size();
  1425. // Scops with at least two loops may allow either loop fusion or tiling and
  1426. // are consequently interesting to look at.
  1427. if (NumAffineLoops >= 2)
  1428. return true;
  1429. // A loop with multiple non-trivial blocks might be amendable to distribution.
  1430. if (NumAffineLoops == 1 && hasPossiblyDistributableLoop(Context))
  1431. return true;
  1432. // Scops that contain a loop with a non-trivial amount of computation per
  1433. // loop-iteration are interesting as we may be able to parallelize such
  1434. // loops. Individual loops that have only a small amount of computation
  1435. // per-iteration are performance-wise very fragile as any change to the
  1436. // loop induction variables may affect performance. To not cause spurious
  1437. // performance regressions, we do not consider such loops.
  1438. if (NumAffineLoops == 1 && hasSufficientCompute(Context, NumLoops))
  1439. return true;
  1440. return invalid<ReportUnprofitable>(Context, /*Assert=*/true, &CurRegion);
  1441. }
// Run the region-level validity checks on Context.CurRegion.
//
// Rejects: top-level regions (unless PollyAllowFullFunction), regions whose
// exit block terminates in unreachable, regions filtered by -polly-only-region,
// regions reached via indirectbr/callbr, regions starting at the function
// entry block, regions with invalid blocks (allBlocksValid), and irreducible
// regions. Rejections are recorded in Context.Log via invalid<>().
bool ScopDetection::isValidRegion(DetectionContext &Context) {
  Region &CurRegion = Context.CurRegion;

  LLVM_DEBUG(dbgs() << "Checking region: " << CurRegion.getNameStr() << "\n\t");

  if (!PollyAllowFullFunction && CurRegion.isTopLevelRegion()) {
    LLVM_DEBUG(dbgs() << "Top level region is invalid\n");
    return false;
  }

  DebugLoc DbgLoc;
  if (CurRegion.getExit() &&
      isa<UnreachableInst>(CurRegion.getExit()->getTerminator())) {
    LLVM_DEBUG(dbgs() << "Unreachable in exit\n");
    return invalid<ReportUnreachableInExit>(Context, /*Assert=*/true,
                                            CurRegion.getExit(), DbgLoc);
  }

  // Honor -polly-only-region: only keep regions whose entry-block name
  // contains the requested substring.
  if (!OnlyRegion.empty() &&
      !CurRegion.getEntry()->getName().count(OnlyRegion)) {
    LLVM_DEBUG({
      dbgs() << "Region entry does not match -polly-only-region";
      dbgs() << "\n";
    });
    return false;
  }

  // Reject regions entered through indirect or callbr branches.
  for (BasicBlock *Pred : predecessors(CurRegion.getEntry())) {
    Instruction *PredTerm = Pred->getTerminator();
    if (isa<IndirectBrInst>(PredTerm) || isa<CallBrInst>(PredTerm))
      return invalid<ReportIndirectPredecessor>(
          Context, /*Assert=*/true, PredTerm, PredTerm->getDebugLoc());
  }

  // SCoP cannot contain the entry block of the function, because we need
  // to insert alloca instruction there when translate scalar to array.
  if (!PollyAllowFullFunction &&
      CurRegion.getEntry() ==
          &(CurRegion.getEntry()->getParent()->getEntryBlock()))
    return invalid<ReportEntry>(Context, /*Assert=*/true, CurRegion.getEntry());

  if (!allBlocksValid(Context))
    return false;

  // isReducibleRegion fills DbgLoc with the offending terminator's location
  // when it finds irreducible control flow.
  if (!isReducibleRegion(CurRegion, DbgLoc))
    return invalid<ReportIrreducibleRegion>(Context, /*Assert=*/true,
                                            &CurRegion, DbgLoc);

  LLVM_DEBUG(dbgs() << "OK\n");
  return true;
}
// Tag F with the Polly skip attribute so isValidFunction rejects it later.
void ScopDetection::markFunctionAsInvalid(Function *F) {
  F->addFnAttr(PollySkipFnAttr);
}
  1487. bool ScopDetection::isValidFunction(Function &F) {
  1488. return !F.hasFnAttribute(PollySkipFnAttr);
  1489. }
  1490. void ScopDetection::printLocations(Function &F) {
  1491. for (const Region *R : *this) {
  1492. unsigned LineEntry, LineExit;
  1493. std::string FileName;
  1494. getDebugLocation(R, LineEntry, LineExit, FileName);
  1495. DiagnosticScopFound Diagnostic(F, FileName, LineEntry, LineExit);
  1496. F.getContext().diagnose(Diagnostic);
  1497. }
  1498. }
  1499. void ScopDetection::emitMissedRemarks(const Function &F) {
  1500. for (auto &DIt : DetectionContextMap) {
  1501. DetectionContext &DC = *DIt.getSecond().get();
  1502. if (DC.Log.hasErrors())
  1503. emitRejectionRemarks(DIt.getFirst(), DC.Log, ORE);
  1504. }
  1505. }
// Check whether region R has reducible control flow.
//
// Performs an iterative WHITE/GREY/BLACK DFS from the region entry, keeping
// (block, next-successor-index) pairs on an explicit stack. A back edge into
// a GREY block whose target does not dominate the source marks irreducible
// control flow: DbgLoc is set to the offending terminator's debug location
// and false is returned. Otherwise returns true.
bool ScopDetection::isReducibleRegion(Region &R, DebugLoc &DbgLoc) const {
  /// Enum for coloring BBs in Region.
  ///
  /// WHITE - Unvisited BB in DFS walk.
  /// GREY - BBs which are currently on the DFS stack for processing.
  /// BLACK - Visited and completely processed BB.
  enum Color { WHITE, GREY, BLACK };

  BasicBlock *REntry = R.getEntry();
  BasicBlock *RExit = R.getExit();
  // Map to match the color of a BasicBlock during the DFS walk.
  DenseMap<const BasicBlock *, Color> BBColorMap;
  // Stack keeping track of current BB and index of next child to be processed.
  std::stack<std::pair<BasicBlock *, unsigned>> DFSStack;

  unsigned AdjacentBlockIndex = 0;
  BasicBlock *CurrBB, *SuccBB;
  CurrBB = REntry;

  // Initialize the map for all BB with WHITE color.
  for (auto *BB : R.blocks())
    BBColorMap[BB] = WHITE;

  // Process the entry block of the Region.
  BBColorMap[CurrBB] = GREY;
  DFSStack.push(std::make_pair(CurrBB, 0));

  while (!DFSStack.empty()) {
    // Get next BB on stack to be processed.
    CurrBB = DFSStack.top().first;
    AdjacentBlockIndex = DFSStack.top().second;
    DFSStack.pop();

    // Loop to iterate over the successors of current BB.
    const Instruction *TInst = CurrBB->getTerminator();
    unsigned NSucc = TInst->getNumSuccessors();
    for (unsigned I = AdjacentBlockIndex; I < NSucc;
         ++I, ++AdjacentBlockIndex) {
      SuccBB = TInst->getSuccessor(I);

      // Checks for region exit block and self-loops in BB.
      if (SuccBB == RExit || SuccBB == CurrBB)
        continue;

      // WHITE indicates an unvisited BB in DFS walk.
      if (BBColorMap[SuccBB] == WHITE) {
        // Push the current BB and the index of the next child to be visited.
        DFSStack.push(std::make_pair(CurrBB, I + 1));
        // Push the next BB to be processed.
        DFSStack.push(std::make_pair(SuccBB, 0));
        // First time the BB is being processed.
        BBColorMap[SuccBB] = GREY;
        // Descend into SuccBB before finishing CurrBB's successors.
        break;
      } else if (BBColorMap[SuccBB] == GREY) {
        // GREY indicates a loop in the control flow.
        // If the destination dominates the source, it is a natural loop
        // else, an irreducible control flow in the region is detected.
        if (!DT.dominates(SuccBB, CurrBB)) {
          // Get debug info of instruction which causes irregular control flow.
          DbgLoc = TInst->getDebugLoc();
          return false;
        }
      }
    }

    // If all children of current BB have been processed,
    // then mark that BB as fully processed.
    if (AdjacentBlockIndex == NSucc)
      BBColorMap[CurrBB] = BLACK;
  }

  return true;
}
  1569. static void updateLoopCountStatistic(ScopDetection::LoopStats Stats,
  1570. bool OnlyProfitable) {
  1571. if (!OnlyProfitable) {
  1572. NumLoopsInScop += Stats.NumLoops;
  1573. MaxNumLoopsInScop =
  1574. std::max(MaxNumLoopsInScop.getValue(), (unsigned)Stats.NumLoops);
  1575. if (Stats.MaxDepth == 0)
  1576. NumScopsDepthZero++;
  1577. else if (Stats.MaxDepth == 1)
  1578. NumScopsDepthOne++;
  1579. else if (Stats.MaxDepth == 2)
  1580. NumScopsDepthTwo++;
  1581. else if (Stats.MaxDepth == 3)
  1582. NumScopsDepthThree++;
  1583. else if (Stats.MaxDepth == 4)
  1584. NumScopsDepthFour++;
  1585. else if (Stats.MaxDepth == 5)
  1586. NumScopsDepthFive++;
  1587. else
  1588. NumScopsDepthLarger++;
  1589. } else {
  1590. NumLoopsInProfScop += Stats.NumLoops;
  1591. MaxNumLoopsInProfScop =
  1592. std::max(MaxNumLoopsInProfScop.getValue(), (unsigned)Stats.NumLoops);
  1593. if (Stats.MaxDepth == 0)
  1594. NumProfScopsDepthZero++;
  1595. else if (Stats.MaxDepth == 1)
  1596. NumProfScopsDepthOne++;
  1597. else if (Stats.MaxDepth == 2)
  1598. NumProfScopsDepthTwo++;
  1599. else if (Stats.MaxDepth == 3)
  1600. NumProfScopsDepthThree++;
  1601. else if (Stats.MaxDepth == 4)
  1602. NumProfScopsDepthFour++;
  1603. else if (Stats.MaxDepth == 5)
  1604. NumProfScopsDepthFive++;
  1605. else
  1606. NumProfScopsDepthLarger++;
  1607. }
  1608. }
  1609. ScopDetection::DetectionContext *
  1610. ScopDetection::getDetectionContext(const Region *R) const {
  1611. auto DCMIt = DetectionContextMap.find(getBBPairForRegion(R));
  1612. if (DCMIt == DetectionContextMap.end())
  1613. return nullptr;
  1614. return DCMIt->second.get();
  1615. }
  1616. const RejectLog *ScopDetection::lookupRejectionLog(const Region *R) const {
  1617. const DetectionContext *DC = getDetectionContext(R);
  1618. return DC ? &DC->Log : nullptr;
  1619. }
// Re-run validity detection on R, which must currently be a maximal valid
// region. Uses a local context (not registered in DetectionContextMap) so
// cached state is left untouched; the return value of isValidRegion is
// deliberately ignored -- rejections assert in verification mode.
void ScopDetection::verifyRegion(const Region &R) {
  assert(isMaxRegionInScop(R) && "Expect R is a valid region.");

  DetectionContext Context(const_cast<Region &>(R), AA, true /*verifying*/);
  isValidRegion(Context);
}
  1625. void ScopDetection::verifyAnalysis() {
  1626. if (!VerifyScops)
  1627. return;
  1628. for (const Region *R : ValidRegions)
  1629. verifyRegion(*R);
  1630. }
  1631. bool ScopDetectionWrapperPass::runOnFunction(Function &F) {
  1632. auto &LI = getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  1633. auto &RI = getAnalysis<RegionInfoPass>().getRegionInfo();
  1634. auto &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
  1635. auto &SE = getAnalysis<ScalarEvolutionWrapperPass>().getSE();
  1636. auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  1637. auto &ORE = getAnalysis<OptimizationRemarkEmitterWrapperPass>().getORE();
  1638. Result = std::make_unique<ScopDetection>(DT, SE, LI, RI, AA, ORE);
  1639. Result->detect(F);
  1640. return false;
  1641. }
  1642. void ScopDetectionWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  1643. AU.addRequired<LoopInfoWrapperPass>();
  1644. AU.addRequiredTransitive<ScalarEvolutionWrapperPass>();
  1645. AU.addRequired<DominatorTreeWrapperPass>();
  1646. AU.addRequired<OptimizationRemarkEmitterWrapperPass>();
  1647. // We also need AA and RegionInfo when we are verifying analysis.
  1648. AU.addRequiredTransitive<AAResultsWrapperPass>();
  1649. AU.addRequiredTransitive<RegionInfoPass>();
  1650. AU.setPreservesAll();
  1651. }
  1652. void ScopDetectionWrapperPass::print(raw_ostream &OS, const Module *) const {
  1653. for (const Region *R : Result->ValidRegions)
  1654. OS << "Valid Region for Scop: " << R->getNameStr() << '\n';
  1655. OS << "\n";
  1656. }
// Legacy pass constructor; keeps the global runtime-alias-check flag
// consistent with -polly-ignore-aliasing.
ScopDetectionWrapperPass::ScopDetectionWrapperPass() : FunctionPass(ID) {
  // Disable runtime alias checks if we ignore aliasing all together.
  if (IgnoreAliasing)
    PollyUseRuntimeAliasChecks = false;
}
// New-pass-manager analysis constructor; mirrors the wrapper pass's handling
// of -polly-ignore-aliasing.
ScopAnalysis::ScopAnalysis() {
  // Disable runtime alias checks if we ignore aliasing all together.
  if (IgnoreAliasing)
    PollyUseRuntimeAliasChecks = false;
}
// Drop the detection result when the legacy pass manager frees this pass.
void ScopDetectionWrapperPass::releaseMemory() { Result.reset(); }

// Pass identification for the legacy pass manager.
char ScopDetectionWrapperPass::ID;

// Unique key used by the new pass manager's analysis caching.
AnalysisKey ScopAnalysis::Key;
  1670. ScopDetection ScopAnalysis::run(Function &F, FunctionAnalysisManager &FAM) {
  1671. auto &LI = FAM.getResult<LoopAnalysis>(F);
  1672. auto &RI = FAM.getResult<RegionInfoAnalysis>(F);
  1673. auto &AA = FAM.getResult<AAManager>(F);
  1674. auto &SE = FAM.getResult<ScalarEvolutionAnalysis>(F);
  1675. auto &DT = FAM.getResult<DominatorTreeAnalysis>(F);
  1676. auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(F);
  1677. ScopDetection Result(DT, SE, LI, RI, AA, ORE);
  1678. Result.detect(F);
  1679. return Result;
  1680. }
  1681. PreservedAnalyses ScopAnalysisPrinterPass::run(Function &F,
  1682. FunctionAnalysisManager &FAM) {
  1683. OS << "Detected Scops in Function " << F.getName() << "\n";
  1684. auto &SD = FAM.getResult<ScopAnalysis>(F);
  1685. for (const Region *R : SD.ValidRegions)
  1686. OS << "Valid Region for Scop: " << R->getNameStr() << '\n';
  1687. OS << "\n";
  1688. return PreservedAnalyses::all();
  1689. }
// Factory used for legacy pass-manager registration.
Pass *polly::createScopDetectionWrapperPassPass() {
  return new ScopDetectionWrapperPass();
}
// Legacy pass-manager registration of -polly-detect, listing the analysis
// passes it depends on (must match getAnalysisUsage above).
INITIALIZE_PASS_BEGIN(ScopDetectionWrapperPass, "polly-detect",
                      "Polly - Detect static control parts (SCoPs)", false,
                      false);
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass);
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass);
INITIALIZE_PASS_DEPENDENCY(RegionInfoPass);
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass);
INITIALIZE_PASS_DEPENDENCY(ScalarEvolutionWrapperPass);
INITIALIZE_PASS_DEPENDENCY(OptimizationRemarkEmitterWrapperPass);
INITIALIZE_PASS_END(ScopDetectionWrapperPass, "polly-detect",
                    "Polly - Detect static control parts (SCoPs)", false, false)