//===- ScopDetection.cpp - Detect Scops -----------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// Detect the maximal Scops of a function.
//
// A static control part (Scop) is a subgraph of the control flow graph (CFG)
// that only has statically known control flow and can therefore be described
// within the polyhedral model.
//
// Every Scop fulfills these restrictions:
//
// * It is a single entry single exit region
//
// * Only affine linear bounds in the loops
//
// Every natural loop in a Scop must have a number of loop iterations that can
// be described as an affine linear function in surrounding loop iterators or
// parameters. (A parameter is a scalar that does not change its value during
// execution of the Scop.)
//
// * Only comparisons of affine linear expressions in conditions
//
// * All loops and conditions perfectly nested
//
// The control flow needs to be structured such that it could be written using
// just 'for' and 'if' statements, without the need for any 'goto', 'break' or
// 'continue'.
//
// * Side-effect-free function calls
//
// Function calls and intrinsics that do not have side effects (readnone), as
// well as the memory intrinsics memset, memcpy and memmove, are allowed.
//
// The Scop detection finds the largest Scops by checking if the largest
// region is a Scop. If this is not the case, its canonical subregions are
// checked until a region is a Scop. Afterwards, we try to extend this Scop by
// creating a larger non-canonical region.
//
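// As an illustrative example (not taken from the code below): assuming 'A',
// 'n' and 'm' do not change their values within the region, a loop nest like
//
//   for (long i = 0; i < n; i++)
//     for (long j = 0; j < m; j++)
//       A[i][j] += 1;
//
// has affine loop bounds and affine access functions and would typically
// satisfy the restrictions above.
//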
//===----------------------------------------------------------------------===//
#include "polly/ScopDetection.h"
#include "polly/LinkAllPasses.h"
#include "polly/Options.h"
#include "polly/ScopDetectionDiagnostic.h"
#include "polly/Support/SCEVValidator.h"
#include "polly/Support/ScopHelper.h"
#include "polly/Support/ScopLocation.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/Delinearization.h"
#include "llvm/Analysis/Loads.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/Analysis/RegionInfo.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DiagnosticInfo.h"
#include "llvm/IR/DiagnosticPrinter.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/Regex.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <memory>
#include <stack>
#include <string>
#include <utility>
#include <vector>

using namespace llvm;
using namespace polly;

#define DEBUG_TYPE "polly-detect"
// This option is set to a very high value, as analyzing such loops increases
// compile time in several cases. For experiments that enable this option,
// a value of around 40 has worked to avoid run-time regressions with Polly
// while still exposing interesting optimization opportunities.
static cl::opt<int> ProfitabilityMinPerLoopInstructions(
    "polly-detect-profitability-min-per-loop-insts",
    cl::desc("The minimal number of per-loop instructions before a single loop "
             "region is considered profitable"),
    cl::Hidden, cl::ValueRequired, cl::init(100000000), cl::cat(PollyCategory));
bool polly::PollyProcessUnprofitable;

static cl::opt<bool, true> XPollyProcessUnprofitable(
    "polly-process-unprofitable",
    cl::desc(
        "Process scops that are unlikely to benefit from Polly optimizations."),
    cl::location(PollyProcessUnprofitable), cl::cat(PollyCategory));

static cl::list<std::string> OnlyFunctions(
    "polly-only-func",
    cl::desc("Only run on functions that match a regex. "
             "Multiple regexes can be comma separated. "
             "Scop detection will run on all functions that match "
             "ANY of the regexes provided."),
    cl::CommaSeparated, cl::cat(PollyCategory));

static cl::list<std::string> IgnoredFunctions(
    "polly-ignore-func",
    cl::desc("Ignore functions that match a regex. "
             "Multiple regexes can be comma separated. "
             "Scop detection will ignore all functions that match "
             "ANY of the regexes provided."),
    cl::CommaSeparated, cl::cat(PollyCategory));

bool polly::PollyAllowFullFunction;

static cl::opt<bool, true>
    XAllowFullFunction("polly-detect-full-functions",
                       cl::desc("Allow the detection of full functions"),
                       cl::location(polly::PollyAllowFullFunction),
                       cl::init(false), cl::cat(PollyCategory));
static cl::opt<std::string> OnlyRegion(
    "polly-only-region",
    cl::desc("Only run on certain regions (The provided identifier must "
             "appear in the name of the region's entry block)"),
    cl::value_desc("identifier"), cl::ValueRequired, cl::init(""),
    cl::cat(PollyCategory));
static cl::opt<bool>
    IgnoreAliasing("polly-ignore-aliasing",
                   cl::desc("Ignore possible aliasing of the array bases"),
                   cl::Hidden, cl::cat(PollyCategory));

bool polly::PollyAllowUnsignedOperations;

static cl::opt<bool, true> XPollyAllowUnsignedOperations(
    "polly-allow-unsigned-operations",
    cl::desc("Allow unsigned operations such as comparisons or zero-extends."),
    cl::location(PollyAllowUnsignedOperations), cl::Hidden, cl::init(true),
    cl::cat(PollyCategory));

bool polly::PollyUseRuntimeAliasChecks;

static cl::opt<bool, true> XPollyUseRuntimeAliasChecks(
    "polly-use-runtime-alias-checks",
    cl::desc("Use runtime alias checks to resolve possible aliasing."),
    cl::location(PollyUseRuntimeAliasChecks), cl::Hidden, cl::init(true),
    cl::cat(PollyCategory));

static cl::opt<bool>
    ReportLevel("polly-report",
                cl::desc("Print information about the activities of Polly"),
                cl::cat(PollyCategory));

static cl::opt<bool> AllowDifferentTypes(
    "polly-allow-differing-element-types",
    cl::desc("Allow different element types for array accesses"), cl::Hidden,
    cl::init(true), cl::cat(PollyCategory));

static cl::opt<bool>
    AllowNonAffine("polly-allow-nonaffine",
                   cl::desc("Allow non affine access functions in arrays"),
                   cl::Hidden, cl::cat(PollyCategory));

static cl::opt<bool>
    AllowModrefCall("polly-allow-modref-calls",
                    cl::desc("Allow functions with known modref behavior"),
                    cl::Hidden, cl::cat(PollyCategory));

static cl::opt<bool> AllowNonAffineSubRegions(
    "polly-allow-nonaffine-branches",
    cl::desc("Allow non affine conditions for branches"), cl::Hidden,
    cl::init(true), cl::cat(PollyCategory));

static cl::opt<bool>
    AllowNonAffineSubLoops("polly-allow-nonaffine-loops",
                           cl::desc("Allow non affine conditions for loops"),
                           cl::Hidden, cl::cat(PollyCategory));

static cl::opt<bool, true>
    TrackFailures("polly-detect-track-failures",
                  cl::desc("Track failure strings in detecting scop regions"),
                  cl::location(PollyTrackFailures), cl::Hidden, cl::init(true),
                  cl::cat(PollyCategory));

static cl::opt<bool> KeepGoing("polly-detect-keep-going",
                               cl::desc("Do not fail on the first error."),
                               cl::Hidden, cl::cat(PollyCategory));

static cl::opt<bool, true>
    PollyDelinearizeX("polly-delinearize",
                      cl::desc("Delinearize array access functions"),
                      cl::location(PollyDelinearize), cl::Hidden,
                      cl::init(true), cl::cat(PollyCategory));

static cl::opt<bool>
    VerifyScops("polly-detect-verify",
                cl::desc("Verify the detected SCoPs after each transformation"),
                cl::Hidden, cl::cat(PollyCategory));

bool polly::PollyInvariantLoadHoisting;

static cl::opt<bool, true>
    XPollyInvariantLoadHoisting("polly-invariant-load-hoisting",
                                cl::desc("Hoist invariant loads."),
                                cl::location(PollyInvariantLoadHoisting),
                                cl::Hidden, cl::cat(PollyCategory));

static cl::opt<bool> PollyAllowErrorBlocks(
    "polly-allow-error-blocks",
    cl::desc("Allow to speculate on the execution of 'error blocks'."),
    cl::Hidden, cl::init(true), cl::cat(PollyCategory));
/// The minimal trip count under which loops are considered unprofitable.
static const unsigned MIN_LOOP_TRIP_COUNT = 8;

bool polly::PollyTrackFailures = false;
bool polly::PollyDelinearize = false;
StringRef polly::PollySkipFnAttr = "polly.skip.fn";

//===----------------------------------------------------------------------===//
// Statistics.

STATISTIC(NumScopRegions, "Number of scops");
STATISTIC(NumLoopsInScop, "Number of loops in scops");
STATISTIC(NumScopsDepthZero, "Number of scops with maximal loop depth 0");
STATISTIC(NumScopsDepthOne, "Number of scops with maximal loop depth 1");
STATISTIC(NumScopsDepthTwo, "Number of scops with maximal loop depth 2");
STATISTIC(NumScopsDepthThree, "Number of scops with maximal loop depth 3");
STATISTIC(NumScopsDepthFour, "Number of scops with maximal loop depth 4");
STATISTIC(NumScopsDepthFive, "Number of scops with maximal loop depth 5");
STATISTIC(NumScopsDepthLarger,
          "Number of scops with maximal loop depth 6 and larger");
STATISTIC(NumProfScopRegions, "Number of scops (profitable scops only)");
STATISTIC(NumLoopsInProfScop,
          "Number of loops in scops (profitable scops only)");
STATISTIC(NumLoopsOverall, "Number of total loops");
STATISTIC(NumProfScopsDepthZero,
          "Number of scops with maximal loop depth 0 (profitable scops only)");
STATISTIC(NumProfScopsDepthOne,
          "Number of scops with maximal loop depth 1 (profitable scops only)");
STATISTIC(NumProfScopsDepthTwo,
          "Number of scops with maximal loop depth 2 (profitable scops only)");
STATISTIC(NumProfScopsDepthThree,
          "Number of scops with maximal loop depth 3 (profitable scops only)");
STATISTIC(NumProfScopsDepthFour,
          "Number of scops with maximal loop depth 4 (profitable scops only)");
STATISTIC(NumProfScopsDepthFive,
          "Number of scops with maximal loop depth 5 (profitable scops only)");
STATISTIC(NumProfScopsDepthLarger,
          "Number of scops with maximal loop depth 6 and larger "
          "(profitable scops only)");
STATISTIC(MaxNumLoopsInScop, "Maximal number of loops in scops");
STATISTIC(MaxNumLoopsInProfScop,
          "Maximal number of loops in scops (profitable scops only)");

static void updateLoopCountStatistic(ScopDetection::LoopStats Stats,
                                     bool OnlyProfitable);
namespace {

class DiagnosticScopFound final : public DiagnosticInfo {
private:
  static int PluginDiagnosticKind;

  Function &F;
  std::string FileName;
  unsigned EntryLine, ExitLine;

public:
  DiagnosticScopFound(Function &F, std::string FileName, unsigned EntryLine,
                      unsigned ExitLine)
      : DiagnosticInfo(PluginDiagnosticKind, DS_Note), F(F), FileName(FileName),
        EntryLine(EntryLine), ExitLine(ExitLine) {}

  void print(DiagnosticPrinter &DP) const override;

  static bool classof(const DiagnosticInfo *DI) {
    return DI->getKind() == PluginDiagnosticKind;
  }
};

} // namespace

int DiagnosticScopFound::PluginDiagnosticKind =
    getNextAvailablePluginDiagnosticKind();

void DiagnosticScopFound::print(DiagnosticPrinter &DP) const {
  DP << "Polly detected an optimizable loop region (scop) in function '" << F
     << "'\n";

  if (FileName.empty()) {
    DP << "Scop location is unknown. Compile with debug info "
          "(-g) to get more precise information. ";
    return;
  }

  DP << FileName << ":" << EntryLine << ": Start of scop\n";
  DP << FileName << ":" << ExitLine << ": End of scop";
}

/// Check if a string matches any regex in a list of regexes.
///
/// @param Str the input string to match against.
/// @param RegexList a list of strings that are regular expressions.
static bool doesStringMatchAnyRegex(StringRef Str,
                                    const cl::list<std::string> &RegexList) {
  for (auto RegexStr : RegexList) {
    Regex R(RegexStr);

    std::string Err;
    if (!R.isValid(Err))
      report_fatal_error(Twine("invalid regex given as input to polly: ") + Err,
                         true);

    if (R.match(Str))
      return true;
  }
  return false;
}
//===----------------------------------------------------------------------===//
// ScopDetection.

ScopDetection::ScopDetection(const DominatorTree &DT, ScalarEvolution &SE,
                             LoopInfo &LI, RegionInfo &RI, AAResults &AA,
                             OptimizationRemarkEmitter &ORE)
    : DT(DT), SE(SE), LI(LI), RI(RI), AA(AA), ORE(ORE) {}

void ScopDetection::detect(Function &F) {
  assert(ValidRegions.empty() && "Detection must run only once");

  if (!PollyProcessUnprofitable && LI.empty())
    return;

  Region *TopRegion = RI.getTopLevelRegion();

  if (!OnlyFunctions.empty() &&
      !doesStringMatchAnyRegex(F.getName(), OnlyFunctions))
    return;

  if (doesStringMatchAnyRegex(F.getName(), IgnoredFunctions))
    return;

  if (!isValidFunction(F))
    return;

  findScops(*TopRegion);

  NumScopRegions += ValidRegions.size();

  // Prune non-profitable regions.
  for (auto &DIt : DetectionContextMap) {
    DetectionContext &DC = *DIt.getSecond().get();
    if (DC.Log.hasErrors())
      continue;
    if (!ValidRegions.count(&DC.CurRegion))
      continue;
    LoopStats Stats = countBeneficialLoops(&DC.CurRegion, SE, LI, 0);
    updateLoopCountStatistic(Stats, false /* OnlyProfitable */);
    if (isProfitableRegion(DC)) {
      updateLoopCountStatistic(Stats, true /* OnlyProfitable */);
      continue;
    }

    ValidRegions.remove(&DC.CurRegion);
  }

  NumProfScopRegions += ValidRegions.size();
  NumLoopsOverall += countBeneficialLoops(TopRegion, SE, LI, 0).NumLoops;

  // Only makes sense when we tracked errors.
  if (PollyTrackFailures)
    emitMissedRemarks(F);

  if (ReportLevel)
    printLocations(F);

  assert(ValidRegions.size() <= DetectionContextMap.size() &&
         "Cached more results than valid regions");
}
template <class RR, typename... Args>
inline bool ScopDetection::invalid(DetectionContext &Context, bool Assert,
                                   Args &&...Arguments) const {
  if (!Context.Verifying) {
    RejectLog &Log = Context.Log;
    std::shared_ptr<RR> RejectReason = std::make_shared<RR>(Arguments...);
    Context.IsInvalid = true;

    // Log even if PollyTrackFailures is false; the log entries are also used
    // by canUseISLTripCount().
    Log.report(RejectReason);

    LLVM_DEBUG(dbgs() << RejectReason->getMessage());
    LLVM_DEBUG(dbgs() << "\n");
  } else {
    assert(!Assert && "Verification of detected scop failed");
  }

  return false;
}
bool ScopDetection::isMaxRegionInScop(const Region &R, bool Verify) {
  if (!ValidRegions.count(&R))
    return false;

  if (Verify) {
    BBPair P = getBBPairForRegion(&R);
    std::unique_ptr<DetectionContext> &Entry = DetectionContextMap[P];

    // Free the previous DetectionContext for the region and create and verify
    // a new one. Be sure that the DetectionContext is not still used by a
    // ScopInfo. Due to changes by CodeGeneration of another Scop, the Region
    // object and the BBPair might not match anymore.
    Entry = std::make_unique<DetectionContext>(const_cast<Region &>(R), AA,
                                               /*Verifying=*/false);

    return isValidRegion(*Entry.get());
  }

  return true;
}
std::string ScopDetection::regionIsInvalidBecause(const Region *R) const {
  // Get the first error we found. Even in keep-going mode, this is the first
  // reason that caused the candidate to be rejected.
  auto *Log = lookupRejectionLog(R);

  // This can happen when we marked a region invalid, but didn't track
  // an error for it.
  if (!Log || !Log->hasErrors())
    return "";

  RejectReasonPtr RR = *Log->begin();
  return RR->getMessage();
}
bool ScopDetection::addOverApproximatedRegion(Region *AR,
                                              DetectionContext &Context) const {
  // If we already know about AR we can exit.
  if (!Context.NonAffineSubRegionSet.insert(AR))
    return true;

  // All loops in the region have to be overapproximated too if there
  // are accesses that depend on the iteration count.
  for (BasicBlock *BB : AR->blocks()) {
    Loop *L = LI.getLoopFor(BB);
    if (AR->contains(L))
      Context.BoxedLoopsSet.insert(L);
  }

  return (AllowNonAffineSubLoops || Context.BoxedLoopsSet.empty());
}
bool ScopDetection::onlyValidRequiredInvariantLoads(
    InvariantLoadsSetTy &RequiredILS, DetectionContext &Context) const {
  Region &CurRegion = Context.CurRegion;
  const DataLayout &DL = CurRegion.getEntry()->getModule()->getDataLayout();

  if (!PollyInvariantLoadHoisting && !RequiredILS.empty())
    return false;

  for (LoadInst *Load : RequiredILS) {
    // If we already know a load has been accepted as required invariant, we
    // have already run the validation below once and consequently don't need
    // to run it again. Hence, we return early. For certain test cases (e.g.,
    // COSMO) this avoids spending 50% of scop-detection time in this very
    // function (and its children).
    if (Context.RequiredILS.count(Load))
      continue;
    if (!isHoistableLoad(Load, CurRegion, LI, SE, DT, Context.RequiredILS))
      return false;

    for (auto NonAffineRegion : Context.NonAffineSubRegionSet) {
      if (isSafeToLoadUnconditionally(Load->getPointerOperand(),
                                      Load->getType(), Load->getAlign(), DL))
        continue;

      if (NonAffineRegion->contains(Load) &&
          Load->getParent() != NonAffineRegion->getEntry())
        return false;
    }
  }

  Context.RequiredILS.insert(RequiredILS.begin(), RequiredILS.end());

  return true;
}
bool ScopDetection::involvesMultiplePtrs(const SCEV *S0, const SCEV *S1,
                                         Loop *Scope) const {
  SetVector<Value *> Values;
  findValues(S0, SE, Values);
  if (S1)
    findValues(S1, SE, Values);

  SmallPtrSet<Value *, 8> PtrVals;
  for (auto *V : Values) {
    if (auto *P2I = dyn_cast<PtrToIntInst>(V))
      V = P2I->getOperand(0);

    if (!V->getType()->isPointerTy())
      continue;

    auto *PtrSCEV = SE.getSCEVAtScope(V, Scope);
    if (isa<SCEVConstant>(PtrSCEV))
      continue;

    auto *BasePtr = dyn_cast<SCEVUnknown>(SE.getPointerBase(PtrSCEV));
    if (!BasePtr)
      return true;

    auto *BasePtrVal = BasePtr->getValue();
    if (PtrVals.insert(BasePtrVal).second) {
      for (auto *PtrVal : PtrVals)
        if (PtrVal != BasePtrVal && !AA.isNoAlias(PtrVal, BasePtrVal))
          return true;
    }
  }

  return false;
}

bool ScopDetection::isAffine(const SCEV *S, Loop *Scope,
                             DetectionContext &Context) const {
  InvariantLoadsSetTy AccessILS;
  if (!isAffineExpr(&Context.CurRegion, Scope, S, SE, &AccessILS))
    return false;

  if (!onlyValidRequiredInvariantLoads(AccessILS, Context))
    return false;

  return true;
}
bool ScopDetection::isValidSwitch(BasicBlock &BB, SwitchInst *SI,
                                  Value *Condition, bool IsLoopBranch,
                                  DetectionContext &Context) const {
  Loop *L = LI.getLoopFor(&BB);
  const SCEV *ConditionSCEV = SE.getSCEVAtScope(Condition, L);

  if (IsLoopBranch && L->isLoopLatch(&BB))
    return false;

  // Check for invalid usage of different pointers in one expression.
  if (involvesMultiplePtrs(ConditionSCEV, nullptr, L))
    return false;

  if (isAffine(ConditionSCEV, L, Context))
    return true;

  if (AllowNonAffineSubRegions &&
      addOverApproximatedRegion(RI.getRegionFor(&BB), Context))
    return true;

  return invalid<ReportNonAffBranch>(Context, /*Assert=*/true, &BB,
                                     ConditionSCEV, ConditionSCEV, SI);
}
bool ScopDetection::isValidBranch(BasicBlock &BB, BranchInst *BI,
                                  Value *Condition, bool IsLoopBranch,
                                  DetectionContext &Context) {
  // Constant integer conditions are always affine.
  if (isa<ConstantInt>(Condition))
    return true;

  if (BinaryOperator *BinOp = dyn_cast<BinaryOperator>(Condition)) {
    auto Opcode = BinOp->getOpcode();
    if (Opcode == Instruction::And || Opcode == Instruction::Or) {
      Value *Op0 = BinOp->getOperand(0);
      Value *Op1 = BinOp->getOperand(1);
      return isValidBranch(BB, BI, Op0, IsLoopBranch, Context) &&
             isValidBranch(BB, BI, Op1, IsLoopBranch, Context);
    }
  }

  if (auto PHI = dyn_cast<PHINode>(Condition)) {
    auto *Unique = dyn_cast_or_null<ConstantInt>(
        getUniqueNonErrorValue(PHI, &Context.CurRegion, this));
    if (Unique && (Unique->isZero() || Unique->isOne()))
      return true;
  }

  if (auto Load = dyn_cast<LoadInst>(Condition))
    if (!IsLoopBranch && Context.CurRegion.contains(Load)) {
      Context.RequiredILS.insert(Load);
      return true;
    }

  // Non-constant conditions of branches need to be ICmpInsts.
  if (!isa<ICmpInst>(Condition)) {
    if (!IsLoopBranch && AllowNonAffineSubRegions &&
        addOverApproximatedRegion(RI.getRegionFor(&BB), Context))
      return true;
    return invalid<ReportInvalidCond>(Context, /*Assert=*/true, BI, &BB);
  }

  ICmpInst *ICmp = cast<ICmpInst>(Condition);

  // Are both operands of the ICmp affine?
  if (isa<UndefValue>(ICmp->getOperand(0)) ||
      isa<UndefValue>(ICmp->getOperand(1)))
    return invalid<ReportUndefOperand>(Context, /*Assert=*/true, &BB, ICmp);

  Loop *L = LI.getLoopFor(&BB);
  const SCEV *LHS = SE.getSCEVAtScope(ICmp->getOperand(0), L);
  const SCEV *RHS = SE.getSCEVAtScope(ICmp->getOperand(1), L);

  LHS = tryForwardThroughPHI(LHS, Context.CurRegion, SE, this);
  RHS = tryForwardThroughPHI(RHS, Context.CurRegion, SE, this);

  // If unsigned operations are not allowed, try to approximate the region.
  if (ICmp->isUnsigned() && !PollyAllowUnsignedOperations)
    return !IsLoopBranch && AllowNonAffineSubRegions &&
           addOverApproximatedRegion(RI.getRegionFor(&BB), Context);

  // Check for invalid usage of different pointers in one expression.
  if (ICmp->isEquality() && involvesMultiplePtrs(LHS, nullptr, L) &&
      involvesMultiplePtrs(RHS, nullptr, L))
    return false;

  // Check for invalid usage of different pointers in a relational comparison.
  if (ICmp->isRelational() && involvesMultiplePtrs(LHS, RHS, L))
    return false;

  if (isAffine(LHS, L, Context) && isAffine(RHS, L, Context))
    return true;

  if (!IsLoopBranch && AllowNonAffineSubRegions &&
      addOverApproximatedRegion(RI.getRegionFor(&BB), Context))
    return true;

  if (IsLoopBranch)
    return false;

  return invalid<ReportNonAffBranch>(Context, /*Assert=*/true, &BB, LHS, RHS,
                                     ICmp);
}
bool ScopDetection::isValidCFG(BasicBlock &BB, bool IsLoopBranch,
                               bool AllowUnreachable,
                               DetectionContext &Context) {
  Region &CurRegion = Context.CurRegion;

  Instruction *TI = BB.getTerminator();

  if (AllowUnreachable && isa<UnreachableInst>(TI))
    return true;

  // Return instructions are only valid if the region is the top level region.
  if (isa<ReturnInst>(TI) && CurRegion.isTopLevelRegion())
    return true;

  Value *Condition = getConditionFromTerminator(TI);

  if (!Condition)
    return invalid<ReportInvalidTerminator>(Context, /*Assert=*/true, &BB);

  // UndefValue is not allowed as condition.
  if (isa<UndefValue>(Condition))
    return invalid<ReportUndefCond>(Context, /*Assert=*/true, TI, &BB);

  if (BranchInst *BI = dyn_cast<BranchInst>(TI))
    return isValidBranch(BB, BI, Condition, IsLoopBranch, Context);

  SwitchInst *SI = dyn_cast<SwitchInst>(TI);
  assert(SI && "Terminator was neither branch nor switch");

  return isValidSwitch(BB, SI, Condition, IsLoopBranch, Context);
}
bool ScopDetection::isValidCallInst(CallInst &CI,
                                    DetectionContext &Context) const {
  if (CI.doesNotReturn())
    return false;

  if (CI.doesNotAccessMemory())
    return true;

  if (auto *II = dyn_cast<IntrinsicInst>(&CI))
    if (isValidIntrinsicInst(*II, Context))
      return true;

  Function *CalledFunction = CI.getCalledFunction();

  // Indirect calls are not supported.
  if (CalledFunction == nullptr)
    return false;

  if (isDebugCall(&CI)) {
    LLVM_DEBUG(dbgs() << "Allow call to debug function: "
                      << CalledFunction->getName() << '\n');
    return true;
  }

  if (AllowModrefCall) {
    MemoryEffects ME = AA.getMemoryEffects(CalledFunction);
    if (ME.onlyAccessesArgPointees()) {
      for (const auto &Arg : CI.args()) {
        if (!Arg->getType()->isPointerTy())
          continue;

        // Bail if a pointer argument has a base address not known to
        // ScalarEvolution. Note that a zero pointer is acceptable.
        auto *ArgSCEV = SE.getSCEVAtScope(Arg, LI.getLoopFor(CI.getParent()));
        if (ArgSCEV->isZero())
          continue;

        auto *BP = dyn_cast<SCEVUnknown>(SE.getPointerBase(ArgSCEV));
        if (!BP)
          return false;

        // Implicitly disable delinearization since we have an unknown
        // access with an unknown access function.
        Context.HasUnknownAccess = true;
      }

      // Explicitly use addUnknown so we don't put a loop-variant
      // pointer into the alias set.
      Context.AST.addUnknown(&CI);
      return true;
    }

    if (ME.onlyReadsMemory()) {
      // Implicitly disable delinearization since we have an unknown
      // access with an unknown access function.
      Context.HasUnknownAccess = true;

      // Explicitly use addUnknown so we don't put a loop-variant
      // pointer into the alias set.
      Context.AST.addUnknown(&CI);
      return true;
    }
    return false;
  }

  return false;
}
bool ScopDetection::isValidIntrinsicInst(IntrinsicInst &II,
                                         DetectionContext &Context) const {
  if (isIgnoredIntrinsic(&II))
    return true;

  // The closest loop surrounding the call instruction.
  Loop *L = LI.getLoopFor(II.getParent());

  // The access function and base pointer for memory intrinsics.
  const SCEV *AF;
  const SCEVUnknown *BP;

  switch (II.getIntrinsicID()) {
  // Memory intrinsics that can be represented are supported.
  case Intrinsic::memmove:
  case Intrinsic::memcpy:
    AF = SE.getSCEVAtScope(cast<MemTransferInst>(II).getSource(), L);
    if (!AF->isZero()) {
      BP = dyn_cast<SCEVUnknown>(SE.getPointerBase(AF));
      // Bail if the source pointer is not valid.
      if (!isValidAccess(&II, AF, BP, Context))
        return false;
    }
    [[fallthrough]];
  case Intrinsic::memset:
    AF = SE.getSCEVAtScope(cast<MemIntrinsic>(II).getDest(), L);
    if (!AF->isZero()) {
      BP = dyn_cast<SCEVUnknown>(SE.getPointerBase(AF));
      // Bail if the destination pointer is not valid.
      if (!isValidAccess(&II, AF, BP, Context))
        return false;
    }

    // Bail if the length is not affine.
    if (!isAffine(SE.getSCEVAtScope(cast<MemIntrinsic>(II).getLength(), L), L,
                  Context))
      return false;

    return true;
  default:
    break;
  }

  return false;
}
bool ScopDetection::isInvariant(Value &Val, const Region &Reg,
                                DetectionContext &Ctx) const {
  // A reference to a function argument or a constant value is invariant.
  if (isa<Argument>(Val) || isa<Constant>(Val))
    return true;

  Instruction *I = dyn_cast<Instruction>(&Val);
  if (!I)
    return false;

  if (!Reg.contains(I))
    return true;

  // Loads within the SCoP may read arbitrary values, so we need to hoist
  // them. If a load is not hoistable, it will be rejected later, but here we
  // assume it is and that makes the value invariant.
  if (auto LI = dyn_cast<LoadInst>(I)) {
    Ctx.RequiredILS.insert(LI);
    return true;
  }

  return false;
}
namespace {

/// Remove smax(0, size) expressions from a SCEV expression and register the
/// 'size' components.
///
/// Array access expressions as they are generated by GFortran contain smax(0,
/// size) expressions that confuse the 'normal' delinearization algorithm.
/// However, if we extract such expressions before the normal delinearization
/// takes place they can actually help to identify array size expressions in
/// Fortran accesses. For the subsequent delinearization the smax(0, size)
/// component can be replaced by just 'size'. This is correct as we will
/// always add and verify the assumption that for all subscript expressions
/// 'exp' the inequality 0 <= exp < size holds. Hence, we will also verify
/// that 0 <= size, which means smax(0, size) == size.
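///
/// As an illustrative sketch (the names and exact SCEV shapes are
/// hypothetical and depend on the input IR): an access function such as
///   (%i * smax(0, %size)) + %j
/// is rewritten to
///   (%i * %size) + %j
/// while '%size' is recorded in Terms as a candidate array-size expression.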
class SCEVRemoveMax final : public SCEVRewriteVisitor<SCEVRemoveMax> {
public:
  SCEVRemoveMax(ScalarEvolution &SE, std::vector<const SCEV *> *Terms)
      : SCEVRewriteVisitor(SE), Terms(Terms) {}

  static const SCEV *rewrite(const SCEV *Scev, ScalarEvolution &SE,
                             std::vector<const SCEV *> *Terms = nullptr) {
    SCEVRemoveMax Rewriter(SE, Terms);
    return Rewriter.visit(Scev);
  }

  const SCEV *visitSMaxExpr(const SCEVSMaxExpr *Expr) {
    if ((Expr->getNumOperands() == 2) && Expr->getOperand(0)->isZero()) {
      auto Res = visit(Expr->getOperand(1));
      if (Terms)
        (*Terms).push_back(Res);
      return Res;
    }
    return Expr;
  }

private:
  std::vector<const SCEV *> *Terms;
};

} // namespace
SmallVector<const SCEV *, 4>
ScopDetection::getDelinearizationTerms(DetectionContext &Context,
                                       const SCEVUnknown *BasePointer) const {
  SmallVector<const SCEV *, 4> Terms;
  for (const auto &Pair : Context.Accesses[BasePointer]) {
    std::vector<const SCEV *> MaxTerms;
    SCEVRemoveMax::rewrite(Pair.second, SE, &MaxTerms);
    if (!MaxTerms.empty()) {
      Terms.insert(Terms.begin(), MaxTerms.begin(), MaxTerms.end());
      continue;
    }
    // In case the outermost expression is a plain add, we check if any of its
    // terms has the form 4 * %inst * %param * %param ..., i.e., a term that
    // contains a product of a parameter and an instruction that is inside the
    // scop. Such instructions, if allowed at all, are instructions SCEV
    // cannot represent, but which Polly is still looking through. As a
    // result, these instructions can depend on induction variables and are
    // most likely not array sizes. However, terms that are multiplied with
    // them are likely candidates for array sizes.
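    //
    // Illustrative sketch (the names are hypothetical): for an addend such as
    //   8 * %call * %n
    // where %call is an instruction defined inside the scop, the %call factor
    // is dropped and the remaining product (here 8 * %n) is kept as a
    // candidate array-size term, matching the filtering done below.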
    if (auto *AF = dyn_cast<SCEVAddExpr>(Pair.second)) {
      for (auto Op : AF->operands()) {
        if (auto *AF2 = dyn_cast<SCEVAddRecExpr>(Op))
          collectParametricTerms(SE, AF2, Terms);
        if (auto *AF2 = dyn_cast<SCEVMulExpr>(Op)) {
          SmallVector<const SCEV *, 0> Operands;

          for (auto *MulOp : AF2->operands()) {
            if (auto *Const = dyn_cast<SCEVConstant>(MulOp))
              Operands.push_back(Const);
            if (auto *Unknown = dyn_cast<SCEVUnknown>(MulOp)) {
              if (auto *Inst = dyn_cast<Instruction>(Unknown->getValue())) {
                if (!Context.CurRegion.contains(Inst))
                  Operands.push_back(MulOp);
              } else {
                Operands.push_back(MulOp);
              }
            }
          }
          if (Operands.size())
            Terms.push_back(SE.getMulExpr(Operands));
        }
      }
    }
    if (Terms.empty())
      collectParametricTerms(SE, Pair.second, Terms);
  }
  return Terms;
}
bool ScopDetection::hasValidArraySizes(DetectionContext &Context,
                                       SmallVectorImpl<const SCEV *> &Sizes,
                                       const SCEVUnknown *BasePointer,
                                       Loop *Scope) const {
  // If no sizes were found, all sizes are trivially valid. We allow this case
  // to make it possible to pass known-affine accesses to the delinearization
  // to try to recover some interesting multi-dimensional accesses, while
  // still accepting the accesses already known to be affine in case the
  // delinearization fails. In such situations, the delinearization will just
  // return a Sizes array of size zero.
  if (Sizes.size() == 0)
    return true;

  Value *BaseValue = BasePointer->getValue();
  Region &CurRegion = Context.CurRegion;
  for (const SCEV *DelinearizedSize : Sizes) {
    // Don't pass down the scope to isAffine; array dimensions must be
    // invariant across the entire scop.
    if (!isAffine(DelinearizedSize, nullptr, Context)) {
      Sizes.clear();
      break;
    }
    if (auto *Unknown = dyn_cast<SCEVUnknown>(DelinearizedSize)) {
      auto *V = dyn_cast<Value>(Unknown->getValue());
      if (auto *Load = dyn_cast<LoadInst>(V)) {
        if (Context.CurRegion.contains(Load) &&
            isHoistableLoad(Load, CurRegion, LI, SE, DT, Context.RequiredILS))
          Context.RequiredILS.insert(Load);
        continue;
      }
    }
    if (hasScalarDepsInsideRegion(DelinearizedSize, &CurRegion, Scope, false,
                                  Context.RequiredILS))
      return invalid<ReportNonAffineAccess>(
          Context, /*Assert=*/true, DelinearizedSize,
          Context.Accesses[BasePointer].front().first, BaseValue);
  }

  // No array shape derived.
  if (Sizes.empty()) {
    if (AllowNonAffine)
      return true;

    for (const auto &Pair : Context.Accesses[BasePointer]) {
      const Instruction *Insn = Pair.first;
      const SCEV *AF = Pair.second;

      if (!isAffine(AF, Scope, Context)) {
        invalid<ReportNonAffineAccess>(Context, /*Assert=*/true, AF, Insn,
                                       BaseValue);
        if (!KeepGoing)
          return false;
      }
    }
    return false;
  }
  return true;
}
// We first store the resulting memory accesses in TempMemoryAccesses. Only
// if the access functions for all memory accesses have been successfully
// delinearized do we continue. Otherwise, we either report a failure or, if
// non-affine accesses are allowed, we drop the information. In case the
// information is dropped the memory accesses need to be overapproximated
// when translated to a polyhedral representation.
bool ScopDetection::computeAccessFunctions(
    DetectionContext &Context, const SCEVUnknown *BasePointer,
    std::shared_ptr<ArrayShape> Shape) const {
  Value *BaseValue = BasePointer->getValue();
  bool BasePtrHasNonAffine = false;
  MapInsnToMemAcc TempMemoryAccesses;
  for (const auto &Pair : Context.Accesses[BasePointer]) {
    const Instruction *Insn = Pair.first;
    auto *AF = Pair.second;
    AF = SCEVRemoveMax::rewrite(AF, SE);
    bool IsNonAffine = false;
    TempMemoryAccesses.insert(std::make_pair(Insn, MemAcc(Insn, Shape)));
    MemAcc *Acc = &TempMemoryAccesses.find(Insn)->second;
    auto *Scope = LI.getLoopFor(Insn->getParent());

    if (!AF) {
      if (isAffine(Pair.second, Scope, Context))
        Acc->DelinearizedSubscripts.push_back(Pair.second);
      else
        IsNonAffine = true;
    } else {
      if (Shape->DelinearizedSizes.size() == 0) {
        Acc->DelinearizedSubscripts.push_back(AF);
      } else {
        llvm::computeAccessFunctions(SE, AF, Acc->DelinearizedSubscripts,
                                     Shape->DelinearizedSizes);
        if (Acc->DelinearizedSubscripts.size() == 0)
          IsNonAffine = true;
      }
      for (const SCEV *S : Acc->DelinearizedSubscripts)
        if (!isAffine(S, Scope, Context))
          IsNonAffine = true;
    }

    // (Possibly) report the non-affine access.
    if (IsNonAffine) {
      BasePtrHasNonAffine = true;
      if (!AllowNonAffine) {
        invalid<ReportNonAffineAccess>(Context, /*Assert=*/true, Pair.second,
                                       Insn, BaseValue);
        if (!KeepGoing)
          return false;
      }
    }
  }

  if (!BasePtrHasNonAffine)
    Context.InsnToMemAcc.insert(TempMemoryAccesses.begin(),
                                TempMemoryAccesses.end());

  return true;
}
bool ScopDetection::hasBaseAffineAccesses(DetectionContext &Context,
                                          const SCEVUnknown *BasePointer,
                                          Loop *Scope) const {
  auto Shape = std::shared_ptr<ArrayShape>(new ArrayShape(BasePointer));

  auto Terms = getDelinearizationTerms(Context, BasePointer);

  findArrayDimensions(SE, Terms, Shape->DelinearizedSizes,
                      Context.ElementSize[BasePointer]);

  if (!hasValidArraySizes(Context, Shape->DelinearizedSizes, BasePointer,
                          Scope))
    return false;

  return computeAccessFunctions(Context, BasePointer, Shape);
}

bool ScopDetection::hasAffineMemoryAccesses(DetectionContext &Context) const {
  // TODO: If we have an unknown access and other non-affine accesses we do
  //       not try to delinearize them for now.
  if (Context.HasUnknownAccess && !Context.NonAffineAccesses.empty())
    return AllowNonAffine;

  for (auto &Pair : Context.NonAffineAccesses) {
    auto *BasePointer = Pair.first;
    auto *Scope = Pair.second;
    if (!hasBaseAffineAccesses(Context, BasePointer, Scope)) {
      Context.IsInvalid = true;
      if (!KeepGoing)
        return false;
    }
  }
  return true;
}
bool ScopDetection::isValidAccess(Instruction *Inst, const SCEV *AF,
                                  const SCEVUnknown *BP,
                                  DetectionContext &Context) const {
  if (!BP)
    return invalid<ReportNoBasePtr>(Context, /*Assert=*/true, Inst);

  auto *BV = BP->getValue();
  if (isa<UndefValue>(BV))
    return invalid<ReportUndefBasePtr>(Context, /*Assert=*/true, Inst);

  // FIXME: Think about allowing IntToPtrInst
  if (IntToPtrInst *Inst = dyn_cast<IntToPtrInst>(BV))
    return invalid<ReportIntToPtr>(Context, /*Assert=*/true, Inst);

  // Check that the base address of the access is invariant in the current
  // region.
  if (!isInvariant(*BV, Context.CurRegion, Context))
    return invalid<ReportVariantBasePtr>(Context, /*Assert=*/true, BV, Inst);

  AF = SE.getMinusSCEV(AF, BP);

  const SCEV *Size;
  if (!isa<MemIntrinsic>(Inst)) {
    Size = SE.getElementSize(Inst);
  } else {
    auto *SizeTy =
        SE.getEffectiveSCEVType(PointerType::getInt8PtrTy(SE.getContext()));
    Size = SE.getConstant(SizeTy, 8);
  }

  if (Context.ElementSize[BP]) {
    if (!AllowDifferentTypes && Context.ElementSize[BP] != Size)
      return invalid<ReportDifferentArrayElementSize>(Context, /*Assert=*/true,
                                                      Inst, BV);

    Context.ElementSize[BP] = SE.getSMinExpr(Size, Context.ElementSize[BP]);
  } else {
    Context.ElementSize[BP] = Size;
  }

  bool IsVariantInNonAffineLoop = false;
  SetVector<const Loop *> Loops;
  findLoops(AF, Loops);
  for (const Loop *L : Loops)
    if (Context.BoxedLoopsSet.count(L))
      IsVariantInNonAffineLoop = true;

  auto *Scope = LI.getLoopFor(Inst->getParent());
  bool IsAffine = !IsVariantInNonAffineLoop && isAffine(AF, Scope, Context);
  // Do not try to delinearize memory intrinsics and force them to be affine.
  if (isa<MemIntrinsic>(Inst) && !IsAffine) {
    return invalid<ReportNonAffineAccess>(Context, /*Assert=*/true, AF, Inst,
                                          BV);
  } else if (PollyDelinearize && !IsVariantInNonAffineLoop) {
    Context.Accesses[BP].push_back({Inst, AF});

    if (!IsAffine)
      Context.NonAffineAccesses.insert(
          std::make_pair(BP, LI.getLoopFor(Inst->getParent())));
  } else if (!AllowNonAffine && !IsAffine) {
    return invalid<ReportNonAffineAccess>(Context, /*Assert=*/true, AF, Inst,
                                          BV);
  }

  if (IgnoreAliasing)
    return true;
  // Check if the base pointer of the memory access aliases with any other
  // pointer. This cannot be handled at the moment.
  AAMDNodes AATags = Inst->getAAMetadata();
  AliasSet &AS = Context.AST.getAliasSetFor(
      MemoryLocation::getBeforeOrAfter(BP->getValue(), AATags));

  if (!AS.isMustAlias()) {
    if (PollyUseRuntimeAliasChecks) {
      bool CanBuildRunTimeCheck = true;
      // The run-time alias check places code that involves the base pointer at
      // the beginning of the SCoP. This breaks if the base pointer is defined
      // inside the scop. Hence, we can only create a run-time check if we are
      // sure the base pointer is not an instruction defined inside the scop.
      // However, we can ignore loads that will be hoisted.

      InvariantLoadsSetTy VariantLS, InvariantLS;
      // In order to detect loads that depend on other invariant loads as
      // invariant, we use a fixed-point iteration, i.e., we iterate over the
      // alias set until a pass leaves both sets unchanged, at which point it
      // is safe to assume that all the invariant loads have been detected.
      while (true) {
        const unsigned int VariantSize = VariantLS.size(),
                           InvariantSize = InvariantLS.size();

        for (const auto &Ptr : AS) {
          Instruction *Inst = dyn_cast<Instruction>(Ptr.getValue());
          if (Inst && Context.CurRegion.contains(Inst)) {
            auto *Load = dyn_cast<LoadInst>(Inst);
            if (Load && InvariantLS.count(Load))
              continue;
            if (Load && isHoistableLoad(Load, Context.CurRegion, LI, SE, DT,
                                        InvariantLS)) {
              if (VariantLS.count(Load))
                VariantLS.remove(Load);
              Context.RequiredILS.insert(Load);
              InvariantLS.insert(Load);
            } else {
              CanBuildRunTimeCheck = false;
              VariantLS.insert(Load);
            }
          }
        }

        if (InvariantSize == InvariantLS.size() &&
            VariantSize == VariantLS.size())
          break;
      }

      if (CanBuildRunTimeCheck)
        return true;
    }
    return invalid<ReportAlias>(Context, /*Assert=*/true, Inst, AS);
  }

  return true;
}
bool ScopDetection::isValidMemoryAccess(MemAccInst Inst,
                                        DetectionContext &Context) const {
  Value *Ptr = Inst.getPointerOperand();
  Loop *L = LI.getLoopFor(Inst->getParent());
  const SCEV *AccessFunction = SE.getSCEVAtScope(Ptr, L);
  const SCEVUnknown *BasePointer;

  BasePointer = dyn_cast<SCEVUnknown>(SE.getPointerBase(AccessFunction));

  return isValidAccess(Inst, AccessFunction, BasePointer, Context);
}
bool ScopDetection::isValidInstruction(Instruction &Inst,
                                       DetectionContext &Context) {
  for (auto &Op : Inst.operands()) {
    auto *OpInst = dyn_cast<Instruction>(&Op);

    if (!OpInst)
      continue;

    if (isErrorBlock(*OpInst->getParent(), Context.CurRegion)) {
      auto *PHI = dyn_cast<PHINode>(OpInst);
      if (PHI) {
        for (User *U : PHI->users()) {
          auto *UI = dyn_cast<Instruction>(U);
          if (!UI || !UI->isTerminator())
            return false;
        }
      } else {
        return false;
      }
    }
  }

  if (isa<LandingPadInst>(&Inst) || isa<ResumeInst>(&Inst))
    return false;

  // We only check the call instruction but not the invoke instruction.
  if (CallInst *CI = dyn_cast<CallInst>(&Inst)) {
    if (isValidCallInst(*CI, Context))
      return true;

    return invalid<ReportFuncCall>(Context, /*Assert=*/true, &Inst);
  }

  if (!Inst.mayReadOrWriteMemory()) {
    if (!isa<AllocaInst>(Inst))
      return true;

    return invalid<ReportAlloca>(Context, /*Assert=*/true, &Inst);
  }

  // Check the access function.
  if (auto MemInst = MemAccInst::dyn_cast(Inst)) {
    Context.hasStores |= isa<StoreInst>(MemInst);
    Context.hasLoads |= isa<LoadInst>(MemInst);
    if (!MemInst.isSimple())
      return invalid<ReportNonSimpleMemoryAccess>(Context, /*Assert=*/true,
                                                  &Inst);

    return isValidMemoryAccess(MemInst, Context);
  }

  // We do not know this instruction, therefore we assume it is invalid.
  return invalid<ReportUnknownInst>(Context, /*Assert=*/true, &Inst);
}
/// Check whether @p L has exiting blocks.
///
/// @param L The loop of interest
///
/// @return True if the loop has exiting blocks, false otherwise.
static bool hasExitingBlocks(Loop *L) {
  SmallVector<BasicBlock *, 4> ExitingBlocks;
  L->getExitingBlocks(ExitingBlocks);
  return !ExitingBlocks.empty();
}
bool ScopDetection::canUseISLTripCount(Loop *L, DetectionContext &Context) {
  // FIXME: Yes, this is bad. isValidCFG() may call invalid<Reason>() which
  // causes the SCoP to be rejected regardless of whether non-ISL trip counts
  // could be used. We currently preserve the legacy behaviour of rejecting
  // based on Context.Log.size() added by isValidCFG() or before, regardless
  // of whether the ISL trip count can be used or whether the loop can be
  // overapproximated as a non-affine region. However, we allow rejections by
  // isValidCFG() that do not result in an error log entry.
  bool OldIsInvalid = Context.IsInvalid;

  // Ensure the loop has valid exiting blocks as well as latches, otherwise we
  // need to overapproximate it as a boxed loop.
  SmallVector<BasicBlock *, 4> LoopControlBlocks;
  L->getExitingBlocks(LoopControlBlocks);
  L->getLoopLatches(LoopControlBlocks);
  for (BasicBlock *ControlBB : LoopControlBlocks) {
    if (!isValidCFG(*ControlBB, true, false, Context)) {
      Context.IsInvalid = OldIsInvalid || Context.Log.size();
      return false;
    }
  }

  // We can use ISL to compute the trip count of L.
  Context.IsInvalid = OldIsInvalid || Context.Log.size();
  return true;
}
bool ScopDetection::isValidLoop(Loop *L, DetectionContext &Context) {
  // Loops that contain part but not all of the blocks of a region cannot be
  // handled by the schedule generation. Such loop constructs can happen
  // because a region can contain BBs that have no path to the exit block
  // (infinite loops, UnreachableInst), but such blocks are never part of a
  // loop.
  //
  //  _______________
  //  | Loop Header | <-----------.
  //  ---------------             |
  //         |                    |
  //  _______________       ______________
  //  | RegionEntry |-----> | RegionExit |----->
  //  ---------------       --------------
  //         |
  //  _______________
  //  | EndlessLoop | <--.
  //  ---------------    |
  //         |           |
  //         \-----------/
  //
  // In the example above, the loop (LoopHeader,RegionEntry,RegionExit) is
  // neither entirely contained in the region RegionEntry->RegionExit
  // (containing RegionEntry,EndlessLoop) nor is the region entirely contained
  // in the loop.
  // The block EndlessLoop is contained in the region because Region::contains
  // tests whether it is not dominated by RegionExit. This is probably to
  // avoid having to query the PostdominatorTree. Instead of an endless loop,
  // a dead end can also be formed by an UnreachableInst. This case is already
  // caught by isErrorBlock(). We hence only have to reject endless loops
  // here.
  if (!hasExitingBlocks(L))
    return invalid<ReportLoopHasNoExit>(Context, /*Assert=*/true, L);

  // The algorithm for domain construction assumes that a loop has only a
  // single exit block (and hence corresponds to a subregion). Note that we
  // cannot use L->getExitBlock() because it does not check whether all
  // exiting edges point to the same BB.
  SmallVector<BasicBlock *, 4> ExitBlocks;
  L->getExitBlocks(ExitBlocks);
  BasicBlock *TheExitBlock = ExitBlocks[0];
  for (BasicBlock *ExitBB : ExitBlocks) {
    if (TheExitBlock != ExitBB)
      return invalid<ReportLoopHasMultipleExits>(Context, /*Assert=*/true, L);
  }

  if (canUseISLTripCount(L, Context))
    return true;

  if (AllowNonAffineSubLoops && AllowNonAffineSubRegions) {
    Region *R = RI.getRegionFor(L->getHeader());
    while (R != &Context.CurRegion && !R->contains(L))
      R = R->getParent();

    if (addOverApproximatedRegion(R, Context))
      return true;
  }

  const SCEV *LoopCount = SE.getBackedgeTakenCount(L);
  return invalid<ReportLoopBound>(Context, /*Assert=*/true, L, LoopCount);
}
/// Return the number of loops in @p L (incl. @p L) that have a trip
/// count that is not known to be less than @p MinProfitableTrips.
ScopDetection::LoopStats
ScopDetection::countBeneficialSubLoops(Loop *L, ScalarEvolution &SE,
                                       unsigned MinProfitableTrips) {
  auto *TripCount = SE.getBackedgeTakenCount(L);

  int NumLoops = 1;
  int MaxLoopDepth = 1;
  if (MinProfitableTrips > 0)
    if (auto *TripCountC = dyn_cast<SCEVConstant>(TripCount))
      if (TripCountC->getType()->getScalarSizeInBits() <= 64)
        if (TripCountC->getValue()->getZExtValue() <= MinProfitableTrips)
          NumLoops -= 1;

  for (auto &SubLoop : *L) {
    LoopStats Stats = countBeneficialSubLoops(SubLoop, SE, MinProfitableTrips);
    NumLoops += Stats.NumLoops;
    MaxLoopDepth = std::max(MaxLoopDepth, Stats.MaxDepth + 1);
  }

  return {NumLoops, MaxLoopDepth};
}
ScopDetection::LoopStats
ScopDetection::countBeneficialLoops(Region *R, ScalarEvolution &SE,
                                    LoopInfo &LI, unsigned MinProfitableTrips) {
  int LoopNum = 0;
  int MaxLoopDepth = 0;

  auto L = LI.getLoopFor(R->getEntry());

  // If L is fully contained in R, move to first loop surrounding R. Otherwise,
  // L is either nullptr or already surrounding R.
  if (L && R->contains(L)) {
    L = R->outermostLoopInRegion(L);
    L = L->getParentLoop();
  }

  auto SubLoops =
      L ? L->getSubLoopsVector() : std::vector<Loop *>(LI.begin(), LI.end());

  for (auto &SubLoop : SubLoops)
    if (R->contains(SubLoop)) {
      LoopStats Stats =
          countBeneficialSubLoops(SubLoop, SE, MinProfitableTrips);
      LoopNum += Stats.NumLoops;
      MaxLoopDepth = std::max(MaxLoopDepth, Stats.MaxDepth);
    }

  return {LoopNum, MaxLoopDepth};
}
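/// Determine whether @p BB is a block of @p R that only executes in rare
/// (error-handling) cases: it is not always executed and contains a call that
/// may access memory or does not return. Unreachable-terminated blocks are
/// always considered error blocks; loop headers and blocks outside of @p R
/// never are.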
static bool isErrorBlockImpl(BasicBlock &BB, const Region &R, LoopInfo &LI,
                             const DominatorTree &DT) {
  if (isa<UnreachableInst>(BB.getTerminator()))
    return true;

  if (LI.isLoopHeader(&BB))
    return false;

  // Don't consider something outside the SCoP as error block. It will precede
  // the code versioning runtime check.
  if (!R.contains(&BB))
    return false;

  // Basic blocks that are always executed are not considered error blocks,
  // as their execution cannot be a rare event.
  bool DominatesAllPredecessors = true;
  if (R.isTopLevelRegion()) {
    for (BasicBlock &I : *R.getEntry()->getParent()) {
      if (isa<ReturnInst>(I.getTerminator()) && !DT.dominates(&BB, &I)) {
        DominatesAllPredecessors = false;
        break;
      }
    }
  } else {
    for (auto Pred : predecessors(R.getExit())) {
      if (R.contains(Pred) && !DT.dominates(&BB, Pred)) {
        DominatesAllPredecessors = false;
        break;
      }
    }
  }

  if (DominatesAllPredecessors)
    return false;

  for (Instruction &Inst : BB)
    if (CallInst *CI = dyn_cast<CallInst>(&Inst)) {
      if (isDebugCall(CI))
        continue;

      if (isIgnoredIntrinsic(CI))
        continue;

      // memset, memcpy and memmove are modeled intrinsics.
      if (isa<MemSetInst>(CI) || isa<MemTransferInst>(CI))
        continue;

      if (!CI->doesNotAccessMemory())
        return true;
      if (CI->doesNotReturn())
        return true;
    }

  return false;
}
bool ScopDetection::isErrorBlock(llvm::BasicBlock &BB, const llvm::Region &R) {
  if (!PollyAllowErrorBlocks)
    return false;

  auto It = ErrorBlockCache.insert({std::make_pair(&BB, &R), false});
  if (!It.second)
    return It.first->getSecond();

  bool Result = isErrorBlockImpl(BB, R, LI, DT);
  It.first->second = Result;
  return Result;
}
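/// Try to expand @p R to the largest surrounding region that is still a valid
/// SCoP. Ownership of the returned region passes to the caller; nullptr is
/// returned if no larger valid region exists.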
Region *ScopDetection::expandRegion(Region &R) {
  // Initially, no valid region (greater than R) has been found.
  std::unique_ptr<Region> LastValidRegion;
  auto ExpandedRegion = std::unique_ptr<Region>(R.getExpandedRegion());

  LLVM_DEBUG(dbgs() << "\tExpanding " << R.getNameStr() << "\n");

  while (ExpandedRegion) {
    BBPair P = getBBPairForRegion(ExpandedRegion.get());
    std::unique_ptr<DetectionContext> &Entry = DetectionContextMap[P];
    Entry = std::make_unique<DetectionContext>(*ExpandedRegion, AA,
                                               /*Verifying=*/false);
    DetectionContext &Context = *Entry.get();

    LLVM_DEBUG(dbgs() << "\t\tTrying " << ExpandedRegion->getNameStr() << "\n");
    // Only expand when we did not collect errors.
    if (!Context.Log.hasErrors()) {
      // If the exit is valid, check all blocks:
      //  - if true, a valid region was found => store it + keep expanding
      //  - if false, stop expanding (it is unclear whether this should really
      //    end the loop)
      if (!allBlocksValid(Context) || Context.Log.hasErrors()) {
        removeCachedResults(*ExpandedRegion);
        DetectionContextMap.erase(P);
        break;
      }

      // Store this region, because it is the greatest valid (encountered so
      // far).
      if (LastValidRegion) {
        removeCachedResults(*LastValidRegion);
        DetectionContextMap.erase(P);
      }
      LastValidRegion = std::move(ExpandedRegion);

      // Create and test the next greater region (if any)
      ExpandedRegion =
          std::unique_ptr<Region>(LastValidRegion->getExpandedRegion());
    } else {
      // Create and test the next greater region (if any)
      removeCachedResults(*ExpandedRegion);
      DetectionContextMap.erase(P);
      ExpandedRegion =
          std::unique_ptr<Region>(ExpandedRegion->getExpandedRegion());
    }
  }

  LLVM_DEBUG({
    if (LastValidRegion)
      dbgs() << "\tto " << LastValidRegion->getNameStr() << "\n";
    else
      dbgs() << "\tExpanding " << R.getNameStr() << " failed\n";
  });

  return LastValidRegion.release();
}
static bool regionWithoutLoops(Region &R, LoopInfo &LI) {
  for (const BasicBlock *BB : R.blocks())
    if (R.contains(LI.getLoopFor(BB)))
      return false;

  return true;
}
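/// Remove the cached results for all valid sub-regions of @p R, descending
/// into sub-region trees that are not themselves cached as valid.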
void ScopDetection::removeCachedResultsRecursively(const Region &R) {
  for (auto &SubRegion : R) {
    if (ValidRegions.count(SubRegion.get())) {
      removeCachedResults(*SubRegion.get());
    } else
      removeCachedResultsRecursively(*SubRegion);
  }
}

void ScopDetection::removeCachedResults(const Region &R) {
  ValidRegions.remove(&R);
}
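/// Find the maximal valid SCoPs within @p R and all of its sub-regions and
/// record them in ValidRegions. Valid canonical sub-regions are afterwards
/// expanded to also cover non-canonical regions that form a SCoP.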
void ScopDetection::findScops(Region &R) {
  std::unique_ptr<DetectionContext> &Entry =
      DetectionContextMap[getBBPairForRegion(&R)];
  Entry = std::make_unique<DetectionContext>(R, AA, /*Verifying=*/false);
  DetectionContext &Context = *Entry.get();

  bool DidBailout = true;
  if (!PollyProcessUnprofitable && regionWithoutLoops(R, LI))
    invalid<ReportUnprofitable>(Context, /*Assert=*/true, &R);
  else
    DidBailout = !isValidRegion(Context);

  (void)DidBailout;
  if (KeepGoing) {
    assert((!DidBailout || Context.IsInvalid) &&
           "With -polly-detect-keep-going, it is sufficient that if "
           "isValidRegion short-circuited, that SCoP is invalid");
  } else {
    assert(DidBailout == Context.IsInvalid &&
           "isValidRegion must short-circuit iff the SCoP is invalid");
  }

  if (Context.IsInvalid) {
    removeCachedResults(R);
  } else {
    ValidRegions.insert(&R);
    return;
  }

  for (auto &SubRegion : R)
    findScops(*SubRegion);

  // Try to expand regions.
  //
  // As the region tree normally only contains canonical regions, non-canonical
  // regions that form a SCoP are not found. Therefore, those non-canonical
  // regions are checked by expanding the canonical ones.
  std::vector<Region *> ToExpand;

  for (auto &SubRegion : R)
    ToExpand.push_back(SubRegion.get());

  for (Region *CurrentRegion : ToExpand) {
    // Skip invalid regions. Regions may become invalid if they are an element
    // of an already expanded region.
    if (!ValidRegions.count(CurrentRegion))
      continue;

    // Skip regions that had errors.
    bool HadErrors = lookupRejectionLog(CurrentRegion)->hasErrors();
    if (HadErrors)
      continue;

    Region *ExpandedR = expandRegion(*CurrentRegion);

    if (!ExpandedR)
      continue;

    R.addSubRegion(ExpandedR, true);
    ValidRegions.insert(ExpandedR);
    removeCachedResults(*CurrentRegion);
    removeCachedResultsRecursively(*ExpandedR);
  }
}
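/// Check whether every loop and every basic block (including the instructions
/// and memory accesses therein) in the region of @p Context can be modeled.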
bool ScopDetection::allBlocksValid(DetectionContext &Context) {
  Region &CurRegion = Context.CurRegion;

  for (const BasicBlock *BB : CurRegion.blocks()) {
    Loop *L = LI.getLoopFor(BB);
    if (L && L->getHeader() == BB) {
      if (CurRegion.contains(L)) {
        if (!isValidLoop(L, Context)) {
          Context.IsInvalid = true;
          if (!KeepGoing)
            return false;
        }
      } else {
        SmallVector<BasicBlock *, 1> Latches;
        L->getLoopLatches(Latches);
        for (BasicBlock *Latch : Latches)
          if (CurRegion.contains(Latch))
            return invalid<ReportLoopOnlySomeLatches>(Context, /*Assert=*/true,
                                                      L);
      }
    }
  }

  for (BasicBlock *BB : CurRegion.blocks()) {
    bool IsErrorBlock = isErrorBlock(*BB, CurRegion);

    // Also check exception blocks (and possibly register them as non-affine
    // regions). Even though exception blocks are not modeled, we use them
    // to forward-propagate domain constraints during ScopInfo construction.
    if (!isValidCFG(*BB, false, IsErrorBlock, Context) && !KeepGoing)
      return false;

    if (IsErrorBlock)
      continue;

    for (BasicBlock::iterator I = BB->begin(), E = --BB->end(); I != E; ++I)
      if (!isValidInstruction(*I, Context)) {
        Context.IsInvalid = true;
        if (!KeepGoing)
          return false;
      }
  }

  if (!hasAffineMemoryAccesses(Context))
    return false;

  return true;
}
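/// Profitability heuristic: check whether the blocks inside loops of the
/// region of @p Context contain, on average, at least
/// ProfitabilityMinPerLoopInstructions instructions per loop.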
bool ScopDetection::hasSufficientCompute(DetectionContext &Context,
                                         int NumLoops) const {
  int InstCount = 0;

  if (NumLoops == 0)
    return false;

  for (auto *BB : Context.CurRegion.blocks())
    if (Context.CurRegion.contains(LI.getLoopFor(BB)))
      InstCount += BB->size();

  InstCount = InstCount / NumLoops;

  return InstCount >= ProfitabilityMinPerLoopInstructions;
}
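/// Check whether the first non-boxed loop in the region of @p Context has
/// more than one block that writes to memory; such a loop is a candidate for
/// loop distribution.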
bool ScopDetection::hasPossiblyDistributableLoop(
    DetectionContext &Context) const {
  for (auto *BB : Context.CurRegion.blocks()) {
    auto *L = LI.getLoopFor(BB);
    if (!Context.CurRegion.contains(L))
      continue;
    if (Context.BoxedLoopsSet.count(L))
      continue;
    unsigned StmtsWithStoresInLoops = 0;
    for (auto *LBB : L->blocks()) {
      bool MemStore = false;
      for (auto &I : *LBB)
        MemStore |= isa<StoreInst>(&I);
      StmtsWithStoresInLoops += MemStore;
    }
    return (StmtsWithStoresInLoops > 1);
  }
  return false;
}
bool ScopDetection::isProfitableRegion(DetectionContext &Context) const {
  Region &CurRegion = Context.CurRegion;

  if (PollyProcessUnprofitable)
    return true;

  // We can probably not do a lot on SCoPs that only write or only read
  // data.
  if (!Context.hasStores || !Context.hasLoads)
    return invalid<ReportUnprofitable>(Context, /*Assert=*/true, &CurRegion);

  int NumLoops =
      countBeneficialLoops(&CurRegion, SE, LI, MIN_LOOP_TRIP_COUNT).NumLoops;
  int NumAffineLoops = NumLoops - Context.BoxedLoopsSet.size();

  // SCoPs with at least two loops may allow either loop fusion or tiling and
  // are consequently interesting to look at.
  if (NumAffineLoops >= 2)
    return true;

  // A loop with multiple non-trivial blocks might be amenable to distribution.
  if (NumAffineLoops == 1 && hasPossiblyDistributableLoop(Context))
    return true;

  // SCoPs that contain a loop with a non-trivial amount of computation per
  // loop-iteration are interesting as we may be able to parallelize such
  // loops. Individual loops that have only a small amount of computation
  // per-iteration are performance-wise very fragile as any change to the
  // loop induction variables may affect performance. To not cause spurious
  // performance regressions, we do not consider such loops.
  if (NumAffineLoops == 1 && hasSufficientCompute(Context, NumLoops))
    return true;

  return invalid<ReportUnprofitable>(Context, /*Assert=*/true, &CurRegion);
}
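/// Check whether the region of @p Context is a valid candidate SCoP,
/// rejecting top-level regions, regions with unreachable exits, regions
/// entered via indirect branches, and regions containing invalid blocks or
/// irreducible control flow.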
bool ScopDetection::isValidRegion(DetectionContext &Context) {
  Region &CurRegion = Context.CurRegion;

  LLVM_DEBUG(dbgs() << "Checking region: " << CurRegion.getNameStr() << "\n\t");

  if (!PollyAllowFullFunction && CurRegion.isTopLevelRegion()) {
    LLVM_DEBUG(dbgs() << "Top level region is invalid\n");
    Context.IsInvalid = true;
    return false;
  }

  DebugLoc DbgLoc;
  if (CurRegion.getExit() &&
      isa<UnreachableInst>(CurRegion.getExit()->getTerminator())) {
    LLVM_DEBUG(dbgs() << "Unreachable in exit\n");
    return invalid<ReportUnreachableInExit>(Context, /*Assert=*/true,
                                            CurRegion.getExit(), DbgLoc);
  }

  if (!OnlyRegion.empty() &&
      !CurRegion.getEntry()->getName().count(OnlyRegion)) {
    LLVM_DEBUG({
      dbgs() << "Region entry does not match -polly-only-region";
      dbgs() << "\n";
    });
    Context.IsInvalid = true;
    return false;
  }

  for (BasicBlock *Pred : predecessors(CurRegion.getEntry())) {
    Instruction *PredTerm = Pred->getTerminator();
    if (isa<IndirectBrInst>(PredTerm) || isa<CallBrInst>(PredTerm))
      return invalid<ReportIndirectPredecessor>(
          Context, /*Assert=*/true, PredTerm, PredTerm->getDebugLoc());
  }

  // A SCoP cannot contain the entry block of the function, because we need
  // to insert alloca instructions there when translating scalars to arrays.
  if (!PollyAllowFullFunction &&
      CurRegion.getEntry() ==
          &(CurRegion.getEntry()->getParent()->getEntryBlock()))
    return invalid<ReportEntry>(Context, /*Assert=*/true, CurRegion.getEntry());

  if (!allBlocksValid(Context)) {
    // TODO: Every failure condition within allBlocksValid should call
    // invalid<Reason>(). Otherwise we reject SCoPs without giving feedback to
    // the user.
    Context.IsInvalid = true;
    return false;
  }

  if (!isReducibleRegion(CurRegion, DbgLoc))
    return invalid<ReportIrreducibleRegion>(Context, /*Assert=*/true,
                                            &CurRegion, DbgLoc);

  LLVM_DEBUG(dbgs() << "OK\n");
  return true;
}
void ScopDetection::markFunctionAsInvalid(Function *F) {
  F->addFnAttr(PollySkipFnAttr);
}

bool ScopDetection::isValidFunction(Function &F) {
  return !F.hasFnAttribute(PollySkipFnAttr);
}

void ScopDetection::printLocations(Function &F) {
  for (const Region *R : *this) {
    unsigned LineEntry, LineExit;
    std::string FileName;

    getDebugLocation(R, LineEntry, LineExit, FileName);
    DiagnosticScopFound Diagnostic(F, FileName, LineEntry, LineExit);
    F.getContext().diagnose(Diagnostic);
  }
}

void ScopDetection::emitMissedRemarks(const Function &F) {
  for (auto &DIt : DetectionContextMap) {
    DetectionContext &DC = *DIt.getSecond().get();
    if (DC.Log.hasErrors())
      emitRejectionRemarks(DIt.getFirst(), DC.Log, ORE);
  }
}
bool ScopDetection::isReducibleRegion(Region &R, DebugLoc &DbgLoc) const {
  /// Enum for coloring BBs in Region.
  ///
  /// WHITE - Unvisited BB in DFS walk.
  /// GREY - BBs which are currently on the DFS stack for processing.
  /// BLACK - Visited and completely processed BB.
  enum Color { WHITE, GREY, BLACK };

  BasicBlock *REntry = R.getEntry();
  BasicBlock *RExit = R.getExit();
  // Map to match the color of a BasicBlock during the DFS walk.
  DenseMap<const BasicBlock *, Color> BBColorMap;
  // Stack keeping track of current BB and index of next child to be processed.
  std::stack<std::pair<BasicBlock *, unsigned>> DFSStack;

  unsigned AdjacentBlockIndex = 0;
  BasicBlock *CurrBB, *SuccBB;
  CurrBB = REntry;

  // Initialize the map for all BB with WHITE color.
  for (auto *BB : R.blocks())
    BBColorMap[BB] = WHITE;

  // Process the entry block of the Region.
  BBColorMap[CurrBB] = GREY;
  DFSStack.push(std::make_pair(CurrBB, 0));

  while (!DFSStack.empty()) {
    // Get next BB on stack to be processed.
    CurrBB = DFSStack.top().first;
    AdjacentBlockIndex = DFSStack.top().second;
    DFSStack.pop();

    // Loop to iterate over the successors of current BB.
    const Instruction *TInst = CurrBB->getTerminator();
    unsigned NSucc = TInst->getNumSuccessors();
    for (unsigned I = AdjacentBlockIndex; I < NSucc;
         ++I, ++AdjacentBlockIndex) {
      SuccBB = TInst->getSuccessor(I);

      // Checks for region exit block and self-loops in BB.
      if (SuccBB == RExit || SuccBB == CurrBB)
        continue;

      // WHITE indicates an unvisited BB in DFS walk.
      if (BBColorMap[SuccBB] == WHITE) {
        // Push the current BB and the index of the next child to be visited.
        DFSStack.push(std::make_pair(CurrBB, I + 1));
        // Push the next BB to be processed.
        DFSStack.push(std::make_pair(SuccBB, 0));
        // First time the BB is being processed.
        BBColorMap[SuccBB] = GREY;
        break;
      } else if (BBColorMap[SuccBB] == GREY) {
        // GREY indicates a loop in the control flow.
        // If the destination dominates the source, it is a natural loop;
        // otherwise, irreducible control flow in the region has been detected.
        if (!DT.dominates(SuccBB, CurrBB)) {
          // Get debug info of instruction which causes irregular control flow.
          DbgLoc = TInst->getDebugLoc();
          return false;
        }
      }
    }

    // If all children of current BB have been processed,
    // then mark that BB as fully processed.
    if (AdjacentBlockIndex == NSucc)
      BBColorMap[CurrBB] = BLACK;
  }

  return true;
}
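/// Update the pass statistics that count the loops and the maximal loop
/// depths of detected (respectively profitable) SCoPs with @p Stats.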
static void updateLoopCountStatistic(ScopDetection::LoopStats Stats,
                                     bool OnlyProfitable) {
  if (!OnlyProfitable) {
    NumLoopsInScop += Stats.NumLoops;
    MaxNumLoopsInScop =
        std::max(MaxNumLoopsInScop.getValue(), (uint64_t)Stats.NumLoops);
    if (Stats.MaxDepth == 0)
      NumScopsDepthZero++;
    else if (Stats.MaxDepth == 1)
      NumScopsDepthOne++;
    else if (Stats.MaxDepth == 2)
      NumScopsDepthTwo++;
    else if (Stats.MaxDepth == 3)
      NumScopsDepthThree++;
    else if (Stats.MaxDepth == 4)
      NumScopsDepthFour++;
    else if (Stats.MaxDepth == 5)
      NumScopsDepthFive++;
    else
      NumScopsDepthLarger++;
  } else {
    NumLoopsInProfScop += Stats.NumLoops;
    MaxNumLoopsInProfScop =
        std::max(MaxNumLoopsInProfScop.getValue(), (uint64_t)Stats.NumLoops);
    if (Stats.MaxDepth == 0)
      NumProfScopsDepthZero++;
    else if (Stats.MaxDepth == 1)
      NumProfScopsDepthOne++;
    else if (Stats.MaxDepth == 2)
      NumProfScopsDepthTwo++;
    else if (Stats.MaxDepth == 3)
      NumProfScopsDepthThree++;
    else if (Stats.MaxDepth == 4)
      NumProfScopsDepthFour++;
    else if (Stats.MaxDepth == 5)
      NumProfScopsDepthFive++;
    else
      NumProfScopsDepthLarger++;
  }
}
ScopDetection::DetectionContext *
ScopDetection::getDetectionContext(const Region *R) const {
  auto DCMIt = DetectionContextMap.find(getBBPairForRegion(R));
  if (DCMIt == DetectionContextMap.end())
    return nullptr;
  return DCMIt->second.get();
}

const RejectLog *ScopDetection::lookupRejectionLog(const Region *R) const {
  const DetectionContext *DC = getDetectionContext(R);
  return DC ? &DC->Log : nullptr;
}

void ScopDetection::verifyRegion(const Region &R) {
  assert(isMaxRegionInScop(R) && "Expect R to be a valid region.");

  DetectionContext Context(const_cast<Region &>(R), AA, /*Verifying=*/true);
  isValidRegion(Context);
}

void ScopDetection::verifyAnalysis() {
  if (!VerifyScops)
    return;

  for (const Region *R : ValidRegions)
    verifyRegion(*R);
}
bool ScopDetectionWrapperPass::runOnFunction(Function &F) {
  auto &LI = getAnalysis<LoopInfoWrapperPass>().getLoopInfo();
  auto &RI = getAnalysis<RegionInfoPass>().getRegionInfo();
  auto &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
  auto &SE = getAnalysis<ScalarEvolutionWrapperPass>().getSE();
  auto &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  auto &ORE = getAnalysis<OptimizationRemarkEmitterWrapperPass>().getORE();

  Result = std::make_unique<ScopDetection>(DT, SE, LI, RI, AA, ORE);
  Result->detect(F);
  return false;
}

void ScopDetectionWrapperPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<LoopInfoWrapperPass>();
  AU.addRequiredTransitive<ScalarEvolutionWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
  AU.addRequired<OptimizationRemarkEmitterWrapperPass>();
  // We also need AA and RegionInfo when we are verifying analysis.
  AU.addRequiredTransitive<AAResultsWrapperPass>();
  AU.addRequiredTransitive<RegionInfoPass>();
  AU.setPreservesAll();
}
void ScopDetectionWrapperPass::print(raw_ostream &OS, const Module *) const {
  for (const Region *R : Result->ValidRegions)
    OS << "Valid Region for Scop: " << R->getNameStr() << '\n';

  OS << "\n";
}

ScopDetectionWrapperPass::ScopDetectionWrapperPass() : FunctionPass(ID) {
  // Disable runtime alias checks if we ignore aliasing altogether.
  if (IgnoreAliasing)
    PollyUseRuntimeAliasChecks = false;
}

ScopAnalysis::ScopAnalysis() {
  // Disable runtime alias checks if we ignore aliasing altogether.
  if (IgnoreAliasing)
    PollyUseRuntimeAliasChecks = false;
}
void ScopDetectionWrapperPass::releaseMemory() { Result.reset(); }

char ScopDetectionWrapperPass::ID;

AnalysisKey ScopAnalysis::Key;

ScopDetection ScopAnalysis::run(Function &F, FunctionAnalysisManager &FAM) {
  auto &LI = FAM.getResult<LoopAnalysis>(F);
  auto &RI = FAM.getResult<RegionInfoAnalysis>(F);
  auto &AA = FAM.getResult<AAManager>(F);
  auto &SE = FAM.getResult<ScalarEvolutionAnalysis>(F);
  auto &DT = FAM.getResult<DominatorTreeAnalysis>(F);
  auto &ORE = FAM.getResult<OptimizationRemarkEmitterAnalysis>(F);

  ScopDetection Result(DT, SE, LI, RI, AA, ORE);
  Result.detect(F);
  return Result;
}
PreservedAnalyses ScopAnalysisPrinterPass::run(Function &F,
                                               FunctionAnalysisManager &FAM) {
  OS << "Detected Scops in Function " << F.getName() << "\n";
  auto &SD = FAM.getResult<ScopAnalysis>(F);
  for (const Region *R : SD.ValidRegions)
    OS << "Valid Region for Scop: " << R->getNameStr() << '\n';

  OS << "\n";
  return PreservedAnalyses::all();
}

Pass *polly::createScopDetectionWrapperPassPass() {
  return new ScopDetectionWrapperPass();
}

INITIALIZE_PASS_BEGIN(ScopDetectionWrapperPass, "polly-detect",
                      "Polly - Detect static control parts (SCoPs)", false,
                      false);
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass);
INITIALIZE_PASS_DEPENDENCY(LoopInfoWrapperPass);
INITIALIZE_PASS_DEPENDENCY(RegionInfoPass);
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass);
INITIALIZE_PASS_DEPENDENCY(ScalarEvolutionWrapperPass);
INITIALIZE_PASS_DEPENDENCY(OptimizationRemarkEmitterWrapperPass);
INITIALIZE_PASS_END(ScopDetectionWrapperPass, "polly-detect",
                    "Polly - Detect static control parts (SCoPs)", false, false)
//===----------------------------------------------------------------------===//

namespace {
/// Print result from ScopDetectionWrapperPass.
class ScopDetectionPrinterLegacyPass final : public FunctionPass {
public:
  static char ID;

  ScopDetectionPrinterLegacyPass() : ScopDetectionPrinterLegacyPass(outs()) {}
  explicit ScopDetectionPrinterLegacyPass(llvm::raw_ostream &OS)
      : FunctionPass(ID), OS(OS) {}

  bool runOnFunction(Function &F) override {
    ScopDetectionWrapperPass &P = getAnalysis<ScopDetectionWrapperPass>();

    OS << "Printing analysis '" << P.getPassName() << "' for function '"
       << F.getName() << "':\n";
    P.print(OS);

    return false;
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    FunctionPass::getAnalysisUsage(AU);
    AU.addRequired<ScopDetectionWrapperPass>();
    AU.setPreservesAll();
  }

private:
  llvm::raw_ostream &OS;
};

char ScopDetectionPrinterLegacyPass::ID = 0;
} // namespace

Pass *polly::createScopDetectionPrinterLegacyPass(raw_ostream &OS) {
  return new ScopDetectionPrinterLegacyPass(OS);
}

INITIALIZE_PASS_BEGIN(ScopDetectionPrinterLegacyPass, "polly-print-detect",
                      "Polly - Print static control parts (SCoPs)", false,
                      false);
INITIALIZE_PASS_DEPENDENCY(ScopDetectionWrapperPass);
INITIALIZE_PASS_END(ScopDetectionPrinterLegacyPass, "polly-print-detect",
                    "Polly - Print static control parts (SCoPs)", false, false)