VPlan.h 98 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682683684685686687688689690691692693694695696697698699700701702703704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988989990991992993994995996997998999100010011002100310041005100610071008100910101011101210131014101510161017101810191020102110221023102410251026102710281029103010311032103310341035103610371038103910401041104210431044104510461047104810491050105110521053105410551056105710581059106010611062106310641065106610671068106910701071107210731074107510761077107810791080108110821083108410851086108710881089109010911092109310941095109610971098109911001101110211031104110511061107110811091110111111121113111411151116111711181119112011211122112311241125112611271128112911301131113211331134113511361137113811391140114111421143114411451146114711481149115011511152115311541155115611571158115911601161116211631164116511
661167116811691170117111721173117411751176117711781179118011811182118311841185118611871188118911901191119211931194119511961197119811991200120112021203120412051206120712081209121012111212121312141215121612171218121912201221122212231224122512261227122812291230123112321233123412351236123712381239124012411242124312441245124612471248124912501251125212531254125512561257125812591260126112621263126412651266126712681269127012711272127312741275127612771278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875187618771878187918801881188218831884188518861887188818891890189118921893189418951896189718981899190019011902190319041905190619071908190919101911191219131914191519161917191819191920192119221923192419251926192719281929193019311932193319341935193619371938193919401941194219431944194519461947194819491950195119521953195419551956195719581959196019611962196319641965196619671968196919701971197219731974197519761977197819791980198119821983198419851986198719881989199019911992199319941995199619971998199920002001200220032004200520062007200820092010201120122013201420152016201720182019202020212022202320242025202620272028202920302031203220332034203520362037203820392040204120422043204420452046204720482049205020512052205320542
055205620572058205920602061206220632064206520662067206820692070207120722073207420752076207720782079208020812082208320842085208620872088208920902091209220932094209520962097209820992100210121022103210421052106210721082109211021112112211321142115211621172118211921202121212221232124212521262127212821292130213121322133213421352136213721382139214021412142214321442145214621472148214921502151215221532154215521562157215821592160216121622163216421652166216721682169217021712172217321742175217621772178217921802181218221832184218521862187218821892190219121922193219421952196219721982199220022012202220322042205220622072208220922102211221222132214221522162217221822192220222122222223222422252226222722282229223022312232223322342235223622372238223922402241224222432244224522462247224822492250225122522253225422552256225722582259226022612262226322642265226622672268226922702271227222732274227522762277227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700270127022703270427052706270727082709271027112712271327142715271627172718
  1. //===- VPlan.h - Represent A Vectorizer Plan --------------------*- C++ -*-===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. /// \file
  10. /// This file contains the declarations of the Vectorization Plan base classes:
  11. /// 1. VPBasicBlock and VPRegionBlock that inherit from a common pure virtual
  12. /// VPBlockBase, together implementing a Hierarchical CFG;
  13. /// 2. Pure virtual VPRecipeBase serving as the base class for recipes contained
  14. /// within VPBasicBlocks;
  15. /// 3. VPInstruction, a concrete Recipe and VPUser modeling a single planned
  16. /// instruction;
  17. /// 4. The VPlan class holding a candidate for vectorization;
  18. /// 5. The VPlanPrinter class providing a way to print a plan in dot format;
  19. /// These are documented in docs/VectorizationPlan.rst.
  20. //
  21. //===----------------------------------------------------------------------===//
  22. #ifndef LLVM_TRANSFORMS_VECTORIZE_VPLAN_H
  23. #define LLVM_TRANSFORMS_VECTORIZE_VPLAN_H
  24. #include "VPlanValue.h"
  25. #include "llvm/ADT/DenseMap.h"
  26. #include "llvm/ADT/DepthFirstIterator.h"
  27. #include "llvm/ADT/MapVector.h"
  28. #include "llvm/ADT/SmallBitVector.h"
  29. #include "llvm/ADT/SmallPtrSet.h"
  30. #include "llvm/ADT/SmallVector.h"
  31. #include "llvm/ADT/Twine.h"
  32. #include "llvm/ADT/ilist.h"
  33. #include "llvm/ADT/ilist_node.h"
  34. #include "llvm/Analysis/LoopInfo.h"
  35. #include "llvm/Analysis/VectorUtils.h"
  36. #include "llvm/IR/DebugLoc.h"
  37. #include "llvm/IR/FMF.h"
  38. #include "llvm/Transforms/Utils/LoopVersioning.h"
  39. #include <algorithm>
  40. #include <cassert>
  41. #include <cstddef>
  42. #include <string>
  43. namespace llvm {
  44. class BasicBlock;
  45. class DominatorTree;
  46. class InductionDescriptor;
  47. class InnerLoopVectorizer;
  48. class IRBuilderBase;
  49. class LoopInfo;
  50. class PredicateScalarEvolution;
  51. class raw_ostream;
  52. class RecurrenceDescriptor;
  53. class SCEV;
  54. class Type;
  55. class VPBasicBlock;
  56. class VPRegionBlock;
  57. class VPlan;
  58. class VPReplicateRecipe;
  59. class VPlanSlp;
  60. class Value;
  61. namespace Intrinsic {
  62. typedef unsigned ID;
  63. }
  64. /// Returns a calculation for the total number of elements for a given \p VF.
  65. /// For fixed width vectors this value is a constant, whereas for scalable
  66. /// vectors it is an expression determined at runtime.
  67. Value *getRuntimeVF(IRBuilderBase &B, Type *Ty, ElementCount VF);
  68. /// Return a value for Step multiplied by VF.
  69. Value *createStepForVF(IRBuilderBase &B, Type *Ty, ElementCount VF,
  70. int64_t Step);
  71. const SCEV *createTripCountSCEV(Type *IdxTy, PredicatedScalarEvolution &PSE);
  72. /// A range of powers-of-2 vectorization factors with fixed start and
  73. /// adjustable end. The range includes start and excludes end, e.g.,:
  74. /// [1, 9) = {1, 2, 4, 8}
  75. struct VFRange {
  76. // A power of 2.
  77. const ElementCount Start;
  78. // Need not be a power of 2. If End <= Start range is empty.
  79. ElementCount End;
  80. bool isEmpty() const {
  81. return End.getKnownMinValue() <= Start.getKnownMinValue();
  82. }
  83. VFRange(const ElementCount &Start, const ElementCount &End)
  84. : Start(Start), End(End) {
  85. assert(Start.isScalable() == End.isScalable() &&
  86. "Both Start and End should have the same scalable flag");
  87. assert(isPowerOf2_32(Start.getKnownMinValue()) &&
  88. "Expected Start to be a power of 2");
  89. }
  90. };
  91. using VPlanPtr = std::unique_ptr<VPlan>;
  92. /// In what follows, the term "input IR" refers to code that is fed into the
  93. /// vectorizer whereas the term "output IR" refers to code that is generated by
  94. /// the vectorizer.
  95. /// VPLane provides a way to access lanes in both fixed width and scalable
  96. /// vectors, where for the latter the lane index sometimes needs calculating
  97. /// as a runtime expression.
  98. class VPLane {
  99. public:
  100. /// Kind describes how to interpret Lane.
  101. enum class Kind : uint8_t {
  102. /// For First, Lane is the index into the first N elements of a
  103. /// fixed-vector <N x <ElTy>> or a scalable vector <vscale x N x <ElTy>>.
  104. First,
  105. /// For ScalableLast, Lane is the offset from the start of the last
  106. /// N-element subvector in a scalable vector <vscale x N x <ElTy>>. For
  107. /// example, a Lane of 0 corresponds to lane `(vscale - 1) * N`, a Lane of
  108. /// 1 corresponds to `((vscale - 1) * N) + 1`, etc.
  109. ScalableLast
  110. };
  111. private:
  112. /// in [0..VF)
  113. unsigned Lane;
  114. /// Indicates how the Lane should be interpreted, as described above.
  115. Kind LaneKind;
  116. public:
  117. VPLane(unsigned Lane, Kind LaneKind) : Lane(Lane), LaneKind(LaneKind) {}
  118. static VPLane getFirstLane() { return VPLane(0, VPLane::Kind::First); }
  119. static VPLane getLastLaneForVF(const ElementCount &VF) {
  120. unsigned LaneOffset = VF.getKnownMinValue() - 1;
  121. Kind LaneKind;
  122. if (VF.isScalable())
  123. // In this case 'LaneOffset' refers to the offset from the start of the
  124. // last subvector with VF.getKnownMinValue() elements.
  125. LaneKind = VPLane::Kind::ScalableLast;
  126. else
  127. LaneKind = VPLane::Kind::First;
  128. return VPLane(LaneOffset, LaneKind);
  129. }
  130. /// Returns a compile-time known value for the lane index and asserts if the
  131. /// lane can only be calculated at runtime.
  132. unsigned getKnownLane() const {
  133. assert(LaneKind == Kind::First);
  134. return Lane;
  135. }
  136. /// Returns an expression describing the lane index that can be used at
  137. /// runtime.
  138. Value *getAsRuntimeExpr(IRBuilderBase &Builder, const ElementCount &VF) const;
  139. /// Returns the Kind of lane offset.
  140. Kind getKind() const { return LaneKind; }
  141. /// Returns true if this is the first lane of the whole vector.
  142. bool isFirstLane() const { return Lane == 0 && LaneKind == Kind::First; }
  143. /// Maps the lane to a cache index based on \p VF.
  144. unsigned mapToCacheIndex(const ElementCount &VF) const {
  145. switch (LaneKind) {
  146. case VPLane::Kind::ScalableLast:
  147. assert(VF.isScalable() && Lane < VF.getKnownMinValue());
  148. return VF.getKnownMinValue() + Lane;
  149. default:
  150. assert(Lane < VF.getKnownMinValue());
  151. return Lane;
  152. }
  153. }
  154. /// Returns the maxmimum number of lanes that we are able to consider
  155. /// caching for \p VF.
  156. static unsigned getNumCachedLanes(const ElementCount &VF) {
  157. return VF.getKnownMinValue() * (VF.isScalable() ? 2 : 1);
  158. }
  159. };
  160. /// VPIteration represents a single point in the iteration space of the output
  161. /// (vectorized and/or unrolled) IR loop.
  162. struct VPIteration {
  163. /// in [0..UF)
  164. unsigned Part;
  165. VPLane Lane;
  166. VPIteration(unsigned Part, unsigned Lane,
  167. VPLane::Kind Kind = VPLane::Kind::First)
  168. : Part(Part), Lane(Lane, Kind) {}
  169. VPIteration(unsigned Part, const VPLane &Lane) : Part(Part), Lane(Lane) {}
  170. bool isFirstIteration() const { return Part == 0 && Lane.isFirstLane(); }
  171. };
  172. /// VPTransformState holds information passed down when "executing" a VPlan,
  173. /// needed for generating the output IR.
  174. struct VPTransformState {
  175. VPTransformState(ElementCount VF, unsigned UF, LoopInfo *LI,
  176. DominatorTree *DT, IRBuilderBase &Builder,
  177. InnerLoopVectorizer *ILV, VPlan *Plan)
  178. : VF(VF), UF(UF), LI(LI), DT(DT), Builder(Builder), ILV(ILV), Plan(Plan),
  179. LVer(nullptr) {}
  180. /// The chosen Vectorization and Unroll Factors of the loop being vectorized.
  181. ElementCount VF;
  182. unsigned UF;
  183. /// Hold the indices to generate specific scalar instructions. Null indicates
  184. /// that all instances are to be generated, using either scalar or vector
  185. /// instructions.
  186. std::optional<VPIteration> Instance;
  187. struct DataState {
  188. /// A type for vectorized values in the new loop. Each value from the
  189. /// original loop, when vectorized, is represented by UF vector values in
  190. /// the new unrolled loop, where UF is the unroll factor.
  191. typedef SmallVector<Value *, 2> PerPartValuesTy;
  192. DenseMap<VPValue *, PerPartValuesTy> PerPartOutput;
  193. using ScalarsPerPartValuesTy = SmallVector<SmallVector<Value *, 4>, 2>;
  194. DenseMap<VPValue *, ScalarsPerPartValuesTy> PerPartScalars;
  195. } Data;
  196. /// Get the generated Value for a given VPValue and a given Part. Note that
  197. /// as some Defs are still created by ILV and managed in its ValueMap, this
  198. /// method will delegate the call to ILV in such cases in order to provide
  199. /// callers a consistent API.
  200. /// \see set.
  201. Value *get(VPValue *Def, unsigned Part);
  202. /// Get the generated Value for a given VPValue and given Part and Lane.
  203. Value *get(VPValue *Def, const VPIteration &Instance);
  204. bool hasVectorValue(VPValue *Def, unsigned Part) {
  205. auto I = Data.PerPartOutput.find(Def);
  206. return I != Data.PerPartOutput.end() && Part < I->second.size() &&
  207. I->second[Part];
  208. }
  209. bool hasAnyVectorValue(VPValue *Def) const {
  210. return Data.PerPartOutput.find(Def) != Data.PerPartOutput.end();
  211. }
  212. bool hasScalarValue(VPValue *Def, VPIteration Instance) {
  213. auto I = Data.PerPartScalars.find(Def);
  214. if (I == Data.PerPartScalars.end())
  215. return false;
  216. unsigned CacheIdx = Instance.Lane.mapToCacheIndex(VF);
  217. return Instance.Part < I->second.size() &&
  218. CacheIdx < I->second[Instance.Part].size() &&
  219. I->second[Instance.Part][CacheIdx];
  220. }
  221. /// Set the generated Value for a given VPValue and a given Part.
  222. void set(VPValue *Def, Value *V, unsigned Part) {
  223. if (!Data.PerPartOutput.count(Def)) {
  224. DataState::PerPartValuesTy Entry(UF);
  225. Data.PerPartOutput[Def] = Entry;
  226. }
  227. Data.PerPartOutput[Def][Part] = V;
  228. }
  229. /// Reset an existing vector value for \p Def and a given \p Part.
  230. void reset(VPValue *Def, Value *V, unsigned Part) {
  231. auto Iter = Data.PerPartOutput.find(Def);
  232. assert(Iter != Data.PerPartOutput.end() &&
  233. "need to overwrite existing value");
  234. Iter->second[Part] = V;
  235. }
  236. /// Set the generated scalar \p V for \p Def and the given \p Instance.
  237. void set(VPValue *Def, Value *V, const VPIteration &Instance) {
  238. auto Iter = Data.PerPartScalars.insert({Def, {}});
  239. auto &PerPartVec = Iter.first->second;
  240. while (PerPartVec.size() <= Instance.Part)
  241. PerPartVec.emplace_back();
  242. auto &Scalars = PerPartVec[Instance.Part];
  243. unsigned CacheIdx = Instance.Lane.mapToCacheIndex(VF);
  244. while (Scalars.size() <= CacheIdx)
  245. Scalars.push_back(nullptr);
  246. assert(!Scalars[CacheIdx] && "should overwrite existing value");
  247. Scalars[CacheIdx] = V;
  248. }
  249. /// Reset an existing scalar value for \p Def and a given \p Instance.
  250. void reset(VPValue *Def, Value *V, const VPIteration &Instance) {
  251. auto Iter = Data.PerPartScalars.find(Def);
  252. assert(Iter != Data.PerPartScalars.end() &&
  253. "need to overwrite existing value");
  254. assert(Instance.Part < Iter->second.size() &&
  255. "need to overwrite existing value");
  256. unsigned CacheIdx = Instance.Lane.mapToCacheIndex(VF);
  257. assert(CacheIdx < Iter->second[Instance.Part].size() &&
  258. "need to overwrite existing value");
  259. Iter->second[Instance.Part][CacheIdx] = V;
  260. }
  261. /// Add additional metadata to \p To that was not present on \p Orig.
  262. ///
  263. /// Currently this is used to add the noalias annotations based on the
  264. /// inserted memchecks. Use this for instructions that are *cloned* into the
  265. /// vector loop.
  266. void addNewMetadata(Instruction *To, const Instruction *Orig);
  267. /// Add metadata from one instruction to another.
  268. ///
  269. /// This includes both the original MDs from \p From and additional ones (\see
  270. /// addNewMetadata). Use this for *newly created* instructions in the vector
  271. /// loop.
  272. void addMetadata(Instruction *To, Instruction *From);
  273. /// Similar to the previous function but it adds the metadata to a
  274. /// vector of instructions.
  275. void addMetadata(ArrayRef<Value *> To, Instruction *From);
  276. /// Set the debug location in the builder using the debug location in \p V.
  277. void setDebugLocFromInst(const Value *V);
  278. /// Hold state information used when constructing the CFG of the output IR,
  279. /// traversing the VPBasicBlocks and generating corresponding IR BasicBlocks.
  280. struct CFGState {
  281. /// The previous VPBasicBlock visited. Initially set to null.
  282. VPBasicBlock *PrevVPBB = nullptr;
  283. /// The previous IR BasicBlock created or used. Initially set to the new
  284. /// header BasicBlock.
  285. BasicBlock *PrevBB = nullptr;
  286. /// The last IR BasicBlock in the output IR. Set to the exit block of the
  287. /// vector loop.
  288. BasicBlock *ExitBB = nullptr;
  289. /// A mapping of each VPBasicBlock to the corresponding BasicBlock. In case
  290. /// of replication, maps the BasicBlock of the last replica created.
  291. SmallDenseMap<VPBasicBlock *, BasicBlock *> VPBB2IRBB;
  292. CFGState() = default;
  293. /// Returns the BasicBlock* mapped to the pre-header of the loop region
  294. /// containing \p R.
  295. BasicBlock *getPreheaderBBFor(VPRecipeBase *R);
  296. } CFG;
  297. /// Hold a pointer to LoopInfo to register new basic blocks in the loop.
  298. LoopInfo *LI;
  299. /// Hold a pointer to Dominator Tree to register new basic blocks in the loop.
  300. DominatorTree *DT;
  301. /// Hold a reference to the IRBuilder used to generate output IR code.
  302. IRBuilderBase &Builder;
  303. VPValue2ValueTy VPValue2Value;
  304. /// Hold the canonical scalar IV of the vector loop (start=0, step=VF*UF).
  305. Value *CanonicalIV = nullptr;
  306. /// Hold a pointer to InnerLoopVectorizer to reuse its IR generation methods.
  307. InnerLoopVectorizer *ILV;
  308. /// Pointer to the VPlan code is generated for.
  309. VPlan *Plan;
  310. /// Holds recipes that may generate a poison value that is used after
  311. /// vectorization, even when their operands are not poison.
  312. SmallPtrSet<VPRecipeBase *, 16> MayGeneratePoisonRecipes;
  313. /// The loop object for the current parent region, or nullptr.
  314. Loop *CurrentVectorLoop = nullptr;
  315. /// LoopVersioning. It's only set up (non-null) if memchecks were
  316. /// used.
  317. ///
  318. /// This is currently only used to add no-alias metadata based on the
  319. /// memchecks. The actually versioning is performed manually.
  320. std::unique_ptr<LoopVersioning> LVer;
  321. };
  322. /// VPBlockBase is the building block of the Hierarchical Control-Flow Graph.
  323. /// A VPBlockBase can be either a VPBasicBlock or a VPRegionBlock.
  324. class VPBlockBase {
  325. friend class VPBlockUtils;
  326. const unsigned char SubclassID; ///< Subclass identifier (for isa/dyn_cast).
  327. /// An optional name for the block.
  328. std::string Name;
  329. /// The immediate VPRegionBlock which this VPBlockBase belongs to, or null if
  330. /// it is a topmost VPBlockBase.
  331. VPRegionBlock *Parent = nullptr;
  332. /// List of predecessor blocks.
  333. SmallVector<VPBlockBase *, 1> Predecessors;
  334. /// List of successor blocks.
  335. SmallVector<VPBlockBase *, 1> Successors;
  336. /// VPlan containing the block. Can only be set on the entry block of the
  337. /// plan.
  338. VPlan *Plan = nullptr;
  339. /// Add \p Successor as the last successor to this block.
  340. void appendSuccessor(VPBlockBase *Successor) {
  341. assert(Successor && "Cannot add nullptr successor!");
  342. Successors.push_back(Successor);
  343. }
  344. /// Add \p Predecessor as the last predecessor to this block.
  345. void appendPredecessor(VPBlockBase *Predecessor) {
  346. assert(Predecessor && "Cannot add nullptr predecessor!");
  347. Predecessors.push_back(Predecessor);
  348. }
  349. /// Remove \p Predecessor from the predecessors of this block.
  350. void removePredecessor(VPBlockBase *Predecessor) {
  351. auto Pos = find(Predecessors, Predecessor);
  352. assert(Pos && "Predecessor does not exist");
  353. Predecessors.erase(Pos);
  354. }
  355. /// Remove \p Successor from the successors of this block.
  356. void removeSuccessor(VPBlockBase *Successor) {
  357. auto Pos = find(Successors, Successor);
  358. assert(Pos && "Successor does not exist");
  359. Successors.erase(Pos);
  360. }
  361. protected:
  362. VPBlockBase(const unsigned char SC, const std::string &N)
  363. : SubclassID(SC), Name(N) {}
  364. public:
  365. /// An enumeration for keeping track of the concrete subclass of VPBlockBase
  366. /// that are actually instantiated. Values of this enumeration are kept in the
  367. /// SubclassID field of the VPBlockBase objects. They are used for concrete
  368. /// type identification.
  369. using VPBlockTy = enum { VPBasicBlockSC, VPRegionBlockSC };
  370. using VPBlocksTy = SmallVectorImpl<VPBlockBase *>;
  371. virtual ~VPBlockBase() = default;
  372. const std::string &getName() const { return Name; }
  373. void setName(const Twine &newName) { Name = newName.str(); }
  374. /// \return an ID for the concrete type of this object.
  375. /// This is used to implement the classof checks. This should not be used
  376. /// for any other purpose, as the values may change as LLVM evolves.
  377. unsigned getVPBlockID() const { return SubclassID; }
  378. VPRegionBlock *getParent() { return Parent; }
  379. const VPRegionBlock *getParent() const { return Parent; }
  380. /// \return A pointer to the plan containing the current block.
  381. VPlan *getPlan();
  382. const VPlan *getPlan() const;
  383. /// Sets the pointer of the plan containing the block. The block must be the
  384. /// entry block into the VPlan.
  385. void setPlan(VPlan *ParentPlan);
  386. void setParent(VPRegionBlock *P) { Parent = P; }
  387. /// \return the VPBasicBlock that is the entry of this VPBlockBase,
  388. /// recursively, if the latter is a VPRegionBlock. Otherwise, if this
  389. /// VPBlockBase is a VPBasicBlock, it is returned.
  390. const VPBasicBlock *getEntryBasicBlock() const;
  391. VPBasicBlock *getEntryBasicBlock();
  392. /// \return the VPBasicBlock that is the exiting this VPBlockBase,
  393. /// recursively, if the latter is a VPRegionBlock. Otherwise, if this
  394. /// VPBlockBase is a VPBasicBlock, it is returned.
  395. const VPBasicBlock *getExitingBasicBlock() const;
  396. VPBasicBlock *getExitingBasicBlock();
  397. const VPBlocksTy &getSuccessors() const { return Successors; }
  398. VPBlocksTy &getSuccessors() { return Successors; }
  399. iterator_range<VPBlockBase **> successors() { return Successors; }
  400. const VPBlocksTy &getPredecessors() const { return Predecessors; }
  401. VPBlocksTy &getPredecessors() { return Predecessors; }
  402. /// \return the successor of this VPBlockBase if it has a single successor.
  403. /// Otherwise return a null pointer.
  404. VPBlockBase *getSingleSuccessor() const {
  405. return (Successors.size() == 1 ? *Successors.begin() : nullptr);
  406. }
  407. /// \return the predecessor of this VPBlockBase if it has a single
  408. /// predecessor. Otherwise return a null pointer.
  409. VPBlockBase *getSinglePredecessor() const {
  410. return (Predecessors.size() == 1 ? *Predecessors.begin() : nullptr);
  411. }
  412. size_t getNumSuccessors() const { return Successors.size(); }
  413. size_t getNumPredecessors() const { return Predecessors.size(); }
  414. /// An Enclosing Block of a block B is any block containing B, including B
  415. /// itself. \return the closest enclosing block starting from "this", which
  416. /// has successors. \return the root enclosing block if all enclosing blocks
  417. /// have no successors.
  418. VPBlockBase *getEnclosingBlockWithSuccessors();
  419. /// \return the closest enclosing block starting from "this", which has
  420. /// predecessors. \return the root enclosing block if all enclosing blocks
  421. /// have no predecessors.
  422. VPBlockBase *getEnclosingBlockWithPredecessors();
  423. /// \return the successors either attached directly to this VPBlockBase or, if
  424. /// this VPBlockBase is the exit block of a VPRegionBlock and has no
  425. /// successors of its own, search recursively for the first enclosing
  426. /// VPRegionBlock that has successors and return them. If no such
  427. /// VPRegionBlock exists, return the (empty) successors of the topmost
  428. /// VPBlockBase reached.
  429. const VPBlocksTy &getHierarchicalSuccessors() {
  430. return getEnclosingBlockWithSuccessors()->getSuccessors();
  431. }
  432. /// \return the hierarchical successor of this VPBlockBase if it has a single
  433. /// hierarchical successor. Otherwise return a null pointer.
  434. VPBlockBase *getSingleHierarchicalSuccessor() {
  435. return getEnclosingBlockWithSuccessors()->getSingleSuccessor();
  436. }
  437. /// \return the predecessors either attached directly to this VPBlockBase or,
  438. /// if this VPBlockBase is the entry block of a VPRegionBlock and has no
  439. /// predecessors of its own, search recursively for the first enclosing
  440. /// VPRegionBlock that has predecessors and return them. If no such
  441. /// VPRegionBlock exists, return the (empty) predecessors of the topmost
  442. /// VPBlockBase reached.
  443. const VPBlocksTy &getHierarchicalPredecessors() {
  444. return getEnclosingBlockWithPredecessors()->getPredecessors();
  445. }
  446. /// \return the hierarchical predecessor of this VPBlockBase if it has a
  447. /// single hierarchical predecessor. Otherwise return a null pointer.
  448. VPBlockBase *getSingleHierarchicalPredecessor() {
  449. return getEnclosingBlockWithPredecessors()->getSinglePredecessor();
  450. }
  451. /// Set a given VPBlockBase \p Successor as the single successor of this
  452. /// VPBlockBase. This VPBlockBase is not added as predecessor of \p Successor.
  453. /// This VPBlockBase must have no successors.
  454. void setOneSuccessor(VPBlockBase *Successor) {
  455. assert(Successors.empty() && "Setting one successor when others exist.");
  456. appendSuccessor(Successor);
  457. }
  458. /// Set two given VPBlockBases \p IfTrue and \p IfFalse to be the two
  459. /// successors of this VPBlockBase. This VPBlockBase is not added as
  460. /// predecessor of \p IfTrue or \p IfFalse. This VPBlockBase must have no
  461. /// successors.
  462. void setTwoSuccessors(VPBlockBase *IfTrue, VPBlockBase *IfFalse) {
  463. assert(Successors.empty() && "Setting two successors when others exist.");
  464. appendSuccessor(IfTrue);
  465. appendSuccessor(IfFalse);
  466. }
  467. /// Set each VPBasicBlock in \p NewPreds as predecessor of this VPBlockBase.
  468. /// This VPBlockBase must have no predecessors. This VPBlockBase is not added
  469. /// as successor of any VPBasicBlock in \p NewPreds.
  470. void setPredecessors(ArrayRef<VPBlockBase *> NewPreds) {
  471. assert(Predecessors.empty() && "Block predecessors already set.");
  472. for (auto *Pred : NewPreds)
  473. appendPredecessor(Pred);
  474. }
  475. /// Remove all the predecessor of this block.
  476. void clearPredecessors() { Predecessors.clear(); }
  477. /// Remove all the successors of this block.
  478. void clearSuccessors() { Successors.clear(); }
  479. /// The method which generates the output IR that correspond to this
  480. /// VPBlockBase, thereby "executing" the VPlan.
  481. virtual void execute(VPTransformState *State) = 0;
  482. /// Delete all blocks reachable from a given VPBlockBase, inclusive.
  483. static void deleteCFG(VPBlockBase *Entry);
  484. /// Return true if it is legal to hoist instructions into this block.
  485. bool isLegalToHoistInto() {
  486. // There are currently no constraints that prevent an instruction to be
  487. // hoisted into a VPBlockBase.
  488. return true;
  489. }
  490. /// Replace all operands of VPUsers in the block with \p NewValue and also
  491. /// replaces all uses of VPValues defined in the block with NewValue.
  492. virtual void dropAllReferences(VPValue *NewValue) = 0;
  493. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  494. void printAsOperand(raw_ostream &OS, bool PrintType) const {
  495. OS << getName();
  496. }
  497. /// Print plain-text dump of this VPBlockBase to \p O, prefixing all lines
  498. /// with \p Indent. \p SlotTracker is used to print unnamed VPValue's using
  499. /// consequtive numbers.
  500. ///
  501. /// Note that the numbering is applied to the whole VPlan, so printing
  502. /// individual blocks is consistent with the whole VPlan printing.
  503. virtual void print(raw_ostream &O, const Twine &Indent,
  504. VPSlotTracker &SlotTracker) const = 0;
  505. /// Print plain-text dump of this VPlan to \p O.
  506. void print(raw_ostream &O) const {
  507. VPSlotTracker SlotTracker(getPlan());
  508. print(O, "", SlotTracker);
  509. }
  510. /// Print the successors of this block to \p O, prefixing all lines with \p
  511. /// Indent.
  512. void printSuccessors(raw_ostream &O, const Twine &Indent) const;
  513. /// Dump this VPBlockBase to dbgs().
  514. LLVM_DUMP_METHOD void dump() const { print(dbgs()); }
  515. #endif
  516. };
  517. /// A value that is used outside the VPlan. The operand of the user needs to be
  518. /// added to the associated LCSSA phi node.
  519. class VPLiveOut : public VPUser {
  520. PHINode *Phi;
  521. public:
  522. VPLiveOut(PHINode *Phi, VPValue *Op)
  523. : VPUser({Op}, VPUser::VPUserID::LiveOut), Phi(Phi) {}
  524. /// Fixup the wrapped LCSSA phi node in the unique exit block. This simply
  525. /// means we need to add the appropriate incoming value from the middle
  526. /// block as exiting edges from the scalar epilogue loop (if present) are
  527. /// already in place, and we exit the vector loop exclusively to the middle
  528. /// block.
  529. void fixPhi(VPlan &Plan, VPTransformState &State);
  530. /// Returns true if the VPLiveOut uses scalars of operand \p Op.
  531. bool usesScalars(const VPValue *Op) const override {
  532. assert(is_contained(operands(), Op) &&
  533. "Op must be an operand of the recipe");
  534. return true;
  535. }
  536. PHINode *getPhi() const { return Phi; }
  537. };
  538. /// VPRecipeBase is a base class modeling a sequence of one or more output IR
  539. /// instructions. VPRecipeBase owns the the VPValues it defines through VPDef
  540. /// and is responsible for deleting its defined values. Single-value
  541. /// VPRecipeBases that also inherit from VPValue must make sure to inherit from
  542. /// VPRecipeBase before VPValue.
  543. class VPRecipeBase : public ilist_node_with_parent<VPRecipeBase, VPBasicBlock>,
  544. public VPDef,
  545. public VPUser {
  546. friend VPBasicBlock;
  547. friend class VPBlockUtils;
  548. /// Each VPRecipe belongs to a single VPBasicBlock.
  549. VPBasicBlock *Parent = nullptr;
  550. public:
  551. VPRecipeBase(const unsigned char SC, ArrayRef<VPValue *> Operands)
  552. : VPDef(SC), VPUser(Operands, VPUser::VPUserID::Recipe) {}
  553. template <typename IterT>
  554. VPRecipeBase(const unsigned char SC, iterator_range<IterT> Operands)
  555. : VPDef(SC), VPUser(Operands, VPUser::VPUserID::Recipe) {}
  556. virtual ~VPRecipeBase() = default;
  557. /// \return the VPBasicBlock which this VPRecipe belongs to.
  558. VPBasicBlock *getParent() { return Parent; }
  559. const VPBasicBlock *getParent() const { return Parent; }
  560. /// The method which generates the output IR instructions that correspond to
  561. /// this VPRecipe, thereby "executing" the VPlan.
  562. virtual void execute(VPTransformState &State) = 0;
  563. /// Insert an unlinked recipe into a basic block immediately before
  564. /// the specified recipe.
  565. void insertBefore(VPRecipeBase *InsertPos);
  566. /// Insert an unlinked recipe into \p BB immediately before the insertion
  567. /// point \p IP;
  568. void insertBefore(VPBasicBlock &BB, iplist<VPRecipeBase>::iterator IP);
  569. /// Insert an unlinked Recipe into a basic block immediately after
  570. /// the specified Recipe.
  571. void insertAfter(VPRecipeBase *InsertPos);
  572. /// Unlink this recipe from its current VPBasicBlock and insert it into
  573. /// the VPBasicBlock that MovePos lives in, right after MovePos.
  574. void moveAfter(VPRecipeBase *MovePos);
  575. /// Unlink this recipe and insert into BB before I.
  576. ///
  577. /// \pre I is a valid iterator into BB.
  578. void moveBefore(VPBasicBlock &BB, iplist<VPRecipeBase>::iterator I);
  579. /// This method unlinks 'this' from the containing basic block, but does not
  580. /// delete it.
  581. void removeFromParent();
  582. /// This method unlinks 'this' from the containing basic block and deletes it.
  583. ///
  584. /// \returns an iterator pointing to the element after the erased one
  585. iplist<VPRecipeBase>::iterator eraseFromParent();
  586. /// Returns the underlying instruction, if the recipe is a VPValue or nullptr
  587. /// otherwise.
  588. Instruction *getUnderlyingInstr() {
  589. return cast<Instruction>(getVPSingleValue()->getUnderlyingValue());
  590. }
  591. const Instruction *getUnderlyingInstr() const {
  592. return cast<Instruction>(getVPSingleValue()->getUnderlyingValue());
  593. }
  594. /// Method to support type inquiry through isa, cast, and dyn_cast.
  595. static inline bool classof(const VPDef *D) {
  596. // All VPDefs are also VPRecipeBases.
  597. return true;
  598. }
  599. static inline bool classof(const VPUser *U) {
  600. return U->getVPUserID() == VPUser::VPUserID::Recipe;
  601. }
  602. /// Returns true if the recipe may have side-effects.
  603. bool mayHaveSideEffects() const;
  604. /// Returns true for PHI-like recipes.
  605. bool isPhi() const {
  606. return getVPDefID() >= VPFirstPHISC && getVPDefID() <= VPLastPHISC;
  607. }
  608. /// Returns true if the recipe may read from memory.
  609. bool mayReadFromMemory() const;
  610. /// Returns true if the recipe may write to memory.
  611. bool mayWriteToMemory() const;
  612. /// Returns true if the recipe may read from or write to memory.
  613. bool mayReadOrWriteMemory() const {
  614. return mayReadFromMemory() || mayWriteToMemory();
  615. }
  616. };
  617. // Helper macro to define common classof implementations for recipes.
  618. #define VP_CLASSOF_IMPL(VPDefID) \
  619. static inline bool classof(const VPDef *D) { \
  620. return D->getVPDefID() == VPDefID; \
  621. } \
  622. static inline bool classof(const VPValue *V) { \
  623. auto *R = V->getDefiningRecipe(); \
  624. return R && R->getVPDefID() == VPDefID; \
  625. } \
  626. static inline bool classof(const VPUser *U) { \
  627. auto *R = dyn_cast<VPRecipeBase>(U); \
  628. return R && R->getVPDefID() == VPDefID; \
  629. } \
  630. static inline bool classof(const VPRecipeBase *R) { \
  631. return R->getVPDefID() == VPDefID; \
  632. }
  633. /// This is a concrete Recipe that models a single VPlan-level instruction.
  634. /// While as any Recipe it may generate a sequence of IR instructions when
  635. /// executed, these instructions would always form a single-def expression as
  636. /// the VPInstruction is also a single def-use vertex.
  637. class VPInstruction : public VPRecipeBase, public VPValue {
  638. friend class VPlanSlp;
  639. public:
  640. /// VPlan opcodes, extending LLVM IR with idiomatics instructions.
  641. enum {
  642. FirstOrderRecurrenceSplice =
  643. Instruction::OtherOpsEnd + 1, // Combines the incoming and previous
  644. // values of a first-order recurrence.
  645. Not,
  646. ICmpULE,
  647. SLPLoad,
  648. SLPStore,
  649. ActiveLaneMask,
  650. CanonicalIVIncrement,
  651. CanonicalIVIncrementNUW,
  652. // The next two are similar to the above, but instead increment the
  653. // canonical IV separately for each unrolled part.
  654. CanonicalIVIncrementForPart,
  655. CanonicalIVIncrementForPartNUW,
  656. BranchOnCount,
  657. BranchOnCond
  658. };
  659. private:
  660. typedef unsigned char OpcodeTy;
  661. OpcodeTy Opcode;
  662. FastMathFlags FMF;
  663. DebugLoc DL;
  664. /// An optional name that can be used for the generated IR instruction.
  665. const std::string Name;
  666. /// Utility method serving execute(): generates a single instance of the
  667. /// modeled instruction.
  668. void generateInstruction(VPTransformState &State, unsigned Part);
  669. protected:
  670. void setUnderlyingInstr(Instruction *I) { setUnderlyingValue(I); }
  671. public:
  672. VPInstruction(unsigned Opcode, ArrayRef<VPValue *> Operands, DebugLoc DL,
  673. const Twine &Name = "")
  674. : VPRecipeBase(VPDef::VPInstructionSC, Operands), VPValue(this),
  675. Opcode(Opcode), DL(DL), Name(Name.str()) {}
  676. VPInstruction(unsigned Opcode, std::initializer_list<VPValue *> Operands,
  677. DebugLoc DL = {}, const Twine &Name = "")
  678. : VPInstruction(Opcode, ArrayRef<VPValue *>(Operands), DL, Name) {}
  679. VP_CLASSOF_IMPL(VPDef::VPInstructionSC)
  680. VPInstruction *clone() const {
  681. SmallVector<VPValue *, 2> Operands(operands());
  682. return new VPInstruction(Opcode, Operands, DL, Name);
  683. }
  684. unsigned getOpcode() const { return Opcode; }
  685. /// Generate the instruction.
  686. /// TODO: We currently execute only per-part unless a specific instance is
  687. /// provided.
  688. void execute(VPTransformState &State) override;
  689. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  690. /// Print the VPInstruction to \p O.
  691. void print(raw_ostream &O, const Twine &Indent,
  692. VPSlotTracker &SlotTracker) const override;
  693. /// Print the VPInstruction to dbgs() (for debugging).
  694. LLVM_DUMP_METHOD void dump() const;
  695. #endif
  696. /// Return true if this instruction may modify memory.
  697. bool mayWriteToMemory() const {
  698. // TODO: we can use attributes of the called function to rule out memory
  699. // modifications.
  700. return Opcode == Instruction::Store || Opcode == Instruction::Call ||
  701. Opcode == Instruction::Invoke || Opcode == SLPStore;
  702. }
  703. bool hasResult() const {
  704. // CallInst may or may not have a result, depending on the called function.
  705. // Conservatively return calls have results for now.
  706. switch (getOpcode()) {
  707. case Instruction::Ret:
  708. case Instruction::Br:
  709. case Instruction::Store:
  710. case Instruction::Switch:
  711. case Instruction::IndirectBr:
  712. case Instruction::Resume:
  713. case Instruction::CatchRet:
  714. case Instruction::Unreachable:
  715. case Instruction::Fence:
  716. case Instruction::AtomicRMW:
  717. case VPInstruction::BranchOnCond:
  718. case VPInstruction::BranchOnCount:
  719. return false;
  720. default:
  721. return true;
  722. }
  723. }
  724. /// Set the fast-math flags.
  725. void setFastMathFlags(FastMathFlags FMFNew);
  726. /// Returns true if the recipe only uses the first lane of operand \p Op.
  727. bool onlyFirstLaneUsed(const VPValue *Op) const override {
  728. assert(is_contained(operands(), Op) &&
  729. "Op must be an operand of the recipe");
  730. if (getOperand(0) != Op)
  731. return false;
  732. switch (getOpcode()) {
  733. default:
  734. return false;
  735. case VPInstruction::ActiveLaneMask:
  736. case VPInstruction::CanonicalIVIncrement:
  737. case VPInstruction::CanonicalIVIncrementNUW:
  738. case VPInstruction::CanonicalIVIncrementForPart:
  739. case VPInstruction::CanonicalIVIncrementForPartNUW:
  740. case VPInstruction::BranchOnCount:
  741. return true;
  742. };
  743. llvm_unreachable("switch should return");
  744. }
  745. };
  746. /// VPWidenRecipe is a recipe for producing a copy of vector type its
  747. /// ingredient. This recipe covers most of the traditional vectorization cases
  748. /// where each ingredient transforms into a vectorized version of itself.
  749. class VPWidenRecipe : public VPRecipeBase, public VPValue {
  750. public:
  751. template <typename IterT>
  752. VPWidenRecipe(Instruction &I, iterator_range<IterT> Operands)
  753. : VPRecipeBase(VPDef::VPWidenSC, Operands), VPValue(this, &I) {}
  754. ~VPWidenRecipe() override = default;
  755. VP_CLASSOF_IMPL(VPDef::VPWidenSC)
  756. /// Produce widened copies of all Ingredients.
  757. void execute(VPTransformState &State) override;
  758. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  759. /// Print the recipe.
  760. void print(raw_ostream &O, const Twine &Indent,
  761. VPSlotTracker &SlotTracker) const override;
  762. #endif
  763. };
  764. /// A recipe for widening Call instructions.
  765. class VPWidenCallRecipe : public VPRecipeBase, public VPValue {
  766. /// ID of the vector intrinsic to call when widening the call. If set the
  767. /// Intrinsic::not_intrinsic, a library call will be used instead.
  768. Intrinsic::ID VectorIntrinsicID;
  769. public:
  770. template <typename IterT>
  771. VPWidenCallRecipe(CallInst &I, iterator_range<IterT> CallArguments,
  772. Intrinsic::ID VectorIntrinsicID)
  773. : VPRecipeBase(VPDef::VPWidenCallSC, CallArguments), VPValue(this, &I),
  774. VectorIntrinsicID(VectorIntrinsicID) {}
  775. ~VPWidenCallRecipe() override = default;
  776. VP_CLASSOF_IMPL(VPDef::VPWidenCallSC)
  777. /// Produce a widened version of the call instruction.
  778. void execute(VPTransformState &State) override;
  779. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  780. /// Print the recipe.
  781. void print(raw_ostream &O, const Twine &Indent,
  782. VPSlotTracker &SlotTracker) const override;
  783. #endif
  784. };
  785. /// A recipe for widening select instructions.
  786. class VPWidenSelectRecipe : public VPRecipeBase, public VPValue {
  787. /// Is the condition of the select loop invariant?
  788. bool InvariantCond;
  789. public:
  790. template <typename IterT>
  791. VPWidenSelectRecipe(SelectInst &I, iterator_range<IterT> Operands,
  792. bool InvariantCond)
  793. : VPRecipeBase(VPDef::VPWidenSelectSC, Operands), VPValue(this, &I),
  794. InvariantCond(InvariantCond) {}
  795. ~VPWidenSelectRecipe() override = default;
  796. VP_CLASSOF_IMPL(VPDef::VPWidenSelectSC)
  797. /// Produce a widened version of the select instruction.
  798. void execute(VPTransformState &State) override;
  799. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  800. /// Print the recipe.
  801. void print(raw_ostream &O, const Twine &Indent,
  802. VPSlotTracker &SlotTracker) const override;
  803. #endif
  804. };
  805. /// A recipe for handling GEP instructions.
  806. class VPWidenGEPRecipe : public VPRecipeBase, public VPValue {
  807. bool IsPtrLoopInvariant;
  808. SmallBitVector IsIndexLoopInvariant;
  809. public:
  810. template <typename IterT>
  811. VPWidenGEPRecipe(GetElementPtrInst *GEP, iterator_range<IterT> Operands)
  812. : VPRecipeBase(VPDef::VPWidenGEPSC, Operands), VPValue(this, GEP),
  813. IsIndexLoopInvariant(GEP->getNumIndices(), false) {}
  814. template <typename IterT>
  815. VPWidenGEPRecipe(GetElementPtrInst *GEP, iterator_range<IterT> Operands,
  816. Loop *OrigLoop)
  817. : VPRecipeBase(VPDef::VPWidenGEPSC, Operands), VPValue(this, GEP),
  818. IsIndexLoopInvariant(GEP->getNumIndices(), false) {
  819. IsPtrLoopInvariant = OrigLoop->isLoopInvariant(GEP->getPointerOperand());
  820. for (auto Index : enumerate(GEP->indices()))
  821. IsIndexLoopInvariant[Index.index()] =
  822. OrigLoop->isLoopInvariant(Index.value().get());
  823. }
  824. ~VPWidenGEPRecipe() override = default;
  825. VP_CLASSOF_IMPL(VPDef::VPWidenGEPSC)
  826. /// Generate the gep nodes.
  827. void execute(VPTransformState &State) override;
  828. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  829. /// Print the recipe.
  830. void print(raw_ostream &O, const Twine &Indent,
  831. VPSlotTracker &SlotTracker) const override;
  832. #endif
  833. };
  834. /// A recipe for handling phi nodes of integer and floating-point inductions,
  835. /// producing their vector values.
  836. class VPWidenIntOrFpInductionRecipe : public VPRecipeBase, public VPValue {
  837. PHINode *IV;
  838. const InductionDescriptor &IndDesc;
  839. bool NeedsVectorIV;
  840. public:
  841. VPWidenIntOrFpInductionRecipe(PHINode *IV, VPValue *Start, VPValue *Step,
  842. const InductionDescriptor &IndDesc,
  843. bool NeedsVectorIV)
  844. : VPRecipeBase(VPDef::VPWidenIntOrFpInductionSC, {Start, Step}),
  845. VPValue(this, IV), IV(IV), IndDesc(IndDesc),
  846. NeedsVectorIV(NeedsVectorIV) {}
  847. VPWidenIntOrFpInductionRecipe(PHINode *IV, VPValue *Start, VPValue *Step,
  848. const InductionDescriptor &IndDesc,
  849. TruncInst *Trunc, bool NeedsVectorIV)
  850. : VPRecipeBase(VPDef::VPWidenIntOrFpInductionSC, {Start, Step}),
  851. VPValue(this, Trunc), IV(IV), IndDesc(IndDesc),
  852. NeedsVectorIV(NeedsVectorIV) {}
  853. ~VPWidenIntOrFpInductionRecipe() override = default;
  854. VP_CLASSOF_IMPL(VPDef::VPWidenIntOrFpInductionSC)
  855. /// Generate the vectorized and scalarized versions of the phi node as
  856. /// needed by their users.
  857. void execute(VPTransformState &State) override;
  858. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  859. /// Print the recipe.
  860. void print(raw_ostream &O, const Twine &Indent,
  861. VPSlotTracker &SlotTracker) const override;
  862. #endif
  863. /// Returns the start value of the induction.
  864. VPValue *getStartValue() { return getOperand(0); }
  865. const VPValue *getStartValue() const { return getOperand(0); }
  866. /// Returns the step value of the induction.
  867. VPValue *getStepValue() { return getOperand(1); }
  868. const VPValue *getStepValue() const { return getOperand(1); }
  869. /// Returns the first defined value as TruncInst, if it is one or nullptr
  870. /// otherwise.
  871. TruncInst *getTruncInst() {
  872. return dyn_cast_or_null<TruncInst>(getVPValue(0)->getUnderlyingValue());
  873. }
  874. const TruncInst *getTruncInst() const {
  875. return dyn_cast_or_null<TruncInst>(getVPValue(0)->getUnderlyingValue());
  876. }
  877. PHINode *getPHINode() { return IV; }
  878. /// Returns the induction descriptor for the recipe.
  879. const InductionDescriptor &getInductionDescriptor() const { return IndDesc; }
  880. /// Returns true if the induction is canonical, i.e. starting at 0 and
  881. /// incremented by UF * VF (= the original IV is incremented by 1).
  882. bool isCanonical() const;
  883. /// Returns the scalar type of the induction.
  884. const Type *getScalarType() const {
  885. const TruncInst *TruncI = getTruncInst();
  886. return TruncI ? TruncI->getType() : IV->getType();
  887. }
  888. /// Returns true if a vector phi needs to be created for the induction.
  889. bool needsVectorIV() const { return NeedsVectorIV; }
  890. };
  891. /// A pure virtual base class for all recipes modeling header phis, including
  892. /// phis for first order recurrences, pointer inductions and reductions. The
  893. /// start value is the first operand of the recipe and the incoming value from
  894. /// the backedge is the second operand.
  895. ///
  896. /// Inductions are modeled using the following sub-classes:
  897. /// * VPCanonicalIVPHIRecipe: Canonical scalar induction of the vector loop,
  898. /// starting at a specified value (zero for the main vector loop, the resume
  899. /// value for the epilogue vector loop) and stepping by 1. The induction
  900. /// controls exiting of the vector loop by comparing against the vector trip
  901. /// count. Produces a single scalar PHI for the induction value per
  902. /// iteration.
  903. /// * VPWidenIntOrFpInductionRecipe: Generates vector values for integer and
  904. /// floating point inductions with arbitrary start and step values. Produces
  905. /// a vector PHI per-part.
  906. /// * VPDerivedIVRecipe: Converts the canonical IV value to the corresponding
  907. /// value of an IV with different start and step values. Produces a single
  908. /// scalar value per iteration
  909. /// * VPScalarIVStepsRecipe: Generates scalar values per-lane based on a
  910. /// canonical or derived induction.
  911. /// * VPWidenPointerInductionRecipe: Generate vector and scalar values for a
  912. /// pointer induction. Produces either a vector PHI per-part or scalar values
  913. /// per-lane based on the canonical induction.
  914. class VPHeaderPHIRecipe : public VPRecipeBase, public VPValue {
  915. protected:
  916. VPHeaderPHIRecipe(unsigned char VPDefID, PHINode *Phi,
  917. VPValue *Start = nullptr)
  918. : VPRecipeBase(VPDefID, {}), VPValue(this, Phi) {
  919. if (Start)
  920. addOperand(Start);
  921. }
  922. public:
  923. ~VPHeaderPHIRecipe() override = default;
  924. /// Method to support type inquiry through isa, cast, and dyn_cast.
  925. static inline bool classof(const VPRecipeBase *B) {
  926. return B->getVPDefID() >= VPDef::VPFirstHeaderPHISC &&
  927. B->getVPDefID() <= VPDef::VPLastPHISC;
  928. }
  929. static inline bool classof(const VPValue *V) {
  930. auto *B = V->getDefiningRecipe();
  931. return B && B->getVPDefID() >= VPRecipeBase::VPFirstHeaderPHISC &&
  932. B->getVPDefID() <= VPRecipeBase::VPLastPHISC;
  933. }
  934. /// Generate the phi nodes.
  935. void execute(VPTransformState &State) override = 0;
  936. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  937. /// Print the recipe.
  938. void print(raw_ostream &O, const Twine &Indent,
  939. VPSlotTracker &SlotTracker) const override = 0;
  940. #endif
  941. /// Returns the start value of the phi, if one is set.
  942. VPValue *getStartValue() {
  943. return getNumOperands() == 0 ? nullptr : getOperand(0);
  944. }
  945. VPValue *getStartValue() const {
  946. return getNumOperands() == 0 ? nullptr : getOperand(0);
  947. }
  948. /// Update the start value of the recipe.
  949. void setStartValue(VPValue *V) { setOperand(0, V); }
  950. /// Returns the incoming value from the loop backedge.
  951. VPValue *getBackedgeValue() {
  952. return getOperand(1);
  953. }
  954. /// Returns the backedge value as a recipe. The backedge value is guaranteed
  955. /// to be a recipe.
  956. VPRecipeBase &getBackedgeRecipe() {
  957. return *getBackedgeValue()->getDefiningRecipe();
  958. }
  959. };

class VPWidenPointerInductionRecipe : public VPHeaderPHIRecipe {
  const InductionDescriptor &IndDesc;

  bool IsScalarAfterVectorization;

public:
  /// Create a new VPWidenPointerInductionRecipe for \p Phi with start value \p
  /// Start.
  VPWidenPointerInductionRecipe(PHINode *Phi, VPValue *Start, VPValue *Step,
                                const InductionDescriptor &IndDesc,
                                bool IsScalarAfterVectorization)
      : VPHeaderPHIRecipe(VPDef::VPWidenPointerInductionSC, Phi),
        IndDesc(IndDesc),
        IsScalarAfterVectorization(IsScalarAfterVectorization) {
    addOperand(Start);
    addOperand(Step);
  }

  ~VPWidenPointerInductionRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPWidenPointerInductionSC)

  /// Generate vector values for the pointer induction.
  void execute(VPTransformState &State) override;

  /// Returns true if only scalar values will be generated.
  bool onlyScalarsGenerated(ElementCount VF);

  /// Returns the induction descriptor for the recipe.
  const InductionDescriptor &getInductionDescriptor() const { return IndDesc; }

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A recipe for handling header phis that are widened in the vector loop.
/// In the VPlan native path, all incoming VPValues & VPBasicBlock pairs are
/// managed in the recipe directly.
class VPWidenPHIRecipe : public VPHeaderPHIRecipe {
  /// List of incoming blocks. Only used in the VPlan native path.
  SmallVector<VPBasicBlock *, 2> IncomingBlocks;

public:
  /// Create a new VPWidenPHIRecipe for \p Phi with start value \p Start.
  VPWidenPHIRecipe(PHINode *Phi, VPValue *Start = nullptr)
      : VPHeaderPHIRecipe(VPDef::VPWidenPHISC, Phi) {
    if (Start)
      addOperand(Start);
  }

  ~VPWidenPHIRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPWidenPHISC)

  /// Generate the phi/select nodes.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Adds a pair (\p IncomingV, \p IncomingBlock) to the phi.
  void addIncoming(VPValue *IncomingV, VPBasicBlock *IncomingBlock) {
    addOperand(IncomingV);
    IncomingBlocks.push_back(IncomingBlock);
  }

  /// Returns the \p I th incoming VPBasicBlock.
  VPBasicBlock *getIncomingBlock(unsigned I) { return IncomingBlocks[I]; }

  /// Returns the \p I th incoming VPValue.
  VPValue *getIncomingValue(unsigned I) { return getOperand(I); }
};
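
// Illustrative sketch (not part of the original header): in the VPlan native
// path, incoming values and blocks are registered pairwise, assuming
// `WidenPhi` was created for a phi with two predecessors:
//
//   WidenPhi->addIncoming(IncomingVal0, PredVPBB0);
//   WidenPhi->addIncoming(IncomingVal1, PredVPBB1);
//   assert(WidenPhi->getIncomingValue(1) == IncomingVal1);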

/// A recipe for handling first-order recurrence phis. The start value is the
/// first operand of the recipe and the incoming value from the backedge is the
/// second operand.
struct VPFirstOrderRecurrencePHIRecipe : public VPHeaderPHIRecipe {
  VPFirstOrderRecurrencePHIRecipe(PHINode *Phi, VPValue &Start)
      : VPHeaderPHIRecipe(VPDef::VPFirstOrderRecurrencePHISC, Phi, &Start) {}

  VP_CLASSOF_IMPL(VPDef::VPFirstOrderRecurrencePHISC)

  static inline bool classof(const VPHeaderPHIRecipe *R) {
    return R->getVPDefID() == VPDef::VPFirstOrderRecurrencePHISC;
  }

  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A recipe for handling reduction phis. The start value is the first operand
/// of the recipe and the incoming value from the backedge is the second
/// operand.
class VPReductionPHIRecipe : public VPHeaderPHIRecipe {
  /// Descriptor for the reduction.
  const RecurrenceDescriptor &RdxDesc;

  /// The phi is part of an in-loop reduction.
  bool IsInLoop;

  /// The phi is part of an ordered reduction. Requires IsInLoop to be true.
  bool IsOrdered;

public:
  /// Create a new VPReductionPHIRecipe for the reduction \p Phi described by
  /// \p RdxDesc.
  VPReductionPHIRecipe(PHINode *Phi, const RecurrenceDescriptor &RdxDesc,
                       VPValue &Start, bool IsInLoop = false,
                       bool IsOrdered = false)
      : VPHeaderPHIRecipe(VPDef::VPReductionPHISC, Phi, &Start),
        RdxDesc(RdxDesc), IsInLoop(IsInLoop), IsOrdered(IsOrdered) {
    assert((!IsOrdered || IsInLoop) && "IsOrdered requires IsInLoop");
  }

  ~VPReductionPHIRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPReductionPHISC)

  static inline bool classof(const VPHeaderPHIRecipe *R) {
    return R->getVPDefID() == VPDef::VPReductionPHISC;
  }

  /// Generate the phi/select nodes.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns the recurrence descriptor for the reduction.
  const RecurrenceDescriptor &getRecurrenceDescriptor() const {
    return RdxDesc;
  }

  /// Returns true, if the phi is part of an ordered reduction.
  bool isOrdered() const { return IsOrdered; }

  /// Returns true, if the phi is part of an in-loop reduction.
  bool isInLoop() const { return IsInLoop; }
};
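
// Illustrative sketch (not part of the original header): an ordered reduction
// phi must also be in-loop, so constructing one might look like the following,
// assuming `Phi`, `RdxDesc` and `StartV` come from the legality analysis:
//
//   auto *RedPhi = new VPReductionPHIRecipe(Phi, RdxDesc, *StartV,
//                                           /*IsInLoop=*/true,
//                                           /*IsOrdered=*/true);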

/// A recipe for vectorizing a phi-node as a sequence of mask-based select
/// instructions.
class VPBlendRecipe : public VPRecipeBase, public VPValue {
  PHINode *Phi;

public:
  /// The blend operation is a User of the incoming values and of their
  /// respective masks, ordered [I0, M0, I1, M1, ...]. Note that a single value
  /// might be incoming with a full mask for which there is no VPValue.
  VPBlendRecipe(PHINode *Phi, ArrayRef<VPValue *> Operands)
      : VPRecipeBase(VPDef::VPBlendSC, Operands), VPValue(this, Phi), Phi(Phi) {
    assert(Operands.size() > 0 &&
           ((Operands.size() == 1) || (Operands.size() % 2 == 0)) &&
           "Expected either a single incoming value or a positive even number "
           "of operands");
  }

  VP_CLASSOF_IMPL(VPDef::VPBlendSC)

  /// Return the number of incoming values, taking into account that a single
  /// incoming value has no mask.
  unsigned getNumIncomingValues() const { return (getNumOperands() + 1) / 2; }

  /// Return incoming value number \p Idx.
  VPValue *getIncomingValue(unsigned Idx) const { return getOperand(Idx * 2); }

  /// Return mask number \p Idx.
  VPValue *getMask(unsigned Idx) const { return getOperand(Idx * 2 + 1); }

  /// Generate the phi/select nodes.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    // Recursing through Blend recipes only, must terminate at header phis at
    // the latest.
    return all_of(users(),
                  [this](VPUser *U) { return U->onlyFirstLaneUsed(this); });
  }
};
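
// Illustrative sketch (not part of the original header): with the operand
// layout [I0, M0, I1, M1, ...], iterating the incoming value/mask pairs of a
// blend recipe `Blend` might look as follows:
//
//   // Assumes more than one incoming value; a blend with a single incoming
//   // value carries no mask operand.
//   for (unsigned I = 0, E = Blend->getNumIncomingValues(); I != E; ++I) {
//     VPValue *Inc = Blend->getIncomingValue(I); // operand 2 * I
//     VPValue *Mask = Blend->getMask(I);         // operand 2 * I + 1
//   }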

/// VPInterleaveRecipe is a recipe for transforming an interleave group of loads
/// or stores into one wide load/store and shuffles. The first operand of a
/// VPInterleave recipe is the address, followed by the stored values, followed
/// by an optional mask.
class VPInterleaveRecipe : public VPRecipeBase {
  const InterleaveGroup<Instruction> *IG;

  bool HasMask = false;

public:
  VPInterleaveRecipe(const InterleaveGroup<Instruction> *IG, VPValue *Addr,
                     ArrayRef<VPValue *> StoredValues, VPValue *Mask)
      : VPRecipeBase(VPDef::VPInterleaveSC, {Addr}), IG(IG) {
    for (unsigned i = 0; i < IG->getFactor(); ++i)
      if (Instruction *I = IG->getMember(i)) {
        if (I->getType()->isVoidTy())
          continue;
        new VPValue(I, this);
      }

    for (auto *SV : StoredValues)
      addOperand(SV);
    if (Mask) {
      HasMask = true;
      addOperand(Mask);
    }
  }

  ~VPInterleaveRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPInterleaveSC)

  /// Return the address accessed by this recipe.
  VPValue *getAddr() const {
    return getOperand(0); // Address is the 1st, mandatory operand.
  }

  /// Return the mask used by this recipe. Note that a full mask is represented
  /// by a nullptr.
  VPValue *getMask() const {
    // Mask is optional and therefore the last operand.
    return HasMask ? getOperand(getNumOperands() - 1) : nullptr;
  }

  /// Return the VPValues stored by this interleave group. If it is a load
  /// interleave group, return an empty ArrayRef.
  ArrayRef<VPValue *> getStoredValues() const {
    // The first operand is the address, followed by the stored values,
    // followed by an optional mask.
    return ArrayRef<VPValue *>(op_begin(), getNumOperands())
        .slice(1, getNumStoreOperands());
  }

  /// Generate the wide load or store, and shuffles.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Return the interleave group this recipe was created for.
  const InterleaveGroup<Instruction> *getInterleaveGroup() { return IG; }

  /// Returns the number of stored operands of this interleave group. Returns 0
  /// for load interleave groups.
  unsigned getNumStoreOperands() const {
    return getNumOperands() - (HasMask ? 2 : 1);
  }

  /// The recipe only uses the first lane of the address.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return Op == getAddr() && !llvm::is_contained(getStoredValues(), Op);
  }
};
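
// Illustrative sketch (not part of the original header): the operand layout of
// a masked store interleave group with two members is [Addr, SV0, SV1, Mask],
// so for such a recipe `IR`:
//
//   IR->getAddr();             // operand 0
//   IR->getNumStoreOperands(); // 4 - 2 == 2
//   IR->getStoredValues();     // operands 1 and 2
//   IR->getMask();             // operand 3, the last operand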

/// A recipe to represent inloop reduction operations, performing a reduction on
/// a vector operand into a scalar value, and adding the result to a chain.
/// The Operands are {ChainOp, VecOp, [Condition]}.
class VPReductionRecipe : public VPRecipeBase, public VPValue {
  /// The recurrence descriptor for the reduction in question.
  const RecurrenceDescriptor *RdxDesc;

  /// Pointer to the TTI, needed to create the target reduction.
  const TargetTransformInfo *TTI;

public:
  VPReductionRecipe(const RecurrenceDescriptor *R, Instruction *I,
                    VPValue *ChainOp, VPValue *VecOp, VPValue *CondOp,
                    const TargetTransformInfo *TTI)
      : VPRecipeBase(VPDef::VPReductionSC, {ChainOp, VecOp}), VPValue(this, I),
        RdxDesc(R), TTI(TTI) {
    if (CondOp)
      addOperand(CondOp);
  }

  ~VPReductionRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPReductionSC)

  /// Generate the reduction in the loop.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// The VPValue of the scalar Chain being accumulated.
  VPValue *getChainOp() const { return getOperand(0); }

  /// The VPValue of the vector value to be reduced.
  VPValue *getVecOp() const { return getOperand(1); }

  /// The VPValue of the condition for the block.
  VPValue *getCondOp() const {
    return getNumOperands() > 2 ? getOperand(2) : nullptr;
  }
};
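
// Illustrative sketch (not part of the original header): the condition operand
// is optional, so clients of a VPReductionRecipe `Red` must handle its
// absence, e.g.:
//
//   if (VPValue *Cond = Red->getCondOp()) {
//     // Predicated reduction: apply Cond to the reduced vector operand.
//   } else {
//     // Unconditional reduction of Red->getVecOp() into Red->getChainOp().
//   }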

/// VPReplicateRecipe replicates a given instruction producing multiple scalar
/// copies of the original scalar type, one per lane, instead of producing a
/// single copy of widened type for all lanes. If the instruction is known to be
/// uniform only one copy, per lane zero, will be generated.
class VPReplicateRecipe : public VPRecipeBase, public VPValue {
  /// Indicator if only a single replica per lane is needed.
  bool IsUniform;

  /// Indicator if the replicas are also predicated.
  bool IsPredicated;

  /// Indicator if the scalar values should also be packed into a vector.
  bool AlsoPack;

public:
  template <typename IterT>
  VPReplicateRecipe(Instruction *I, iterator_range<IterT> Operands,
                    bool IsUniform, bool IsPredicated = false)
      : VPRecipeBase(VPDef::VPReplicateSC, Operands), VPValue(this, I),
        IsUniform(IsUniform), IsPredicated(IsPredicated) {
    // Retain the previous behavior of predicateInstructions(), where an
    // insert-element of a predicated instruction got hoisted into the
    // predicated basic block iff it was its only user. This is achieved by
    // having predicated instructions also pack their values into a vector by
    // default unless they have a replicated user which uses their scalar value.
    AlsoPack = IsPredicated && !I->use_empty();
  }

  ~VPReplicateRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPReplicateSC)

  /// Generate replicas of the desired Ingredient. Replicas will be generated
  /// for all parts and lanes unless a specific part and lane are specified in
  /// the \p State.
  void execute(VPTransformState &State) override;

  void setAlsoPack(bool Pack) { AlsoPack = Pack; }

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  bool isUniform() const { return IsUniform; }

  bool isPacked() const { return AlsoPack; }

  bool isPredicated() const { return IsPredicated; }

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return isUniform();
  }

  /// Returns true if the recipe uses scalars of operand \p Op.
  bool usesScalars(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return true;
  }
};

/// A recipe for generating conditional branches on the bits of a mask.
class VPBranchOnMaskRecipe : public VPRecipeBase {
public:
  VPBranchOnMaskRecipe(VPValue *BlockInMask)
      : VPRecipeBase(VPDef::VPBranchOnMaskSC, {}) {
    if (BlockInMask) // nullptr means all-one mask.
      addOperand(BlockInMask);
  }

  VP_CLASSOF_IMPL(VPDef::VPBranchOnMaskSC)

  /// Generate the extraction of the appropriate bit from the block mask and the
  /// conditional branch.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override {
    O << Indent << "BRANCH-ON-MASK ";
    if (VPValue *Mask = getMask())
      Mask->printAsOperand(O, SlotTracker);
    else
      O << " All-One";
  }
#endif

  /// Return the mask used by this recipe. Note that a full mask is represented
  /// by a nullptr.
  VPValue *getMask() const {
    assert(getNumOperands() <= 1 && "should have either 0 or 1 operands");
    // Mask is optional.
    return getNumOperands() == 1 ? getOperand(0) : nullptr;
  }

  /// Returns true if the recipe uses scalars of operand \p Op.
  bool usesScalars(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return true;
  }
};

/// VPPredInstPHIRecipe is a recipe for generating the phi nodes needed when
/// control converges back from a Branch-on-Mask. The phi nodes are needed in
/// order to merge values that are set under such a branch and feed their uses.
/// The phi nodes can be scalar or vector depending on the users of the value.
/// This recipe works in concert with VPBranchOnMaskRecipe.
class VPPredInstPHIRecipe : public VPRecipeBase, public VPValue {
public:
  /// Construct a VPPredInstPHIRecipe given \p PredV, whose value needs phi
  /// nodes after merging back from a Branch-on-Mask.
  VPPredInstPHIRecipe(VPValue *PredV)
      : VPRecipeBase(VPDef::VPPredInstPHISC, PredV), VPValue(this) {}
  ~VPPredInstPHIRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPPredInstPHISC)

  /// Generates phi nodes for live-outs as needed to retain SSA form.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns true if the recipe uses scalars of operand \p Op.
  bool usesScalars(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return true;
  }
};

/// A Recipe for widening load/store operations.
/// The recipe uses the following VPValues:
/// - For load: Address, optional mask
/// - For store: Address, stored value, optional mask
/// TODO: We currently execute only per-part unless a specific instance is
/// provided.
class VPWidenMemoryInstructionRecipe : public VPRecipeBase {
  Instruction &Ingredient;

  // Whether the loaded-from / stored-to addresses are consecutive.
  bool Consecutive;

  // Whether the consecutive loaded/stored addresses are in reverse order.
  bool Reverse;

  void setMask(VPValue *Mask) {
    if (!Mask)
      return;
    addOperand(Mask);
  }

  bool isMasked() const {
    return isStore() ? getNumOperands() == 3 : getNumOperands() == 2;
  }

public:
  VPWidenMemoryInstructionRecipe(LoadInst &Load, VPValue *Addr, VPValue *Mask,
                                 bool Consecutive, bool Reverse)
      : VPRecipeBase(VPDef::VPWidenMemoryInstructionSC, {Addr}),
        Ingredient(Load), Consecutive(Consecutive), Reverse(Reverse) {
    assert((Consecutive || !Reverse) && "Reverse implies consecutive");
    new VPValue(this, &Load);
    setMask(Mask);
  }

  VPWidenMemoryInstructionRecipe(StoreInst &Store, VPValue *Addr,
                                 VPValue *StoredValue, VPValue *Mask,
                                 bool Consecutive, bool Reverse)
      : VPRecipeBase(VPDef::VPWidenMemoryInstructionSC, {Addr, StoredValue}),
        Ingredient(Store), Consecutive(Consecutive), Reverse(Reverse) {
    assert((Consecutive || !Reverse) && "Reverse implies consecutive");
    setMask(Mask);
  }

  VP_CLASSOF_IMPL(VPDef::VPWidenMemoryInstructionSC)

  /// Return the address accessed by this recipe.
  VPValue *getAddr() const {
    return getOperand(0); // Address is the 1st, mandatory operand.
  }

  /// Return the mask used by this recipe. Note that a full mask is represented
  /// by a nullptr.
  VPValue *getMask() const {
    // Mask is optional and therefore the last operand.
    return isMasked() ? getOperand(getNumOperands() - 1) : nullptr;
  }

  /// Returns true if this recipe is a store.
  bool isStore() const { return isa<StoreInst>(Ingredient); }

  /// Return the value stored by this recipe.
  VPValue *getStoredValue() const {
    assert(isStore() && "Stored value only available for store instructions");
    return getOperand(1); // Stored value is the 2nd, mandatory operand.
  }

  // Return whether the loaded-from / stored-to addresses are consecutive.
  bool isConsecutive() const { return Consecutive; }

  // Return whether the consecutive loaded/stored addresses are in reverse
  // order.
  bool isReverse() const { return Reverse; }

  /// Generate the wide load/store.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    // Widened, consecutive memory operations only demand the first lane of
    // their address, unless the same operand is also stored. That latter can
    // happen with opaque pointers.
    return Op == getAddr() && isConsecutive() &&
           (!isStore() || Op != getStoredValue());
  }

  Instruction &getIngredient() const { return Ingredient; }
};
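
// Illustrative sketch (not part of the original header): the operand count
// encodes both the access kind and whether it is masked. A load recipe has
// operands [Addr] or [Addr, Mask]; a store has [Addr, StoredValue] or
// [Addr, StoredValue, Mask]. A client might therefore write:
//
//   VPValue *Stored = MemR->isStore() ? MemR->getStoredValue() : nullptr;
//   VPValue *Mask = MemR->getMask(); // nullptr means an all-one mask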

/// Recipe to expand a SCEV expression.
class VPExpandSCEVRecipe : public VPRecipeBase, public VPValue {
  const SCEV *Expr;
  ScalarEvolution &SE;

public:
  VPExpandSCEVRecipe(const SCEV *Expr, ScalarEvolution &SE)
      : VPRecipeBase(VPDef::VPExpandSCEVSC, {}), VPValue(this), Expr(Expr),
        SE(SE) {}

  ~VPExpandSCEVRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPExpandSCEVSC)

  /// Generate a value by expanding the SCEV expression.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  const SCEV *getSCEV() const { return Expr; }
};

/// Canonical scalar induction phi of the vector loop. It starts at the
/// specified start value (either 0 or the resume value when vectorizing the
/// epilogue loop). VPWidenCanonicalIVRecipe represents the vector version of
/// the canonical induction variable.
class VPCanonicalIVPHIRecipe : public VPHeaderPHIRecipe {
  DebugLoc DL;

public:
  VPCanonicalIVPHIRecipe(VPValue *StartV, DebugLoc DL)
      : VPHeaderPHIRecipe(VPDef::VPCanonicalIVPHISC, nullptr, StartV), DL(DL) {}

  ~VPCanonicalIVPHIRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPCanonicalIVPHISC)

  static inline bool classof(const VPHeaderPHIRecipe *D) {
    return D->getVPDefID() == VPDef::VPCanonicalIVPHISC;
  }

  /// Generate the canonical scalar induction phi of the vector loop.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns the scalar type of the induction.
  const Type *getScalarType() const {
    return getOperand(0)->getLiveInIRValue()->getType();
  }

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return true;
  }

  /// Check if the induction described by \p ID is canonical, i.e. has the same
  /// start, step (of 1), and type as the canonical IV.
  bool isCanonical(const InductionDescriptor &ID, Type *Ty) const;
};

/// A recipe for generating the active lane mask for the vector loop that is
/// used to predicate the vector operations.
/// TODO: It would be good to use the existing VPWidenPHIRecipe instead and
/// remove VPActiveLaneMaskPHIRecipe.
class VPActiveLaneMaskPHIRecipe : public VPHeaderPHIRecipe {
  DebugLoc DL;

public:
  VPActiveLaneMaskPHIRecipe(VPValue *StartMask, DebugLoc DL)
      : VPHeaderPHIRecipe(VPDef::VPActiveLaneMaskPHISC, nullptr, StartMask),
        DL(DL) {}

  ~VPActiveLaneMaskPHIRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPActiveLaneMaskPHISC)

  static inline bool classof(const VPHeaderPHIRecipe *D) {
    return D->getVPDefID() == VPDef::VPActiveLaneMaskPHISC;
  }

  /// Generate the active lane mask phi of the vector loop.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A Recipe for widening the canonical induction variable of the vector loop.
class VPWidenCanonicalIVRecipe : public VPRecipeBase, public VPValue {
public:
  VPWidenCanonicalIVRecipe(VPCanonicalIVPHIRecipe *CanonicalIV)
      : VPRecipeBase(VPDef::VPWidenCanonicalIVSC, {CanonicalIV}),
        VPValue(this) {}

  ~VPWidenCanonicalIVRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPWidenCanonicalIVSC)

  /// Generate a canonical vector induction variable of the vector loop, with
  /// start = {<Part*VF, Part*VF+1, ..., Part*VF+VF-1> for 0 <= Part < UF}, and
  /// step = <VF*UF, VF*UF, ..., VF*UF>.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns the scalar type of the induction.
  const Type *getScalarType() const {
    return cast<VPCanonicalIVPHIRecipe>(getOperand(0)->getDefiningRecipe())
        ->getScalarType();
  }
};
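
// Worked example (illustrative, not part of the original header): for VF = 4
// and UF = 2, the widened canonical IV starts at <0,1,2,3> for part 0 and
// <4,5,6,7> for part 1, and both parts are advanced by the splat step
// <8,8,8,8> (VF * UF = 8) on every vector-loop iteration.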

/// A recipe for converting the canonical IV value to the corresponding value of
/// an IV with different start and step values, using Start + CanonicalIV *
/// Step.
class VPDerivedIVRecipe : public VPRecipeBase, public VPValue {
  /// The type of the result value. It may be smaller than the type of the
  /// induction and in this case it will get truncated to ResultTy.
  Type *ResultTy;

  /// Induction descriptor for the induction the canonical IV is transformed to.
  const InductionDescriptor &IndDesc;

public:
  VPDerivedIVRecipe(const InductionDescriptor &IndDesc, VPValue *Start,
                    VPCanonicalIVPHIRecipe *CanonicalIV, VPValue *Step,
                    Type *ResultTy)
      : VPRecipeBase(VPDef::VPDerivedIVSC, {Start, CanonicalIV, Step}),
        VPValue(this), ResultTy(ResultTy), IndDesc(IndDesc) {}

  ~VPDerivedIVRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPDerivedIVSC)

  /// Generate the transformed value of the induction at offset StartValue (1st
  /// operand) + IV (2nd operand) * StepValue (3rd operand).
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  VPValue *getStartValue() const { return getOperand(0); }
  VPValue *getCanonicalIV() const { return getOperand(1); }
  VPValue *getStepValue() const { return getOperand(2); }

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return true;
  }
};
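
// Worked example (illustrative, not part of the original header): for an IV
// with start 16 and step -2, the derived value at canonical IV value 5 is
// 16 + 5 * (-2) == 6, matching five executions of the original scalar
// induction update.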

/// A recipe for handling phi nodes of integer and floating-point inductions,
/// producing their scalar values.
class VPScalarIVStepsRecipe : public VPRecipeBase, public VPValue {
  const InductionDescriptor &IndDesc;

public:
  VPScalarIVStepsRecipe(const InductionDescriptor &IndDesc, VPValue *IV,
                        VPValue *Step)
      : VPRecipeBase(VPDef::VPScalarIVStepsSC, {IV, Step}), VPValue(this),
        IndDesc(IndDesc) {}

  ~VPScalarIVStepsRecipe() override = default;

  VP_CLASSOF_IMPL(VPDef::VPScalarIVStepsSC)

  /// Generate the scalarized versions of the phi node as needed by their users.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  VPValue *getStepValue() const { return getOperand(1); }

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return true;
  }
};

/// VPBasicBlock serves as the leaf of the Hierarchical Control-Flow Graph. It
/// holds a sequence of zero or more VPRecipes, each representing a sequence of
/// output IR instructions. All PHI-like recipes must come before any non-PHI
/// recipes.
class VPBasicBlock : public VPBlockBase {
public:
  using RecipeListTy = iplist<VPRecipeBase>;

private:
  /// The VPRecipes held in the order of output instructions to generate.
  RecipeListTy Recipes;

public:
  VPBasicBlock(const Twine &Name = "", VPRecipeBase *Recipe = nullptr)
      : VPBlockBase(VPBasicBlockSC, Name.str()) {
    if (Recipe)
      appendRecipe(Recipe);
  }

  ~VPBasicBlock() override {
    while (!Recipes.empty())
      Recipes.pop_back();
  }

  /// Instruction iterators...
  using iterator = RecipeListTy::iterator;
  using const_iterator = RecipeListTy::const_iterator;
  using reverse_iterator = RecipeListTy::reverse_iterator;
  using const_reverse_iterator = RecipeListTy::const_reverse_iterator;

  //===--------------------------------------------------------------------===//
  /// Recipe iterator methods
  ///
  inline iterator begin() { return Recipes.begin(); }
  inline const_iterator begin() const { return Recipes.begin(); }
  inline iterator end() { return Recipes.end(); }
  inline const_iterator end() const { return Recipes.end(); }

  inline reverse_iterator rbegin() { return Recipes.rbegin(); }
  inline const_reverse_iterator rbegin() const { return Recipes.rbegin(); }
  inline reverse_iterator rend() { return Recipes.rend(); }
  inline const_reverse_iterator rend() const { return Recipes.rend(); }

  inline size_t size() const { return Recipes.size(); }
  inline bool empty() const { return Recipes.empty(); }
  inline const VPRecipeBase &front() const { return Recipes.front(); }
  inline VPRecipeBase &front() { return Recipes.front(); }
  inline const VPRecipeBase &back() const { return Recipes.back(); }
  inline VPRecipeBase &back() { return Recipes.back(); }

  /// Returns a reference to the list of recipes.
  RecipeListTy &getRecipeList() { return Recipes; }

  /// Returns a pointer to a member of the recipe list.
  static RecipeListTy VPBasicBlock::*getSublistAccess(VPRecipeBase *) {
    return &VPBasicBlock::Recipes;
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPBlockBase *V) {
    return V->getVPBlockID() == VPBlockBase::VPBasicBlockSC;
  }

  void insert(VPRecipeBase *Recipe, iterator InsertPt) {
    assert(Recipe && "No recipe to append.");
    assert(!Recipe->Parent && "Recipe already in VPlan");
    Recipe->Parent = this;
    Recipes.insert(InsertPt, Recipe);
  }

  /// Augment the existing recipes of a VPBasicBlock with an additional
  /// \p Recipe as the last recipe.
  void appendRecipe(VPRecipeBase *Recipe) { insert(Recipe, end()); }

  /// The method which generates the output IR instructions that correspond to
  /// this VPBasicBlock, thereby "executing" the VPlan.
  void execute(VPTransformState *State) override;

  /// Return the position of the first non-phi node recipe in the block.
  iterator getFirstNonPhi();

  /// Returns an iterator range over the PHI-like recipes in the block.
  iterator_range<iterator> phis() {
    return make_range(begin(), getFirstNonPhi());
  }

  void dropAllReferences(VPValue *NewValue) override;

  /// Split current block at \p SplitAt by inserting a new block between the
  /// current block and its successors and moving all recipes starting at
  /// SplitAt to the new block. Returns the new block.
  VPBasicBlock *splitAt(iterator SplitAt);

  VPRegionBlock *getEnclosingLoopRegion();

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print this VPBasicBlock to \p O, prefixing all lines with \p Indent. \p
  /// SlotTracker is used to print unnamed VPValues using consecutive numbers.
  ///
  /// Note that the numbering is applied to the whole VPlan, so printing
  /// individual blocks is consistent with the whole VPlan printing.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
  using VPBlockBase::print; // Get the print(raw_stream &O) version.
#endif

  /// If the block has multiple successors, return the branch recipe terminating
  /// the block. If there are no or only a single successor, return nullptr.
  VPRecipeBase *getTerminator();
  const VPRecipeBase *getTerminator() const;

  /// Returns true if the block is exiting its parent region.
  bool isExiting() const;

private:
  /// Create an IR BasicBlock to hold the output instructions generated by this
  /// VPBasicBlock, and return it. Update the CFGState accordingly.
  BasicBlock *createEmptyBasicBlock(VPTransformState::CFGState &CFG);
};
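
// Illustrative sketch (not part of the original header): because PHI-like
// recipes always precede non-PHI recipes, a block can be traversed in two
// phases, assuming `VPBB` is a VPBasicBlock*:
//
//   for (VPRecipeBase &Phi : VPBB->phis())
//     /* handle header/widened phis */;
//   for (auto It = VPBB->getFirstNonPhi(); It != VPBB->end(); ++It)
//     /* handle the remaining recipes */;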

/// VPRegionBlock represents a collection of VPBasicBlocks and VPRegionBlocks
/// which form a Single-Entry-Single-Exiting subgraph of the output IR CFG.
/// A VPRegionBlock may indicate that its contents are to be replicated several
/// times. This is designed to support predicated scalarization, in which a
/// scalar if-then code structure needs to be generated VF * UF times. Having
/// this replication indicator helps to keep a single model for multiple
/// candidate VF's. The actual replication takes place only once the desired VF
/// and UF have been determined.
class VPRegionBlock : public VPBlockBase {
  /// Hold the Single Entry of the SESE region modelled by the VPRegionBlock.
  VPBlockBase *Entry;

  /// Hold the Single Exiting block of the SESE region modelled by the
  /// VPRegionBlock.
  VPBlockBase *Exiting;

  /// An indicator whether this region is to generate multiple replicated
  /// instances of output IR corresponding to its VPBlockBases.
  bool IsReplicator;

public:
  VPRegionBlock(VPBlockBase *Entry, VPBlockBase *Exiting,
                const std::string &Name = "", bool IsReplicator = false)
      : VPBlockBase(VPRegionBlockSC, Name), Entry(Entry), Exiting(Exiting),
        IsReplicator(IsReplicator) {
    assert(Entry->getPredecessors().empty() && "Entry block has predecessors.");
    assert(Exiting->getSuccessors().empty() && "Exit block has successors.");
    Entry->setParent(this);
    Exiting->setParent(this);
  }
  VPRegionBlock(const std::string &Name = "", bool IsReplicator = false)
      : VPBlockBase(VPRegionBlockSC, Name), Entry(nullptr), Exiting(nullptr),
        IsReplicator(IsReplicator) {}

  ~VPRegionBlock() override {
    if (Entry) {
      VPValue DummyValue;
      Entry->dropAllReferences(&DummyValue);
      deleteCFG(Entry);
    }
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPBlockBase *V) {
    return V->getVPBlockID() == VPBlockBase::VPRegionBlockSC;
  }

  const VPBlockBase *getEntry() const { return Entry; }
  VPBlockBase *getEntry() { return Entry; }

  /// Set \p EntryBlock as the entry VPBlockBase of this VPRegionBlock. \p
  /// EntryBlock must have no predecessors.
  void setEntry(VPBlockBase *EntryBlock) {
    assert(EntryBlock->getPredecessors().empty() &&
           "Entry block cannot have predecessors.");
    Entry = EntryBlock;
    EntryBlock->setParent(this);
  }

  const VPBlockBase *getExiting() const { return Exiting; }
  VPBlockBase *getExiting() { return Exiting; }

  /// Set \p ExitingBlock as the exiting VPBlockBase of this VPRegionBlock. \p
  /// ExitingBlock must have no successors.
  void setExiting(VPBlockBase *ExitingBlock) {
    assert(ExitingBlock->getSuccessors().empty() &&
           "Exit block cannot have successors.");
    Exiting = ExitingBlock;
    ExitingBlock->setParent(this);
  }

  /// Returns the pre-header VPBasicBlock of the loop region.
  VPBasicBlock *getPreheaderVPBB() {
    assert(!isReplicator() && "should only get pre-header of loop regions");
    return getSinglePredecessor()->getExitingBasicBlock();
  }

  /// An indicator whether this region is to generate multiple replicated
  /// instances of output IR corresponding to its VPBlockBases.
  bool isReplicator() const { return IsReplicator; }

  /// The method which generates the output IR instructions that correspond to
  /// this VPRegionBlock, thereby "executing" the VPlan.
  void execute(VPTransformState *State) override;

  void dropAllReferences(VPValue *NewValue) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print this VPRegionBlock to \p O (recursively), prefixing all lines with
  /// \p Indent. \p SlotTracker is used to print unnamed VPValues using
  /// consecutive numbers.
  ///
  /// Note that the numbering is applied to the whole VPlan, so printing
  /// individual regions is consistent with the whole VPlan printing.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
  using VPBlockBase::print; // Get the print(raw_stream &O) version.
#endif
};

/// VPlan models a candidate for vectorization, encoding various decisions taken
/// to produce efficient output IR, including which branches, basic-blocks and
/// output IR instructions to generate, and their cost. VPlan holds a
/// Hierarchical-CFG of VPBasicBlocks and VPRegionBlocks rooted at an Entry
/// VPBlock.
class VPlan {
  friend class VPlanPrinter;
  friend class VPSlotTracker;

  /// Hold the single entry to the Hierarchical CFG of the VPlan.
  VPBlockBase *Entry;

  /// Holds the VFs applicable to this VPlan.
  SmallSetVector<ElementCount, 2> VFs;

  /// Holds the UFs applicable to this VPlan. If empty, the VPlan is valid for
  /// any UF.
  SmallSetVector<unsigned, 2> UFs;

  /// Holds the name of the VPlan, for printing.
  std::string Name;

  /// Holds all the external definitions created for this VPlan. External
  /// definitions must be immutable and hold a pointer to their underlying IR.
  DenseMap<Value *, VPValue *> VPExternalDefs;

  /// Represents the trip count of the original loop, for folding
  /// the tail.
  VPValue *TripCount = nullptr;

  /// Represents the backedge taken count of the original loop, for folding
  /// the tail. It equals TripCount - 1.
  VPValue *BackedgeTakenCount = nullptr;

  /// Represents the vector trip count.
  VPValue VectorTripCount;

  /// Holds a mapping between Values and their corresponding VPValue inside
  /// VPlan.
  Value2VPValueTy Value2VPValue;

  /// Contains all VPValues that have been allocated by addVPValue directly and
  /// need to be freed when the plan's destructor is called.
  SmallVector<VPValue *, 16> VPValuesToFree;

  /// Indicates whether it is safe to use the Value2VPValue mapping or if the
  /// mapping cannot be used any longer, because it is stale.
  bool Value2VPValueEnabled = true;

  /// Values used outside the plan.
  MapVector<PHINode *, VPLiveOut *> LiveOuts;

public:
  VPlan(VPBlockBase *Entry = nullptr) : Entry(Entry) {
    if (Entry)
      Entry->setPlan(this);
  }

  ~VPlan();

  /// Prepare the plan for execution, setting up the required live-in values.
  void prepareToExecute(Value *TripCount, Value *VectorTripCount,
                        Value *CanonicalIVStartValue, VPTransformState &State,
                        bool IsEpilogueVectorization);

  /// Generate the IR code for this VPlan.
  void execute(VPTransformState *State);

  VPBlockBase *getEntry() { return Entry; }
  const VPBlockBase *getEntry() const { return Entry; }

  VPBlockBase *setEntry(VPBlockBase *Block) {
    Entry = Block;
    Block->setPlan(this);
    return Entry;
  }

  /// The trip count of the original loop.
  VPValue *getOrCreateTripCount() {
    if (!TripCount)
      TripCount = new VPValue();
    return TripCount;
  }

  /// The backedge taken count of the original loop.
  VPValue *getOrCreateBackedgeTakenCount() {
    if (!BackedgeTakenCount)
      BackedgeTakenCount = new VPValue();
    return BackedgeTakenCount;
  }

  /// The vector trip count.
  VPValue &getVectorTripCount() { return VectorTripCount; }

  /// Mark the plan to indicate that using Value2VPValue is not safe any
  /// longer, because it may be stale.
  void disableValue2VPValue() { Value2VPValueEnabled = false; }

  void addVF(ElementCount VF) { VFs.insert(VF); }

  void setVF(ElementCount VF) {
    assert(hasVF(VF) && "Cannot set VF not already in plan");
    VFs.clear();
    VFs.insert(VF);
  }

  bool hasVF(ElementCount VF) { return VFs.count(VF); }

  bool hasScalarVFOnly() const { return VFs.size() == 1 && VFs[0].isScalar(); }

  bool hasUF(unsigned UF) const { return UFs.empty() || UFs.contains(UF); }

  void setUF(unsigned UF) {
    assert(hasUF(UF) && "Cannot set the UF not already in plan");
    UFs.clear();
    UFs.insert(UF);
  }

  /// Return a string with the name of the plan and the applicable VFs and UFs.
  std::string getName() const;

  void setName(const Twine &newName) { Name = newName.str(); }

  /// Get the existing or add a new external definition for \p V.
  VPValue *getOrAddExternalDef(Value *V) {
    auto I = VPExternalDefs.insert({V, nullptr});
    if (I.second)
      I.first->second = new VPValue(V);
    return I.first->second;
  }

  void addVPValue(Value *V) {
    assert(Value2VPValueEnabled &&
           "IR value to VPValue mapping may be out of date!");
    assert(V && "Trying to add a null Value to VPlan");
    assert(!Value2VPValue.count(V) && "Value already exists in VPlan");
    VPValue *VPV = new VPValue(V);
    Value2VPValue[V] = VPV;
    VPValuesToFree.push_back(VPV);
  }

  void addVPValue(Value *V, VPValue *VPV) {
    assert(Value2VPValueEnabled && "Value2VPValue mapping may be out of date!");
    assert(V && "Trying to add a null Value to VPlan");
    assert(!Value2VPValue.count(V) && "Value already exists in VPlan");
    Value2VPValue[V] = VPV;
  }

  /// Returns the VPValue for \p V. \p OverrideAllowed can be used to disable
  /// checking whether it is safe to query VPValues using IR Values.
  VPValue *getVPValue(Value *V, bool OverrideAllowed = false) {
    assert((OverrideAllowed || isa<Constant>(V) || Value2VPValueEnabled) &&
           "Value2VPValue mapping may be out of date!");
    assert(V && "Trying to get the VPValue of a null Value");
    assert(Value2VPValue.count(V) && "Value does not exist in VPlan");
    return Value2VPValue[V];
  }

  /// Gets the VPValue or adds a new one (if none exists yet) for \p V. \p
  /// OverrideAllowed can be used to disable checking whether it is safe to
  /// query VPValues using IR Values.
  VPValue *getOrAddVPValue(Value *V, bool OverrideAllowed = false) {
    assert((OverrideAllowed || isa<Constant>(V) || Value2VPValueEnabled) &&
           "Value2VPValue mapping may be out of date!");
    assert(V && "Trying to get or add the VPValue of a null Value");
    if (!Value2VPValue.count(V))
      addVPValue(V);
    return getVPValue(V);
  }

  void removeVPValueFor(Value *V) {
    assert(Value2VPValueEnabled &&
           "IR value to VPValue mapping may be out of date!");
    Value2VPValue.erase(V);
  }

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print this VPlan to \p O.
  void print(raw_ostream &O) const;

  /// Print this VPlan in DOT format to \p O.
  void printDOT(raw_ostream &O) const;

  /// Dump the plan to stderr (for debugging).
  LLVM_DUMP_METHOD void dump() const;
#endif

  /// Returns a range mapping the values of the range \p Operands to their
  /// corresponding VPValues.
  iterator_range<mapped_iterator<Use *, std::function<VPValue *(Value *)>>>
  mapToVPValues(User::op_range Operands) {
    std::function<VPValue *(Value *)> Fn = [this](Value *Op) {
      return getOrAddVPValue(Op);
    };
    return map_range(Operands, Fn);
  }

  /// Returns the VPRegionBlock of the vector loop.
  VPRegionBlock *getVectorLoopRegion() {
    return cast<VPRegionBlock>(getEntry()->getSingleSuccessor());
  }
  const VPRegionBlock *getVectorLoopRegion() const {
    return cast<VPRegionBlock>(getEntry()->getSingleSuccessor());
  }

  /// Returns the canonical induction recipe of the vector loop.
  VPCanonicalIVPHIRecipe *getCanonicalIV() {
    VPBasicBlock *EntryVPBB = getVectorLoopRegion()->getEntryBasicBlock();
    if (EntryVPBB->empty()) {
      // VPlan native path.
      EntryVPBB = cast<VPBasicBlock>(EntryVPBB->getSingleSuccessor());
    }
    return cast<VPCanonicalIVPHIRecipe>(&*EntryVPBB->begin());
  }

  /// Find and return the VPActiveLaneMaskPHIRecipe from the header - there can
  /// be at most one. If there isn't one, then return nullptr.
  VPActiveLaneMaskPHIRecipe *getActiveLaneMaskPhi();

  void addLiveOut(PHINode *PN, VPValue *V);

  void clearLiveOuts() {
    for (auto &KV : LiveOuts)
      delete KV.second;
    LiveOuts.clear();
  }

  void removeLiveOut(PHINode *PN) {
    delete LiveOuts[PN];
    LiveOuts.erase(PN);
  }

  const MapVector<PHINode *, VPLiveOut *> &getLiveOuts() const {
    return LiveOuts;
  }

private:
  /// Add to the given dominator tree the header block and every new basic block
  /// that was created between it and the latch block, inclusive.
  static void updateDominatorTree(DominatorTree *DT, BasicBlock *LoopLatchBB,
                                  BasicBlock *LoopPreHeaderBB,
                                  BasicBlock *LoopExitBB);
};
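
// Illustrative sketch (not part of the original header): a transform typically
// navigates from the plan to the canonical IV through the vector loop region,
// assuming `Plan` is a VPlan&:
//
//   VPRegionBlock *LoopRegion = Plan.getVectorLoopRegion();
//   VPBasicBlock *Header = LoopRegion->getEntryBasicBlock();
//   VPCanonicalIVPHIRecipe *CanIV = Plan.getCanonicalIV();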

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// VPlanPrinter prints a given VPlan to a given output stream. The printing is
/// indented and follows the dot format.
class VPlanPrinter {
  raw_ostream &OS;
  const VPlan &Plan;
  unsigned Depth = 0;
  unsigned TabWidth = 2;
  std::string Indent;
  unsigned BID = 0;
  SmallDenseMap<const VPBlockBase *, unsigned> BlockID;

  VPSlotTracker SlotTracker;

  /// Handle indentation.
  void bumpIndent(int b) { Indent = std::string((Depth += b) * TabWidth, ' '); }

  /// Print a given \p Block of the Plan.
  void dumpBlock(const VPBlockBase *Block);

  /// Print the information related to the CFG edges going out of a given
  /// \p Block, followed by printing the successor blocks themselves.
  void dumpEdges(const VPBlockBase *Block);

  /// Print a given \p BasicBlock, including its VPRecipes, followed by printing
  /// its successor blocks.
  void dumpBasicBlock(const VPBasicBlock *BasicBlock);

  /// Print a given \p Region of the Plan.
  void dumpRegion(const VPRegionBlock *Region);

  unsigned getOrCreateBID(const VPBlockBase *Block) {
    return BlockID.count(Block) ? BlockID[Block] : BlockID[Block] = BID++;
  }

  Twine getOrCreateName(const VPBlockBase *Block);

  Twine getUID(const VPBlockBase *Block);

  /// Print the information related to a CFG edge between two VPBlockBases.
  void drawEdge(const VPBlockBase *From, const VPBlockBase *To, bool Hidden,
                const Twine &Label);

public:
  VPlanPrinter(raw_ostream &O, const VPlan &P)
      : OS(O), Plan(P), SlotTracker(&P) {}

  LLVM_DUMP_METHOD void dump();
};

struct VPlanIngredient {
  const Value *V;

  VPlanIngredient(const Value *V) : V(V) {}

  void print(raw_ostream &O) const;
};

inline raw_ostream &operator<<(raw_ostream &OS, const VPlanIngredient &I) {
  I.print(OS);
  return OS;
}

inline raw_ostream &operator<<(raw_ostream &OS, const VPlan &Plan) {
  Plan.print(OS);
  return OS;
}
#endif

//===----------------------------------------------------------------------===//
// VPlan Utilities
//===----------------------------------------------------------------------===//

/// Class that provides utilities for VPBlockBases in VPlan.
class VPBlockUtils {
public:
  VPBlockUtils() = delete;

  /// Insert disconnected VPBlockBase \p NewBlock after \p BlockPtr. Add \p
  /// NewBlock as successor of \p BlockPtr and \p BlockPtr as predecessor of \p
  /// NewBlock, and propagate \p BlockPtr parent to \p NewBlock. \p BlockPtr's
  /// successors are moved from \p BlockPtr to \p NewBlock. \p NewBlock must
  /// have neither successors nor predecessors.
  static void insertBlockAfter(VPBlockBase *NewBlock, VPBlockBase *BlockPtr) {
    assert(NewBlock->getSuccessors().empty() &&
           NewBlock->getPredecessors().empty() &&
           "Can't insert new block with predecessors or successors.");
    NewBlock->setParent(BlockPtr->getParent());
    SmallVector<VPBlockBase *> Succs(BlockPtr->successors());
    for (VPBlockBase *Succ : Succs) {
      disconnectBlocks(BlockPtr, Succ);
      connectBlocks(NewBlock, Succ);
    }
    connectBlocks(BlockPtr, NewBlock);
  }

  /// Insert disconnected VPBlockBases \p IfTrue and \p IfFalse after \p
  /// BlockPtr. Add \p IfTrue and \p IfFalse as successors of \p BlockPtr and \p
  /// BlockPtr as predecessor of \p IfTrue and \p IfFalse. Propagate \p BlockPtr
  /// parent to \p IfTrue and \p IfFalse. \p BlockPtr must have no successors
  /// and \p IfTrue and \p IfFalse must have neither successors nor
  /// predecessors.
  static void insertTwoBlocksAfter(VPBlockBase *IfTrue, VPBlockBase *IfFalse,
                                   VPBlockBase *BlockPtr) {
    assert(IfTrue->getSuccessors().empty() &&
           "Can't insert IfTrue with successors.");
    assert(IfFalse->getSuccessors().empty() &&
           "Can't insert IfFalse with successors.");
    BlockPtr->setTwoSuccessors(IfTrue, IfFalse);
    IfTrue->setPredecessors({BlockPtr});
    IfFalse->setPredecessors({BlockPtr});
    IfTrue->setParent(BlockPtr->getParent());
    IfFalse->setParent(BlockPtr->getParent());
  }

  /// Connect VPBlockBases \p From and \p To bi-directionally. Append \p To to
  /// the successors of \p From and \p From to the predecessors of \p To. Both
  /// VPBlockBases must have the same parent, which can be null. Both
  /// VPBlockBases can be already connected to other VPBlockBases.
  static void connectBlocks(VPBlockBase *From, VPBlockBase *To) {
    assert((From->getParent() == To->getParent()) &&
           "Can't connect two blocks with different parents");
    assert(From->getNumSuccessors() < 2 &&
           "Blocks can't have more than two successors.");
    From->appendSuccessor(To);
    To->appendPredecessor(From);
  }

  /// Disconnect VPBlockBases \p From and \p To bi-directionally. Remove \p To
  /// from the successors of \p From and \p From from the predecessors of \p To.
  static void disconnectBlocks(VPBlockBase *From, VPBlockBase *To) {
    assert(To && "Successor to disconnect is null.");
    From->removeSuccessor(To);
    To->removePredecessor(From);
  }

  /// Return an iterator range over \p Range which only includes \p BlockTy
  /// blocks. The accesses are cast to \p BlockTy.
  template <typename BlockTy, typename T>
  static auto blocksOnly(const T &Range) {
    // Create BaseTy with correct const-ness based on BlockTy.
    using BaseTy = std::conditional_t<std::is_const<BlockTy>::value,
                                      const VPBlockBase, VPBlockBase>;

    // We need to first create an iterator range over (const) BlockTy & instead
    // of (const) BlockTy * for filter_range to work properly.
    auto Mapped =
        map_range(Range, [](BaseTy *Block) -> BaseTy & { return *Block; });
    auto Filter = make_filter_range(
        Mapped, [](BaseTy &Block) { return isa<BlockTy>(&Block); });
    return map_range(Filter, [](BaseTy &Block) -> BlockTy * {
      return cast<BlockTy>(&Block);
    });
  }
};
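
// Illustrative sketch (not part of the original header): blocksOnly filters a
// mixed range of blocks down to one concrete block type, e.g. visiting only
// the VPBasicBlocks among a region's successors:
//
//   for (VPBasicBlock *VPBB :
//        VPBlockUtils::blocksOnly<VPBasicBlock>(Region->getSuccessors()))
//     /* VPBB is guaranteed to be a VPBasicBlock here */;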
  2079. class VPInterleavedAccessInfo {
  2080. DenseMap<VPInstruction *, InterleaveGroup<VPInstruction> *>
  2081. InterleaveGroupMap;
  2082. /// Type for mapping of instruction based interleave groups to VPInstruction
  2083. /// interleave groups
  2084. using Old2NewTy = DenseMap<InterleaveGroup<Instruction> *,
  2085. InterleaveGroup<VPInstruction> *>;
  2086. /// Recursively \p Region and populate VPlan based interleave groups based on
  2087. /// \p IAI.
  2088. void visitRegion(VPRegionBlock *Region, Old2NewTy &Old2New,
  2089. InterleavedAccessInfo &IAI);
  2090. /// Recursively traverse \p Block and populate VPlan based interleave groups
  2091. /// based on \p IAI.
  2092. void visitBlock(VPBlockBase *Block, Old2NewTy &Old2New,
  2093. InterleavedAccessInfo &IAI);
public:
  VPInterleavedAccessInfo(VPlan &Plan, InterleavedAccessInfo &IAI);

  ~VPInterleavedAccessInfo() {
    SmallPtrSet<InterleaveGroup<VPInstruction> *, 4> DelSet;
    // Multiple instructions may map to the same interleave group; collect the
    // unique group pointers first to avoid releasing a pointer twice.
    for (auto &I : InterleaveGroupMap)
      DelSet.insert(I.second);
    for (auto *Ptr : DelSet)
      delete Ptr;
  }

  /// Get the interleave group that \p Instr belongs to.
  ///
  /// \returns nullptr if \p Instr does not belong to any interleave group.
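  ///
  /// A usage sketch (Plan, IAI and VPInst are assumed to exist in the
  /// surrounding code):
  /// \code
  ///   VPInterleavedAccessInfo VPIAI(Plan, IAI);
  ///   if (auto *IG = VPIAI.getInterleaveGroup(VPInst))
  ///     Factor = IG->getFactor(); // VPInst is a member of group IG.
  /// \endcode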
  InterleaveGroup<VPInstruction> *
  getInterleaveGroup(VPInstruction *Instr) const {
    return InterleaveGroupMap.lookup(Instr);
  }
};
/// Class that maps (parts of) an existing VPlan to trees of combined
/// VPInstructions.
class VPlanSlp {
  enum class OpMode { Failed, Load, Opcode };

  /// A DenseMapInfo implementation for using SmallVector<VPValue *, 4> as
  /// DenseMap keys.
  struct BundleDenseMapInfo {
    static SmallVector<VPValue *, 4> getEmptyKey() {
      return {reinterpret_cast<VPValue *>(-1)};
    }

    static SmallVector<VPValue *, 4> getTombstoneKey() {
      return {reinterpret_cast<VPValue *>(-2)};
    }

    static unsigned getHashValue(const SmallVector<VPValue *, 4> &V) {
      return static_cast<unsigned>(hash_combine_range(V.begin(), V.end()));
    }

    static bool isEqual(const SmallVector<VPValue *, 4> &LHS,
                        const SmallVector<VPValue *, 4> &RHS) {
      return LHS == RHS;
    }
  };
  /// Mapping of values in the original VPlan to a combined VPInstruction.
  DenseMap<SmallVector<VPValue *, 4>, VPInstruction *, BundleDenseMapInfo>
      BundleToCombined;

  VPInterleavedAccessInfo &IAI;

  /// Basic block to operate on. For now, only instructions in a single BB are
  /// considered.
  const VPBasicBlock &BB;

  /// Indicates whether we managed to combine all visited instructions or not.
  bool CompletelySLP = true;

  /// Width of the widest combined bundle in bits.
  unsigned WidestBundleBits = 0;

  using MultiNodeOpTy =
      typename std::pair<VPInstruction *, SmallVector<VPValue *, 4>>;

  // Input operand bundles for the current multi node. Each multi node operand
  // bundle contains values not matching the multi node's opcode. They will be
  // reordered in reorderMultiNodeOps, once building the multi node is
  // complete.
  SmallVector<MultiNodeOpTy, 4> MultiNodeOps;

  /// Indicates whether we are currently building a multi node.
  bool MultiNodeActive = false;
  /// Check if we can vectorize \p Operands together.
  bool areVectorizable(ArrayRef<VPValue *> Operands) const;

  /// Add combined instruction \p New for the bundle \p Operands.
  void addCombined(ArrayRef<VPValue *> Operands, VPInstruction *New);

  /// Indicate we hit a bundle we failed to combine. Returns nullptr for now.
  VPInstruction *markFailed();

  /// Reorder operands in the multi node to maximize sequential memory access
  /// and commutative operations.
  SmallVector<MultiNodeOpTy, 4> reorderMultiNodeOps();

  /// Choose the best candidate to use for the lane after \p Last. The set of
  /// candidates to choose from are values with an opcode matching \p Last's
  /// or loads consecutive to \p Last.
  std::pair<OpMode, VPValue *> getBest(OpMode Mode, VPValue *Last,
                                       SmallPtrSetImpl<VPValue *> &Candidates,
                                       VPInterleavedAccessInfo &IAI);

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print bundle \p Values to dbgs().
  void dumpBundle(ArrayRef<VPValue *> Values);
#endif
public:
  VPlanSlp(VPInterleavedAccessInfo &IAI, VPBasicBlock &BB) : IAI(IAI), BB(BB) {}

  ~VPlanSlp() = default;

  /// Tries to build an SLP tree rooted at \p Operands and returns a
  /// VPInstruction combining \p Operands, if they can be combined.
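  ///
  /// A usage sketch (the root bundle, e.g. the stored values of adjacent
  /// stores, is assumed; buildGraph returns nullptr on failure):
  /// \code
  ///   VPlanSlp Slp(VPIAI, *VPBB);
  ///   VPInstruction *Root = Slp.buildGraph(StoreValueBundle);
  ///   if (Root && Slp.isCompletelySLP())
  ///     WidestBits = Slp.getWidestBundleBits();
  /// \endcode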
  VPInstruction *buildGraph(ArrayRef<VPValue *> Operands);

  /// Return the width of the widest combined bundle in bits.
  unsigned getWidestBundleBits() const { return WidestBundleBits; }

  /// Return true if all visited instructions can be combined.
  bool isCompletelySLP() const { return CompletelySLP; }
};
namespace vputils {
/// Returns true if only the first lane of \p Def is used.
bool onlyFirstLaneUsed(VPValue *Def);

/// Get or create a VPValue that corresponds to the expansion of \p Expr. If \p
/// Expr is a SCEVConstant or SCEVUnknown, return a VPValue wrapping the live-in
/// value. Otherwise return a VPExpandSCEVRecipe to expand \p Expr. If \p Plan's
/// pre-header already contains a recipe expanding \p Expr, return it. If not,
/// create a new one.
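///
/// A usage sketch (assuming a trip-count SCEV and a ScalarEvolution instance
/// are at hand; the variable names are illustrative):
/// \code
///   VPValue *TripCountV =
///       vputils::getOrCreateVPValueForSCEVExpr(Plan, TripCountSCEV, SE);
/// \endcode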
VPValue *getOrCreateVPValueForSCEVExpr(VPlan &Plan, const SCEV *Expr,
                                       ScalarEvolution &SE);
/// Returns true if \p VPV is uniform after vectorization.
inline bool isUniformAfterVectorization(VPValue *VPV) {
  // A value defined outside the vector region must be uniform after
  // vectorization inside a vector region.
  if (VPV->isDefinedOutsideVectorRegions())
    return true;
  VPRecipeBase *Def = VPV->getDefiningRecipe();
  assert(Def && "Must have definition for value defined inside vector region");
  if (auto *Rep = dyn_cast<VPReplicateRecipe>(Def))
    return Rep->isUniform();
  return false;
}
} // end namespace vputils

} // end namespace llvm

#endif // LLVM_TRANSFORMS_VECTORIZE_VPLAN_H