//===- VPlan.h - Represent A Vectorizer Plan --------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file contains the declarations of the Vectorization Plan base classes:
/// 1. VPBasicBlock and VPRegionBlock that inherit from a common pure virtual
///    VPBlockBase, together implementing a Hierarchical CFG;
/// 2. Specializations of GraphTraits that allow VPBlockBase graphs to be
///    treated as proper graphs for generic algorithms;
/// 3. Pure virtual VPRecipeBase serving as the base class for recipes contained
///    within VPBasicBlocks;
/// 4. VPInstruction, a concrete Recipe and VPUser modeling a single planned
///    instruction;
/// 5. The VPlan class holding a candidate for vectorization;
/// 6. The VPlanPrinter class providing a way to print a plan in dot format;
/// These are documented in docs/VectorizationPlan.rst.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_TRANSFORMS_VECTORIZE_VPLAN_H
#define LLVM_TRANSFORMS_VECTORIZE_VPLAN_H

#include "VPlanLoopInfo.h"
#include "VPlanValue.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/GraphTraits.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Twine.h"
#include "llvm/ADT/ilist.h"
#include "llvm/ADT/ilist_node.h"
#include "llvm/Analysis/VectorUtils.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/Support/InstructionCost.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <map>
#include <string>

namespace llvm {

class BasicBlock;
class DominatorTree;
class InductionDescriptor;
class InnerLoopVectorizer;
class LoopInfo;
class raw_ostream;
class RecurrenceDescriptor;
class Value;
class VPBasicBlock;
class VPRegionBlock;
class VPlan;
class VPReplicateRecipe;
class VPlanSlp;

/// Returns a calculation for the total number of elements for a given \p VF.
/// For fixed width vectors this value is a constant, whereas for scalable
/// vectors it is an expression determined at runtime.
Value *getRuntimeVF(IRBuilder<> &B, Type *Ty, ElementCount VF);

/// Return a value for Step multiplied by VF.
Value *createStepForVF(IRBuilder<> &B, Type *Ty, ElementCount VF, int64_t Step);
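
// A minimal usage sketch for the two helpers above (illustrative only),
// assuming an IRBuilder `B` with a valid insertion point and `Ty` = i64:
//
//   // VF = 4: folds to the constant i64 4.
//   Value *Fixed = getRuntimeVF(B, Ty, ElementCount::getFixed(4));
//   // VF = vscale x 4: emits a runtime expression based on llvm.vscale.
//   Value *Scalable = getRuntimeVF(B, Ty, ElementCount::getScalable(4));
//   // Step = 8, VF = vscale x 4: returns 8 * (4 * vscale).
//   Value *Step = createStepForVF(B, Ty, ElementCount::getScalable(4), 8);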

/// A range of powers-of-2 vectorization factors with fixed start and
/// adjustable end. The range includes start and excludes end, e.g.:
/// [1, 9) = {1, 2, 4, 8}
struct VFRange {
  // A power of 2.
  const ElementCount Start;

  // Need not be a power of 2. If End <= Start, the range is empty.
  ElementCount End;

  bool isEmpty() const {
    return End.getKnownMinValue() <= Start.getKnownMinValue();
  }

  VFRange(const ElementCount &Start, const ElementCount &End)
      : Start(Start), End(End) {
    assert(Start.isScalable() == End.isScalable() &&
           "Both Start and End should have the same scalable flag");
    assert(isPowerOf2_32(Start.getKnownMinValue()) &&
           "Expected Start to be a power of 2");
  }
};
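
// A minimal sketch of how such a range is typically consumed (modeled on the
// range-clamping idiom in LoopVectorize; the helper name is illustrative):
// test a property at Range.Start and clamp Range.End at the first VF for
// which the answer changes, so all VFs in the resulting range behave alike.
//
//   static bool testAndClampRange(function_ref<bool(ElementCount)> Predicate,
//                                 VFRange &Range) {
//     assert(!Range.isEmpty() && "Trying to test an empty VF range.");
//     bool ResultAtStart = Predicate(Range.Start);
//     for (ElementCount VF = Range.Start * 2;
//          ElementCount::isKnownLT(VF, Range.End); VF *= 2)
//       if (Predicate(VF) != ResultAtStart) {
//         Range.End = VF;
//         break;
//       }
//     return ResultAtStart;
//   }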

using VPlanPtr = std::unique_ptr<VPlan>;

/// In what follows, the term "input IR" refers to code that is fed into the
/// vectorizer whereas the term "output IR" refers to code that is generated by
/// the vectorizer.

/// VPLane provides a way to access lanes in both fixed width and scalable
/// vectors, where for the latter the lane index sometimes needs calculating
/// as a runtime expression.
class VPLane {
public:
  /// Kind describes how to interpret Lane.
  enum class Kind : uint8_t {
    /// For First, Lane is the index into the first N elements of a
    /// fixed-vector <N x <ElTy>> or a scalable vector <vscale x N x <ElTy>>.
    First,
    /// For ScalableLast, Lane is the offset from the start of the last
    /// N-element subvector in a scalable vector <vscale x N x <ElTy>>. For
    /// example, a Lane of 0 corresponds to lane `(vscale - 1) * N`, a Lane of
    /// 1 corresponds to `((vscale - 1) * N) + 1`, etc.
    ScalableLast
  };

private:
  /// in [0..VF)
  unsigned Lane;

  /// Indicates how the Lane should be interpreted, as described above.
  Kind LaneKind;

public:
  VPLane(unsigned Lane, Kind LaneKind) : Lane(Lane), LaneKind(LaneKind) {}

  static VPLane getFirstLane() { return VPLane(0, VPLane::Kind::First); }

  static VPLane getLastLaneForVF(const ElementCount &VF) {
    unsigned LaneOffset = VF.getKnownMinValue() - 1;
    Kind LaneKind;
    if (VF.isScalable())
      // In this case 'LaneOffset' refers to the offset from the start of the
      // last subvector with VF.getKnownMinValue() elements.
      LaneKind = VPLane::Kind::ScalableLast;
    else
      LaneKind = VPLane::Kind::First;
    return VPLane(LaneOffset, LaneKind);
  }

  /// Returns a compile-time known value for the lane index and asserts if the
  /// lane can only be calculated at runtime.
  unsigned getKnownLane() const {
    assert(LaneKind == Kind::First);
    return Lane;
  }

  /// Returns an expression describing the lane index that can be used at
  /// runtime.
  Value *getAsRuntimeExpr(IRBuilder<> &Builder, const ElementCount &VF) const;

  /// Returns the Kind of lane offset.
  Kind getKind() const { return LaneKind; }

  /// Returns true if this is the first lane of the whole vector.
  bool isFirstLane() const { return Lane == 0 && LaneKind == Kind::First; }

  /// Maps the lane to a cache index based on \p VF.
  unsigned mapToCacheIndex(const ElementCount &VF) const {
    switch (LaneKind) {
    case VPLane::Kind::ScalableLast:
      assert(VF.isScalable() && Lane < VF.getKnownMinValue());
      return VF.getKnownMinValue() + Lane;
    default:
      assert(Lane < VF.getKnownMinValue());
      return Lane;
    }
  }

  /// Returns the maximum number of lanes that we are able to consider
  /// caching for \p VF.
  static unsigned getNumCachedLanes(const ElementCount &VF) {
    return VF.getKnownMinValue() * (VF.isScalable() ? 2 : 1);
  }
};
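
// A worked example of the cache-index mapping above, assuming VF = vscale x 4:
// lanes of Kind::First occupy cache indices [0, 4) and lanes of
// Kind::ScalableLast occupy [4, 8), so getNumCachedLanes returns 4 * 2 = 8.
//
//   auto VF = ElementCount::getScalable(4);
//   VPLane L2(2, VPLane::Kind::First);          // L2.mapToCacheIndex(VF) == 2
//   VPLane Last = VPLane::getLastLaneForVF(VF); // ScalableLast, offset 3
//   unsigned Idx = Last.mapToCacheIndex(VF);    // 4 + 3 == 7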

/// VPIteration represents a single point in the iteration space of the output
/// (vectorized and/or unrolled) IR loop.
struct VPIteration {
  /// in [0..UF)
  unsigned Part;

  VPLane Lane;

  VPIteration(unsigned Part, unsigned Lane,
              VPLane::Kind Kind = VPLane::Kind::First)
      : Part(Part), Lane(Lane, Kind) {}

  VPIteration(unsigned Part, const VPLane &Lane) : Part(Part), Lane(Lane) {}

  bool isFirstIteration() const { return Part == 0 && Lane.isFirstLane(); }
};
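
// For instance, with UF = 2 and VF = 4 the output iteration space consists of
// parts {0, 1} crossed with lanes {0..3}; the point "part 1, lane 2" is:
//
//   VPIteration Point(1, 2);                // Kind defaults to Kind::First.
//   bool First = Point.isFirstIteration();  // false; only (0, lane 0) is.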

/// VPTransformState holds information passed down when "executing" a VPlan,
/// needed for generating the output IR.
struct VPTransformState {
  VPTransformState(ElementCount VF, unsigned UF, LoopInfo *LI,
                   DominatorTree *DT, IRBuilder<> &Builder,
                   InnerLoopVectorizer *ILV, VPlan *Plan)
      : VF(VF), UF(UF), LI(LI), DT(DT), Builder(Builder), ILV(ILV),
        Plan(Plan) {}

  /// The chosen Vectorization and Unroll Factors of the loop being vectorized.
  ElementCount VF;
  unsigned UF;

  /// Hold the indices to generate specific scalar instructions. Null indicates
  /// that all instances are to be generated, using either scalar or vector
  /// instructions.
  Optional<VPIteration> Instance;

  struct DataState {
    /// A type for vectorized values in the new loop. Each value from the
    /// original loop, when vectorized, is represented by UF vector values in
    /// the new unrolled loop, where UF is the unroll factor.
    typedef SmallVector<Value *, 2> PerPartValuesTy;

    DenseMap<VPValue *, PerPartValuesTy> PerPartOutput;

    using ScalarsPerPartValuesTy = SmallVector<SmallVector<Value *, 4>, 2>;
    DenseMap<VPValue *, ScalarsPerPartValuesTy> PerPartScalars;
  } Data;

  /// Get the generated Value for a given VPValue and a given Part. Note that
  /// as some Defs are still created by ILV and managed in its ValueMap, this
  /// method will delegate the call to ILV in such cases in order to provide
  /// callers a consistent API.
  /// \see set.
  Value *get(VPValue *Def, unsigned Part);

  /// Get the generated Value for a given VPValue and given Part and Lane.
  Value *get(VPValue *Def, const VPIteration &Instance);

  bool hasVectorValue(VPValue *Def, unsigned Part) {
    auto I = Data.PerPartOutput.find(Def);
    return I != Data.PerPartOutput.end() && Part < I->second.size() &&
           I->second[Part];
  }

  bool hasAnyVectorValue(VPValue *Def) const {
    return Data.PerPartOutput.find(Def) != Data.PerPartOutput.end();
  }

  bool hasScalarValue(VPValue *Def, VPIteration Instance) {
    auto I = Data.PerPartScalars.find(Def);
    if (I == Data.PerPartScalars.end())
      return false;
    unsigned CacheIdx = Instance.Lane.mapToCacheIndex(VF);
    return Instance.Part < I->second.size() &&
           CacheIdx < I->second[Instance.Part].size() &&
           I->second[Instance.Part][CacheIdx];
  }

  /// Set the generated Value for a given VPValue and a given Part.
  void set(VPValue *Def, Value *V, unsigned Part) {
    if (!Data.PerPartOutput.count(Def)) {
      DataState::PerPartValuesTy Entry(UF);
      Data.PerPartOutput[Def] = Entry;
    }
    Data.PerPartOutput[Def][Part] = V;
  }

  /// Reset an existing vector value for \p Def and a given \p Part.
  void reset(VPValue *Def, Value *V, unsigned Part) {
    auto Iter = Data.PerPartOutput.find(Def);
    assert(Iter != Data.PerPartOutput.end() &&
           "need to overwrite existing value");
    Iter->second[Part] = V;
  }

  /// Set the generated scalar \p V for \p Def and the given \p Instance.
  void set(VPValue *Def, Value *V, const VPIteration &Instance) {
    auto Iter = Data.PerPartScalars.insert({Def, {}});
    auto &PerPartVec = Iter.first->second;
    while (PerPartVec.size() <= Instance.Part)
      PerPartVec.emplace_back();
    auto &Scalars = PerPartVec[Instance.Part];
    unsigned CacheIdx = Instance.Lane.mapToCacheIndex(VF);
    while (Scalars.size() <= CacheIdx)
      Scalars.push_back(nullptr);
    assert(!Scalars[CacheIdx] &&
           "should not overwrite existing value; use reset()");
    Scalars[CacheIdx] = V;
  }

  /// Reset an existing scalar value for \p Def and a given \p Instance.
  void reset(VPValue *Def, Value *V, const VPIteration &Instance) {
    auto Iter = Data.PerPartScalars.find(Def);
    assert(Iter != Data.PerPartScalars.end() &&
           "need to overwrite existing value");
    assert(Instance.Part < Iter->second.size() &&
           "need to overwrite existing value");
    unsigned CacheIdx = Instance.Lane.mapToCacheIndex(VF);
    assert(CacheIdx < Iter->second[Instance.Part].size() &&
           "need to overwrite existing value");
    Iter->second[Instance.Part][CacheIdx] = V;
  }

  /// Hold state information used when constructing the CFG of the output IR,
  /// traversing the VPBasicBlocks and generating corresponding IR BasicBlocks.
  struct CFGState {
    /// The previous VPBasicBlock visited. Initially set to null.
    VPBasicBlock *PrevVPBB = nullptr;

    /// The previous IR BasicBlock created or used. Initially set to the new
    /// header BasicBlock.
    BasicBlock *PrevBB = nullptr;

    /// The last IR BasicBlock in the output IR. Set to the new latch
    /// BasicBlock, used for placing the newly created BasicBlocks.
    BasicBlock *LastBB = nullptr;

    /// The IR BasicBlock that is the preheader of the vector loop in the
    /// output IR.
    /// FIXME: The vector preheader should also be modeled in VPlan, so any
    /// code that needs to be added to the preheader gets directly generated
    /// by VPlan. There should be no need to manage a pointer to the IR
    /// BasicBlock.
    BasicBlock *VectorPreHeader = nullptr;

    /// A mapping of each VPBasicBlock to the corresponding BasicBlock. In case
    /// of replication, maps the BasicBlock of the last replica created.
    SmallDenseMap<VPBasicBlock *, BasicBlock *> VPBB2IRBB;

    /// Vector of VPBasicBlocks whose terminator instruction needs to be fixed
    /// up at the end of vector code generation.
    SmallVector<VPBasicBlock *, 8> VPBBsToFix;

    CFGState() = default;
  } CFG;

  /// Hold a pointer to LoopInfo to register new basic blocks in the loop.
  LoopInfo *LI;

  /// Hold a pointer to Dominator Tree to register new basic blocks in the loop.
  DominatorTree *DT;

  /// Hold a reference to the IRBuilder used to generate output IR code.
  IRBuilder<> &Builder;

  VPValue2ValueTy VPValue2Value;

  /// Hold the canonical scalar IV of the vector loop (start=0, step=VF*UF).
  Value *CanonicalIV = nullptr;

  /// Hold a pointer to InnerLoopVectorizer to reuse its IR generation methods.
  InnerLoopVectorizer *ILV;

  /// Pointer to the VPlan for which code is being generated.
  VPlan *Plan;

  /// Holds recipes that may generate a poison value that is used after
  /// vectorization, even when their operands are not poison.
  SmallPtrSet<VPRecipeBase *, 16> MayGeneratePoisonRecipes;
};
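
// A minimal sketch of the per-part caching contract above (illustrative;
// State, Def, V0 and V1 are assumed placeholders): with UF = 2 each VPValue
// is backed by one generated IR Value per part, written once via set() and
// overwritten only via reset().
//
//   State.set(Def, V0, /*Part=*/0);
//   State.set(Def, V1, /*Part=*/1);
//   if (State.hasVectorValue(Def, 0))
//     State.reset(Def, V1, /*Part=*/0); // Overwriting requires reset().
//   Value *P1 = State.get(Def, /*Part=*/1);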

/// VPUsers instance used by VPBlockBase to manage CondBit and the block
/// predicate. Currently VPBlockUsers are used in VPBlockBase for historical
/// reasons, but in the future the only VPUsers should either be recipes or
/// live-outs.
struct VPBlockUser : public VPUser {
  VPBlockUser() : VPUser({}, VPUserID::Block) {}

  VPValue *getSingleOperandOrNull() {
    if (getNumOperands() == 1)
      return getOperand(0);
    return nullptr;
  }
  const VPValue *getSingleOperandOrNull() const {
    if (getNumOperands() == 1)
      return getOperand(0);
    return nullptr;
  }

  void resetSingleOpUser(VPValue *NewVal) {
    assert(getNumOperands() <= 1 && "Didn't expect more than one operand!");
    if (!NewVal) {
      if (getNumOperands() == 1)
        removeLastOperand();
      return;
    }

    if (getNumOperands() == 1)
      setOperand(0, NewVal);
    else
      addOperand(NewVal);
  }
};

/// VPBlockBase is the building block of the Hierarchical Control-Flow Graph.
/// A VPBlockBase can be either a VPBasicBlock or a VPRegionBlock.
class VPBlockBase {
  friend class VPBlockUtils;

  const unsigned char SubclassID; ///< Subclass identifier (for isa/dyn_cast).

  /// An optional name for the block.
  std::string Name;

  /// The immediate VPRegionBlock which this VPBlockBase belongs to, or null if
  /// it is a topmost VPBlockBase.
  VPRegionBlock *Parent = nullptr;

  /// List of predecessor blocks.
  SmallVector<VPBlockBase *, 1> Predecessors;

  /// List of successor blocks.
  SmallVector<VPBlockBase *, 1> Successors;

  /// Successor selector managed by a VPUser. For blocks with zero or one
  /// successors, there is no operand. Otherwise there is exactly one operand
  /// which is the branch condition.
  VPBlockUser CondBitUser;

  /// If the block is predicated, its predicate is stored as an operand of this
  /// VPUser to maintain the def-use relations. Otherwise there is no operand
  /// here.
  VPBlockUser PredicateUser;

  /// VPlan containing the block. Can only be set on the entry block of the
  /// plan.
  VPlan *Plan = nullptr;

  /// Add \p Successor as the last successor to this block.
  void appendSuccessor(VPBlockBase *Successor) {
    assert(Successor && "Cannot add nullptr successor!");
    Successors.push_back(Successor);
  }

  /// Add \p Predecessor as the last predecessor to this block.
  void appendPredecessor(VPBlockBase *Predecessor) {
    assert(Predecessor && "Cannot add nullptr predecessor!");
    Predecessors.push_back(Predecessor);
  }

  /// Remove \p Predecessor from the predecessors of this block.
  void removePredecessor(VPBlockBase *Predecessor) {
    auto Pos = find(Predecessors, Predecessor);
    assert(Pos && "Predecessor does not exist");
    Predecessors.erase(Pos);
  }

  /// Remove \p Successor from the successors of this block.
  void removeSuccessor(VPBlockBase *Successor) {
    auto Pos = find(Successors, Successor);
    assert(Pos && "Successor does not exist");
    Successors.erase(Pos);
  }

protected:
  VPBlockBase(const unsigned char SC, const std::string &N)
      : SubclassID(SC), Name(N) {}

public:
  /// An enumeration for keeping track of the concrete subclasses of
  /// VPBlockBase that are actually instantiated. Values of this enumeration
  /// are kept in the SubclassID field of the VPBlockBase objects. They are
  /// used for concrete type identification.
  using VPBlockTy = enum { VPBasicBlockSC, VPRegionBlockSC };

  using VPBlocksTy = SmallVectorImpl<VPBlockBase *>;

  virtual ~VPBlockBase() = default;

  const std::string &getName() const { return Name; }

  void setName(const Twine &newName) { Name = newName.str(); }

  /// \return an ID for the concrete type of this object.
  /// This is used to implement the classof checks. This should not be used
  /// for any other purpose, as the values may change as LLVM evolves.
  unsigned getVPBlockID() const { return SubclassID; }

  VPRegionBlock *getParent() { return Parent; }
  const VPRegionBlock *getParent() const { return Parent; }

  /// \return A pointer to the plan containing the current block.
  VPlan *getPlan();
  const VPlan *getPlan() const;

  /// Sets the pointer of the plan containing the block. The block must be the
  /// entry block into the VPlan.
  void setPlan(VPlan *ParentPlan);

  void setParent(VPRegionBlock *P) { Parent = P; }

  /// \return the VPBasicBlock that is the entry of this VPBlockBase,
  /// recursively, if the latter is a VPRegionBlock. Otherwise, if this
  /// VPBlockBase is a VPBasicBlock, it is returned.
  const VPBasicBlock *getEntryBasicBlock() const;
  VPBasicBlock *getEntryBasicBlock();

  /// \return the VPBasicBlock that is the exit of this VPBlockBase,
  /// recursively, if the latter is a VPRegionBlock. Otherwise, if this
  /// VPBlockBase is a VPBasicBlock, it is returned.
  const VPBasicBlock *getExitBasicBlock() const;
  VPBasicBlock *getExitBasicBlock();

  const VPBlocksTy &getSuccessors() const { return Successors; }
  VPBlocksTy &getSuccessors() { return Successors; }

  iterator_range<VPBlockBase **> successors() { return Successors; }

  const VPBlocksTy &getPredecessors() const { return Predecessors; }
  VPBlocksTy &getPredecessors() { return Predecessors; }

  /// \return the successor of this VPBlockBase if it has a single successor.
  /// Otherwise return a null pointer.
  VPBlockBase *getSingleSuccessor() const {
    return (Successors.size() == 1 ? *Successors.begin() : nullptr);
  }

  /// \return the predecessor of this VPBlockBase if it has a single
  /// predecessor. Otherwise return a null pointer.
  VPBlockBase *getSinglePredecessor() const {
    return (Predecessors.size() == 1 ? *Predecessors.begin() : nullptr);
  }

  size_t getNumSuccessors() const { return Successors.size(); }
  size_t getNumPredecessors() const { return Predecessors.size(); }

  /// An Enclosing Block of a block B is any block containing B, including B
  /// itself. \return the closest enclosing block starting from "this", which
  /// has successors. \return the root enclosing block if all enclosing blocks
  /// have no successors.
  VPBlockBase *getEnclosingBlockWithSuccessors();

  /// \return the closest enclosing block starting from "this", which has
  /// predecessors. \return the root enclosing block if all enclosing blocks
  /// have no predecessors.
  VPBlockBase *getEnclosingBlockWithPredecessors();

  /// \return the successors either attached directly to this VPBlockBase or,
  /// if this VPBlockBase is the exit block of a VPRegionBlock and has no
  /// successors of its own, search recursively for the first enclosing
  /// VPRegionBlock that has successors and return them. If no such
  /// VPRegionBlock exists, return the (empty) successors of the topmost
  /// VPBlockBase reached.
  const VPBlocksTy &getHierarchicalSuccessors() {
    return getEnclosingBlockWithSuccessors()->getSuccessors();
  }

  /// \return the hierarchical successor of this VPBlockBase if it has a single
  /// hierarchical successor. Otherwise return a null pointer.
  VPBlockBase *getSingleHierarchicalSuccessor() {
    return getEnclosingBlockWithSuccessors()->getSingleSuccessor();
  }

  /// \return the predecessors either attached directly to this VPBlockBase or,
  /// if this VPBlockBase is the entry block of a VPRegionBlock and has no
  /// predecessors of its own, search recursively for the first enclosing
  /// VPRegionBlock that has predecessors and return them. If no such
  /// VPRegionBlock exists, return the (empty) predecessors of the topmost
  /// VPBlockBase reached.
  const VPBlocksTy &getHierarchicalPredecessors() {
    return getEnclosingBlockWithPredecessors()->getPredecessors();
  }

  /// \return the hierarchical predecessor of this VPBlockBase if it has a
  /// single hierarchical predecessor. Otherwise return a null pointer.
  VPBlockBase *getSingleHierarchicalPredecessor() {
    return getEnclosingBlockWithPredecessors()->getSinglePredecessor();
  }
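
  // For illustration, consider a VPRegionBlock R with successor S whose exit
  // basic block B has no successors of its own:
  //
  //   [R: ... -> B] -> S
  //
  // Then B->getSuccessors() is empty, B->getHierarchicalSuccessors() yields
  // {S} (found by walking up to R), and B->getSingleHierarchicalSuccessor()
  // returns S.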

  /// \return the condition bit selecting the successor.
  VPValue *getCondBit();
  /// \return the condition bit selecting the successor.
  const VPValue *getCondBit() const;
  /// Set the condition bit selecting the successor.
  void setCondBit(VPValue *CV);

  /// \return the block's predicate.
  VPValue *getPredicate();
  /// \return the block's predicate.
  const VPValue *getPredicate() const;
  /// Set the block's predicate.
  void setPredicate(VPValue *Pred);

  /// Set a given VPBlockBase \p Successor as the single successor of this
  /// VPBlockBase. This VPBlockBase is not added as predecessor of \p Successor.
  /// This VPBlockBase must have no successors.
  void setOneSuccessor(VPBlockBase *Successor) {
    assert(Successors.empty() && "Setting one successor when others exist.");
    appendSuccessor(Successor);
  }

  /// Set two given VPBlockBases \p IfTrue and \p IfFalse to be the two
  /// successors of this VPBlockBase. \p Condition is set as the successor
  /// selector. This VPBlockBase is not added as predecessor of \p IfTrue or \p
  /// IfFalse. This VPBlockBase must have no successors.
  void setTwoSuccessors(VPBlockBase *IfTrue, VPBlockBase *IfFalse,
                        VPValue *Condition) {
    assert(Successors.empty() && "Setting two successors when others exist.");
    assert(Condition && "Setting two successors without condition!");
    setCondBit(Condition);
    appendSuccessor(IfTrue);
    appendSuccessor(IfFalse);
  }
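
  // A minimal wiring sketch (illustrative; Then, Else and Cond are assumed to
  // exist): the setters above record successors only, so the reverse
  // predecessor edges must be added explicitly.
  //
  //   Block->setTwoSuccessors(Then, Else, Cond);
  //   Then->setPredecessors({Block});
  //   Else->setPredecessors({Block});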

  /// Set each VPBasicBlock in \p NewPreds as predecessor of this VPBlockBase.
  /// This VPBlockBase must have no predecessors. This VPBlockBase is not added
  /// as successor of any VPBasicBlock in \p NewPreds.
  void setPredecessors(ArrayRef<VPBlockBase *> NewPreds) {
    assert(Predecessors.empty() && "Block predecessors already set.");
    for (auto *Pred : NewPreds)
      appendPredecessor(Pred);
  }

  /// Remove all the predecessors of this block.
  void clearPredecessors() { Predecessors.clear(); }

  /// Remove all the successors of this block and set its condition bit to
  /// null.
  void clearSuccessors() {
    Successors.clear();
    setCondBit(nullptr);
  }

  /// The method which generates the output IR that corresponds to this
  /// VPBlockBase, thereby "executing" the VPlan.
  virtual void execute(struct VPTransformState *State) = 0;

  /// Delete all blocks reachable from a given VPBlockBase, inclusive.
  static void deleteCFG(VPBlockBase *Entry);

  /// Return true if it is legal to hoist instructions into this block.
  bool isLegalToHoistInto() {
    // There are currently no constraints that prevent an instruction from
    // being hoisted into a VPBlockBase.
    return true;
  }

  /// Replace all operands of VPUsers in the block with \p NewValue and also
  /// replaces all uses of VPValues defined in the block with NewValue.
  virtual void dropAllReferences(VPValue *NewValue) = 0;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  void printAsOperand(raw_ostream &OS, bool PrintType) const {
    OS << getName();
  }

  /// Print plain-text dump of this VPBlockBase to \p O, prefixing all lines
  /// with \p Indent. \p SlotTracker is used to print unnamed VPValue's using
  /// consecutive numbers.
  ///
  /// Note that the numbering is applied to the whole VPlan, so printing
  /// individual blocks is consistent with the whole VPlan printing.
  virtual void print(raw_ostream &O, const Twine &Indent,
                     VPSlotTracker &SlotTracker) const = 0;

  /// Print plain-text dump of this VPBlockBase to \p O.
  void print(raw_ostream &O) const {
    VPSlotTracker SlotTracker(getPlan());
    print(O, "", SlotTracker);
  }

  /// Print the successors of this block to \p O, prefixing all lines with \p
  /// Indent.
  void printSuccessors(raw_ostream &O, const Twine &Indent) const;

  /// Dump this VPBlockBase to dbgs().
  LLVM_DUMP_METHOD void dump() const { print(dbgs()); }
#endif
};

/// VPRecipeBase is a base class modeling a sequence of one or more output IR
/// instructions. VPRecipeBase owns the VPValues it defines through VPDef
/// and is responsible for deleting its defined values. Single-value
/// VPRecipeBases that also inherit from VPValue must make sure to inherit from
/// VPRecipeBase before VPValue.
class VPRecipeBase : public ilist_node_with_parent<VPRecipeBase, VPBasicBlock>,
                     public VPDef,
                     public VPUser {
  friend VPBasicBlock;
  friend class VPBlockUtils;

  /// Each VPRecipe belongs to a single VPBasicBlock.
  VPBasicBlock *Parent = nullptr;

public:
  VPRecipeBase(const unsigned char SC, ArrayRef<VPValue *> Operands)
      : VPDef(SC), VPUser(Operands, VPUser::VPUserID::Recipe) {}

  template <typename IterT>
  VPRecipeBase(const unsigned char SC, iterator_range<IterT> Operands)
      : VPDef(SC), VPUser(Operands, VPUser::VPUserID::Recipe) {}
  virtual ~VPRecipeBase() = default;

  /// \return the VPBasicBlock which this VPRecipe belongs to.
  VPBasicBlock *getParent() { return Parent; }
  const VPBasicBlock *getParent() const { return Parent; }

  /// The method which generates the output IR instructions that correspond to
  /// this VPRecipe, thereby "executing" the VPlan.
  virtual void execute(struct VPTransformState &State) = 0;

  /// Insert an unlinked recipe into a basic block immediately before
  /// the specified recipe.
  void insertBefore(VPRecipeBase *InsertPos);

  /// Insert an unlinked Recipe into a basic block immediately after
  /// the specified Recipe.
  void insertAfter(VPRecipeBase *InsertPos);

  /// Unlink this recipe from its current VPBasicBlock and insert it into
  /// the VPBasicBlock that MovePos lives in, right after MovePos.
  void moveAfter(VPRecipeBase *MovePos);

  /// Unlink this recipe and insert into BB before I.
  ///
  /// \pre I is a valid iterator into BB.
  void moveBefore(VPBasicBlock &BB, iplist<VPRecipeBase>::iterator I);

  /// This method unlinks 'this' from the containing basic block, but does not
  /// delete it.
  void removeFromParent();

  /// This method unlinks 'this' from the containing basic block and deletes it.
  ///
  /// \returns an iterator pointing to the element after the erased one.
  iplist<VPRecipeBase>::iterator eraseFromParent();

  /// Returns the underlying instruction if the recipe is a VPValue, or nullptr
  /// otherwise.
  Instruction *getUnderlyingInstr() {
    return cast<Instruction>(getVPSingleValue()->getUnderlyingValue());
  }
  const Instruction *getUnderlyingInstr() const {
    return cast<Instruction>(getVPSingleValue()->getUnderlyingValue());
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    // All VPDefs are also VPRecipeBases.
    return true;
  }

  static inline bool classof(const VPUser *U) {
    return U->getVPUserID() == VPUser::VPUserID::Recipe;
  }

  /// Returns true if the recipe may have side-effects.
  bool mayHaveSideEffects() const;

  /// Returns true for PHI-like recipes.
  bool isPhi() const {
    return getVPDefID() >= VPFirstPHISC && getVPDefID() <= VPLastPHISC;
  }

  /// Returns true if the recipe may read from memory.
  bool mayReadFromMemory() const;

  /// Returns true if the recipe may write to memory.
  bool mayWriteToMemory() const;

  /// Returns true if the recipe may read from or write to memory.
  bool mayReadOrWriteMemory() const {
    return mayReadFromMemory() || mayWriteToMemory();
  }

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  /// Conservatively returns false.
  virtual bool onlyFirstLaneUsed(const VPValue *Op) const {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return false;
  }
};
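
// A minimal sketch of the insertion/movement API above (illustrative; A and B
// are assumed to be recipes already linked into some VPBasicBlock):
//
//   NewRecipe->insertBefore(A);   // Link an unlinked recipe before A.
//   NewRecipe->moveAfter(B);      // Unlink it again and place it after B.
//   NewRecipe->eraseFromParent(); // Unlink and delete it.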

inline bool VPUser::classof(const VPDef *Def) {
  return Def->getVPDefID() == VPRecipeBase::VPInstructionSC ||
         Def->getVPDefID() == VPRecipeBase::VPWidenSC ||
         Def->getVPDefID() == VPRecipeBase::VPWidenCallSC ||
         Def->getVPDefID() == VPRecipeBase::VPWidenSelectSC ||
         Def->getVPDefID() == VPRecipeBase::VPWidenGEPSC ||
         Def->getVPDefID() == VPRecipeBase::VPBlendSC ||
         Def->getVPDefID() == VPRecipeBase::VPInterleaveSC ||
         Def->getVPDefID() == VPRecipeBase::VPReplicateSC ||
         Def->getVPDefID() == VPRecipeBase::VPReductionSC ||
         Def->getVPDefID() == VPRecipeBase::VPBranchOnMaskSC ||
         Def->getVPDefID() == VPRecipeBase::VPWidenMemoryInstructionSC;
}

/// This is a concrete Recipe that models a single VPlan-level instruction.
/// While, as with any Recipe, it may generate a sequence of IR instructions
/// when executed, these instructions always form a single-def expression, as
/// the VPInstruction is also a single def-use vertex.
class VPInstruction : public VPRecipeBase, public VPValue {
  friend class VPlanSlp;

public:
  /// VPlan opcodes, extending LLVM IR with idiomatic instructions.
  enum {
    FirstOrderRecurrenceSplice =
        Instruction::OtherOpsEnd + 1, // Combines the incoming and previous
                                      // values of a first-order recurrence.
    Not,
    ICmpULE,
    SLPLoad,
    SLPStore,
    ActiveLaneMask,
    CanonicalIVIncrement,
    CanonicalIVIncrementNUW,
    BranchOnCount,
  };

private:
  typedef unsigned char OpcodeTy;
  OpcodeTy Opcode;
  FastMathFlags FMF;
  DebugLoc DL;

  /// Utility method serving execute(): generates a single instance of the
  /// modeled instruction.
  void generateInstruction(VPTransformState &State, unsigned Part);

protected:
  void setUnderlyingInstr(Instruction *I) { setUnderlyingValue(I); }

public:
  VPInstruction(unsigned Opcode, ArrayRef<VPValue *> Operands, DebugLoc DL)
      : VPRecipeBase(VPRecipeBase::VPInstructionSC, Operands),
        VPValue(VPValue::VPVInstructionSC, nullptr, this), Opcode(Opcode),
        DL(DL) {}

  VPInstruction(unsigned Opcode, std::initializer_list<VPValue *> Operands,
                DebugLoc DL = {})
      : VPInstruction(Opcode, ArrayRef<VPValue *>(Operands), DL) {}

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPValue *V) {
    return V->getVPValueID() == VPValue::VPVInstructionSC;
  }

  VPInstruction *clone() const {
    SmallVector<VPValue *, 2> Operands(operands());
    return new VPInstruction(Opcode, Operands, DL);
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *R) {
    return R->getVPDefID() == VPRecipeBase::VPInstructionSC;
  }

  /// Extra classof implementations to allow directly casting from VPUser ->
  /// VPInstruction.
  static inline bool classof(const VPUser *U) {
    auto *R = dyn_cast<VPRecipeBase>(U);
    return R && R->getVPDefID() == VPRecipeBase::VPInstructionSC;
  }
  static inline bool classof(const VPRecipeBase *R) {
    return R->getVPDefID() == VPRecipeBase::VPInstructionSC;
  }

  unsigned getOpcode() const { return Opcode; }

  /// Generate the instruction.
  /// TODO: We currently execute only per-part unless a specific instance is
  /// provided.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the VPInstruction to \p O.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;

  /// Print the VPInstruction to dbgs() (for debugging).
  LLVM_DUMP_METHOD void dump() const;
#endif

  /// Return true if this instruction may modify memory.
  bool mayWriteToMemory() const {
    // TODO: we can use attributes of the called function to rule out memory
    // modifications.
    return Opcode == Instruction::Store || Opcode == Instruction::Call ||
           Opcode == Instruction::Invoke || Opcode == SLPStore;
  }

  bool hasResult() const {
    // CallInst may or may not have a result, depending on the called function.
    // Conservatively assume calls have results for now.
    switch (getOpcode()) {
    case Instruction::Ret:
    case Instruction::Br:
    case Instruction::Store:
    case Instruction::Switch:
    case Instruction::IndirectBr:
    case Instruction::Resume:
    case Instruction::CatchRet:
    case Instruction::Unreachable:
    case Instruction::Fence:
    case Instruction::AtomicRMW:
    case VPInstruction::BranchOnCount:
      return false;
    default:
      return true;
    }
  }

  /// Set the fast-math flags.
  void setFastMathFlags(FastMathFlags FMFNew);

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    if (getOperand(0) != Op)
      return false;
    switch (getOpcode()) {
    default:
      return false;
    case VPInstruction::ActiveLaneMask:
    case VPInstruction::CanonicalIVIncrement:
    case VPInstruction::CanonicalIVIncrementNUW:
    case VPInstruction::BranchOnCount:
      return true;
    }
    llvm_unreachable("switch should return");
  }
};
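
// A minimal construction sketch (illustrative; A and B are assumed to be
// existing VPValues): model `!(A <=u B)` with two VPlan-level instructions.
//
//   VPInstruction *Cmp = new VPInstruction(VPInstruction::ICmpULE, {A, B});
//   VPInstruction *Neg = new VPInstruction(VPInstruction::Not, {Cmp});
//   VPInstruction *Copy = Neg->clone(); // Same opcode/operands, unlinked.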

/// VPWidenRecipe is a recipe for producing a widened (vector) copy of its
/// ingredient. This recipe covers most of the traditional vectorization cases
/// where each ingredient transforms into a vectorized version of itself.
class VPWidenRecipe : public VPRecipeBase, public VPValue {
public:
  template <typename IterT>
  VPWidenRecipe(Instruction &I, iterator_range<IterT> Operands)
      : VPRecipeBase(VPRecipeBase::VPWidenSC, Operands),
        VPValue(VPValue::VPVWidenSC, &I, this) {}

  ~VPWidenRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPWidenSC;
  }
  static inline bool classof(const VPValue *V) {
    return V->getVPValueID() == VPValue::VPVWidenSC;
  }

  /// Produce widened copies of all Ingredients.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A recipe for widening Call instructions.
class VPWidenCallRecipe : public VPRecipeBase, public VPValue {
public:
  template <typename IterT>
  VPWidenCallRecipe(CallInst &I, iterator_range<IterT> CallArguments)
      : VPRecipeBase(VPRecipeBase::VPWidenCallSC, CallArguments),
        VPValue(VPValue::VPVWidenCallSC, &I, this) {}

  ~VPWidenCallRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPWidenCallSC;
  }

  /// Produce a widened version of the call instruction.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A recipe for widening select instructions.
class VPWidenSelectRecipe : public VPRecipeBase, public VPValue {
  /// Is the condition of the select loop invariant?
  bool InvariantCond;

public:
  template <typename IterT>
  VPWidenSelectRecipe(SelectInst &I, iterator_range<IterT> Operands,
                      bool InvariantCond)
      : VPRecipeBase(VPRecipeBase::VPWidenSelectSC, Operands),
        VPValue(VPValue::VPVWidenSelectSC, &I, this),
        InvariantCond(InvariantCond) {}

  ~VPWidenSelectRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPWidenSelectSC;
  }

  /// Produce a widened version of the select instruction.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A recipe for handling GEP instructions.
class VPWidenGEPRecipe : public VPRecipeBase, public VPValue {
  bool IsPtrLoopInvariant;
  SmallBitVector IsIndexLoopInvariant;

public:
  template <typename IterT>
  VPWidenGEPRecipe(GetElementPtrInst *GEP, iterator_range<IterT> Operands)
      : VPRecipeBase(VPRecipeBase::VPWidenGEPSC, Operands),
        VPValue(VPValue::VPVWidenGEPSC, GEP, this),
        IsIndexLoopInvariant(GEP->getNumIndices(), false) {}

  template <typename IterT>
  VPWidenGEPRecipe(GetElementPtrInst *GEP, iterator_range<IterT> Operands,
                   Loop *OrigLoop)
      : VPRecipeBase(VPRecipeBase::VPWidenGEPSC, Operands),
        VPValue(VPValue::VPVWidenGEPSC, GEP, this),
        IsIndexLoopInvariant(GEP->getNumIndices(), false) {
    IsPtrLoopInvariant = OrigLoop->isLoopInvariant(GEP->getPointerOperand());
    for (auto Index : enumerate(GEP->indices()))
      IsIndexLoopInvariant[Index.index()] =
          OrigLoop->isLoopInvariant(Index.value().get());
  }

  ~VPWidenGEPRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPWidenGEPSC;
  }

  /// Generate the gep nodes.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A recipe for handling phi nodes of integer and floating-point inductions,
/// producing their vector and scalar values.
class VPWidenIntOrFpInductionRecipe : public VPRecipeBase, public VPValue {
  PHINode *IV;
  const InductionDescriptor &IndDesc;
  bool NeedsScalarIV;
  bool NeedsVectorIV;

public:
  VPWidenIntOrFpInductionRecipe(PHINode *IV, VPValue *Start,
                                const InductionDescriptor &IndDesc,
                                bool NeedsScalarIV, bool NeedsVectorIV)
      : VPRecipeBase(VPWidenIntOrFpInductionSC, {Start}), VPValue(IV, this),
        IV(IV), IndDesc(IndDesc), NeedsScalarIV(NeedsScalarIV),
        NeedsVectorIV(NeedsVectorIV) {}

  VPWidenIntOrFpInductionRecipe(PHINode *IV, VPValue *Start,
                                const InductionDescriptor &IndDesc,
                                TruncInst *Trunc, bool NeedsScalarIV,
                                bool NeedsVectorIV)
      : VPRecipeBase(VPWidenIntOrFpInductionSC, {Start}), VPValue(Trunc, this),
        IV(IV), IndDesc(IndDesc), NeedsScalarIV(NeedsScalarIV),
        NeedsVectorIV(NeedsVectorIV) {}

  ~VPWidenIntOrFpInductionRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPWidenIntOrFpInductionSC;
  }

  /// Generate the vectorized and scalarized versions of the phi node as
  /// needed by their users.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns the start value of the induction.
  VPValue *getStartValue() { return getOperand(0); }
  const VPValue *getStartValue() const { return getOperand(0); }

  /// Returns the first defined value as TruncInst, if it is one, or nullptr
  /// otherwise.
  TruncInst *getTruncInst() {
    return dyn_cast_or_null<TruncInst>(getVPValue(0)->getUnderlyingValue());
  }
  const TruncInst *getTruncInst() const {
    return dyn_cast_or_null<TruncInst>(getVPValue(0)->getUnderlyingValue());
  }

  /// Returns the induction descriptor for the recipe.
  const InductionDescriptor &getInductionDescriptor() const { return IndDesc; }

  /// Returns true if the induction is canonical, i.e. starting at 0 and
  /// incremented by UF * VF (= the original IV is incremented by 1).
  bool isCanonical() const;

  /// Returns the scalar type of the induction.
  const Type *getScalarType() const {
    const TruncInst *TruncI = getTruncInst();
    return TruncI ? TruncI->getType() : IV->getType();
  }

  /// Returns true if a scalar phi needs to be created for the induction.
  bool needsScalarIV() const { return NeedsScalarIV; }

  /// Returns true if a vector phi needs to be created for the induction.
  bool needsVectorIV() const { return NeedsVectorIV; }
};
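
// For example, with VF = 4 and UF = 2 a canonical induction starts at 0 and
// is advanced by VF * UF = 8 on every vector-loop iteration, matching the
// original scalar IV being incremented by 1 per original loop iteration.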

/// A pure virtual base class for all recipes modeling header phis, including
/// phis for first order recurrences, pointer inductions and reductions. The
/// start value is the first operand of the recipe and the incoming value from
/// the backedge is the second operand.
class VPHeaderPHIRecipe : public VPRecipeBase, public VPValue {
protected:
  VPHeaderPHIRecipe(unsigned char VPVID, unsigned char VPDefID, PHINode *Phi,
                    VPValue *Start = nullptr)
      : VPRecipeBase(VPDefID, {}), VPValue(VPVID, Phi, this) {
    if (Start)
      addOperand(Start);
  }

public:
  ~VPHeaderPHIRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPRecipeBase *B) {
    return B->getVPDefID() == VPRecipeBase::VPCanonicalIVPHISC ||
           B->getVPDefID() == VPRecipeBase::VPFirstOrderRecurrencePHISC ||
           B->getVPDefID() == VPRecipeBase::VPReductionPHISC ||
           B->getVPDefID() == VPRecipeBase::VPWidenIntOrFpInductionSC ||
           B->getVPDefID() == VPRecipeBase::VPWidenPHISC;
  }
  static inline bool classof(const VPValue *V) {
    return V->getVPValueID() == VPValue::VPVCanonicalIVPHISC ||
           V->getVPValueID() == VPValue::VPVFirstOrderRecurrencePHISC ||
           V->getVPValueID() == VPValue::VPVReductionPHISC ||
           V->getVPValueID() == VPValue::VPVWidenIntOrFpInductionSC ||
           V->getVPValueID() == VPValue::VPVWidenPHISC;
  }

  /// Generate the phi nodes.
  void execute(VPTransformState &State) override = 0;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override = 0;
#endif

  /// Returns the start value of the phi, if one is set.
  VPValue *getStartValue() {
    return getNumOperands() == 0 ? nullptr : getOperand(0);
  }

  /// Returns the incoming value from the loop backedge.
  VPValue *getBackedgeValue() {
    return getOperand(1);
  }

  /// Returns the backedge value as a recipe. The backedge value is guaranteed
  /// to be a recipe.
  VPRecipeBase *getBackedgeRecipe() {
    return cast<VPRecipeBase>(getBackedgeValue()->getDef());
  }
};
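
// Illustrative sketch (not part of the original header): for any concrete
// header phi recipe `R` (hypothetical), the operand layout fixed by this base
// class makes the accessors equivalent to plain operand queries:
//
//   VPValue *Start = R->getStartValue();       // operand 0, nullptr if unset
//   VPValue *Backedge = R->getBackedgeValue(); // operand 1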

/// A recipe for handling header phis that are widened in the vector loop.
/// In the VPlan native path, all incoming VPValues & VPBasicBlock pairs are
/// managed in the recipe directly.
class VPWidenPHIRecipe : public VPHeaderPHIRecipe {
  /// List of incoming blocks. Only used in the VPlan native path.
  SmallVector<VPBasicBlock *, 2> IncomingBlocks;

public:
  /// Create a new VPWidenPHIRecipe for \p Phi with start value \p Start.
  VPWidenPHIRecipe(PHINode *Phi, VPValue *Start = nullptr)
      : VPHeaderPHIRecipe(VPVWidenPHISC, VPWidenPHISC, Phi) {
    if (Start)
      addOperand(Start);
  }
  ~VPWidenPHIRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPRecipeBase *B) {
    return B->getVPDefID() == VPRecipeBase::VPWidenPHISC;
  }
  static inline bool classof(const VPHeaderPHIRecipe *R) {
    return R->getVPDefID() == VPRecipeBase::VPWidenPHISC;
  }
  static inline bool classof(const VPValue *V) {
    return V->getVPValueID() == VPValue::VPVWidenPHISC;
  }

  /// Generate the phi/select nodes.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Adds a pair (\p IncomingV, \p IncomingBlock) to the phi.
  void addIncoming(VPValue *IncomingV, VPBasicBlock *IncomingBlock) {
    addOperand(IncomingV);
    IncomingBlocks.push_back(IncomingBlock);
  }

  /// Returns the \p I th incoming VPBasicBlock.
  VPBasicBlock *getIncomingBlock(unsigned I) { return IncomingBlocks[I]; }

  /// Returns the \p I th incoming VPValue.
  VPValue *getIncomingValue(unsigned I) { return getOperand(I); }
};
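
// Illustrative sketch (not part of the original header): addIncoming() appends
// to the operand list and to IncomingBlocks in lock-step, so index I addresses
// a matching (value, block) pair. For a hypothetical `WidenPhi` and index I:
//
//   VPValue *V = WidenPhi->getIncomingValue(I);      // operand I
//   VPBasicBlock *B = WidenPhi->getIncomingBlock(I); // I'th recorded block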

/// A recipe for handling first-order recurrence phis. The start value is the
/// first operand of the recipe and the incoming value from the backedge is the
/// second operand.
struct VPFirstOrderRecurrencePHIRecipe : public VPHeaderPHIRecipe {
  VPFirstOrderRecurrencePHIRecipe(PHINode *Phi, VPValue &Start)
      : VPHeaderPHIRecipe(VPVFirstOrderRecurrencePHISC,
                          VPFirstOrderRecurrencePHISC, Phi, &Start) {}

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPRecipeBase *R) {
    return R->getVPDefID() == VPRecipeBase::VPFirstOrderRecurrencePHISC;
  }
  static inline bool classof(const VPHeaderPHIRecipe *R) {
    return R->getVPDefID() == VPRecipeBase::VPFirstOrderRecurrencePHISC;
  }
  static inline bool classof(const VPValue *V) {
    return V->getVPValueID() == VPValue::VPVFirstOrderRecurrencePHISC;
  }

  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A recipe for handling reduction phis. The start value is the first operand
/// of the recipe and the incoming value from the backedge is the second
/// operand.
class VPReductionPHIRecipe : public VPHeaderPHIRecipe {
  /// Descriptor for the reduction.
  const RecurrenceDescriptor &RdxDesc;

  /// The phi is part of an in-loop reduction.
  bool IsInLoop;

  /// The phi is part of an ordered reduction. Requires IsInLoop to be true.
  bool IsOrdered;

public:
  /// Create a new VPReductionPHIRecipe for the reduction \p Phi described by
  /// \p RdxDesc.
  VPReductionPHIRecipe(PHINode *Phi, const RecurrenceDescriptor &RdxDesc,
                       VPValue &Start, bool IsInLoop = false,
                       bool IsOrdered = false)
      : VPHeaderPHIRecipe(VPVReductionPHISC, VPReductionPHISC, Phi, &Start),
        RdxDesc(RdxDesc), IsInLoop(IsInLoop), IsOrdered(IsOrdered) {
    assert((!IsOrdered || IsInLoop) && "IsOrdered requires IsInLoop");
  }
  ~VPReductionPHIRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPRecipeBase *R) {
    return R->getVPDefID() == VPRecipeBase::VPReductionPHISC;
  }
  static inline bool classof(const VPHeaderPHIRecipe *R) {
    return R->getVPDefID() == VPRecipeBase::VPReductionPHISC;
  }
  static inline bool classof(const VPValue *V) {
    return V->getVPValueID() == VPValue::VPVReductionPHISC;
  }

  /// Generate the phi/select nodes.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  const RecurrenceDescriptor &getRecurrenceDescriptor() const {
    return RdxDesc;
  }

  /// Returns true if the phi is part of an ordered reduction.
  bool isOrdered() const { return IsOrdered; }

  /// Returns true if the phi is part of an in-loop reduction.
  bool isInLoop() const { return IsInLoop; }
};

/// A recipe for vectorizing a phi-node as a sequence of mask-based select
/// instructions.
class VPBlendRecipe : public VPRecipeBase, public VPValue {
  PHINode *Phi;

public:
  /// The blend operation is a User of the incoming values and of their
  /// respective masks, ordered [I0, M0, I1, M1, ...]. Note that a single value
  /// might be incoming with a full mask for which there is no VPValue.
  VPBlendRecipe(PHINode *Phi, ArrayRef<VPValue *> Operands)
      : VPRecipeBase(VPBlendSC, Operands),
        VPValue(VPValue::VPVBlendSC, Phi, this), Phi(Phi) {
    assert(Operands.size() > 0 &&
           ((Operands.size() == 1) || (Operands.size() % 2 == 0)) &&
           "Expected either a single incoming value or a positive even number "
           "of operands");
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPBlendSC;
  }

  /// Return the number of incoming values, taking into account that a single
  /// incoming value has no mask.
  unsigned getNumIncomingValues() const { return (getNumOperands() + 1) / 2; }

  /// Return incoming value number \p Idx.
  VPValue *getIncomingValue(unsigned Idx) const { return getOperand(Idx * 2); }

  /// Return mask number \p Idx.
  VPValue *getMask(unsigned Idx) const { return getOperand(Idx * 2 + 1); }

  /// Generate the phi/select nodes.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    // Recursing through Blend recipes only; recursion must terminate at a
    // header phi at the latest.
    return all_of(users(), [this](VPUser *U) {
      return cast<VPRecipeBase>(U)->onlyFirstLaneUsed(this);
    });
  }
};
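
// Illustrative sketch (not part of the original header): with operands ordered
// [I0, M0, I1, M1, ...], incoming value Idx lives at operand 2*Idx and its
// mask at operand 2*Idx+1. For a hypothetical VPBlendRecipe *Blend:
//
//   unsigned NumIncoming = Blend->getNumIncomingValues();
//   for (unsigned I = 0; I != NumIncoming; ++I) {
//     VPValue *Incoming = Blend->getIncomingValue(I);
//     // A single incoming value carries no mask operand, so guard getMask().
//     VPValue *Mask = NumIncoming == 1 ? nullptr : Blend->getMask(I);
//   }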

/// VPInterleaveRecipe is a recipe for transforming an interleave group of
/// loads or stores into one wide load/store and shuffles. The first operand of
/// a VPInterleave recipe is the address, followed by the stored values,
/// followed by an optional mask.
class VPInterleaveRecipe : public VPRecipeBase {
  const InterleaveGroup<Instruction> *IG;
  bool HasMask = false;

public:
  VPInterleaveRecipe(const InterleaveGroup<Instruction> *IG, VPValue *Addr,
                     ArrayRef<VPValue *> StoredValues, VPValue *Mask)
      : VPRecipeBase(VPInterleaveSC, {Addr}), IG(IG) {
    for (unsigned i = 0; i < IG->getFactor(); ++i)
      if (Instruction *I = IG->getMember(i)) {
        if (I->getType()->isVoidTy())
          continue;
        new VPValue(I, this);
      }

    for (auto *SV : StoredValues)
      addOperand(SV);
    if (Mask) {
      HasMask = true;
      addOperand(Mask);
    }
  }
  ~VPInterleaveRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPInterleaveSC;
  }

  /// Return the address accessed by this recipe.
  VPValue *getAddr() const {
    return getOperand(0); // Address is the 1st, mandatory operand.
  }

  /// Return the mask used by this recipe. Note that a full mask is represented
  /// by a nullptr.
  VPValue *getMask() const {
    // Mask is optional and therefore the last, currently 2nd operand.
    return HasMask ? getOperand(getNumOperands() - 1) : nullptr;
  }

  /// Return the VPValues stored by this interleave group. If it is a load
  /// interleave group, return an empty ArrayRef.
  ArrayRef<VPValue *> getStoredValues() const {
    // The first operand is the address, followed by the stored values,
    // followed by an optional mask.
    return ArrayRef<VPValue *>(op_begin(), getNumOperands())
        .slice(1, getNumStoreOperands());
  }

  /// Generate the wide load or store, and shuffles.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  const InterleaveGroup<Instruction> *getInterleaveGroup() { return IG; }

  /// Returns the number of stored operands of this interleave group. Returns 0
  /// for load interleave groups.
  unsigned getNumStoreOperands() const {
    return getNumOperands() - (HasMask ? 2 : 1);
  }
};
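
// Illustrative sketch (not part of the original header): the operands are laid
// out as {Addr, StoredVal0, ..., StoredValN-1[, Mask]}. For a hypothetical
// VPInterleaveRecipe *IR:
//
//   VPValue *Addr = IR->getAddr();                      // operand 0
//   ArrayRef<VPValue *> Stored = IR->getStoredValues(); // empty for loads
//   VPValue *Mask = IR->getMask(); // nullptr represents an all-true mask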

/// A recipe to represent in-loop reduction operations, performing a reduction
/// on a vector operand into a scalar value, and adding the result to a chain.
/// The Operands are {ChainOp, VecOp, [Condition]}.
class VPReductionRecipe : public VPRecipeBase, public VPValue {
  /// The recurrence descriptor for the reduction in question.
  const RecurrenceDescriptor *RdxDesc;

  /// Pointer to the TTI, needed to create the target reduction.
  const TargetTransformInfo *TTI;

public:
  VPReductionRecipe(const RecurrenceDescriptor *R, Instruction *I,
                    VPValue *ChainOp, VPValue *VecOp, VPValue *CondOp,
                    const TargetTransformInfo *TTI)
      : VPRecipeBase(VPRecipeBase::VPReductionSC, {ChainOp, VecOp}),
        VPValue(VPValue::VPVReductionSC, I, this), RdxDesc(R), TTI(TTI) {
    if (CondOp)
      addOperand(CondOp);
  }
  ~VPReductionRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPValue *V) {
    return V->getVPValueID() == VPValue::VPVReductionSC;
  }

  /// Generate the reduction in the loop.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// The VPValue of the scalar Chain being accumulated.
  VPValue *getChainOp() const { return getOperand(0); }

  /// The VPValue of the vector value to be reduced.
  VPValue *getVecOp() const { return getOperand(1); }

  /// The VPValue of the condition for the block.
  VPValue *getCondOp() const {
    return getNumOperands() > 2 ? getOperand(2) : nullptr;
  }
};
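
// Conceptual expansion (a sketch, not code from this header): a
// VPReductionRecipe with operands {ChainOp, VecOp, [Condition]} computes
//
//   Result = ChainOp <op> reduce(select(Condition, VecOp, Identity))
//
// where <op> and Identity come from the recurrence descriptor, and the select
// is only materialized when a condition operand is present.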

/// VPReplicateRecipe replicates a given instruction producing multiple scalar
/// copies of the original scalar type, one per lane, instead of producing a
/// single copy of widened type for all lanes. If the instruction is known to
/// be uniform, only one copy, for lane zero, will be generated.
class VPReplicateRecipe : public VPRecipeBase, public VPValue {
  /// Indicator if only a single replica per lane is needed.
  bool IsUniform;

  /// Indicator if the replicas are also predicated.
  bool IsPredicated;

  /// Indicator if the scalar values should also be packed into a vector.
  bool AlsoPack;

public:
  template <typename IterT>
  VPReplicateRecipe(Instruction *I, iterator_range<IterT> Operands,
                    bool IsUniform, bool IsPredicated = false)
      : VPRecipeBase(VPReplicateSC, Operands), VPValue(VPVReplicateSC, I, this),
        IsUniform(IsUniform), IsPredicated(IsPredicated) {
    // Retain the previous behavior of predicateInstructions(), where an
    // insert-element of a predicated instruction got hoisted into the
    // predicated basic block iff it was its only user. This is achieved by
    // having predicated instructions also pack their values into a vector by
    // default unless they have a replicated user which uses their scalar value.
    AlsoPack = IsPredicated && !I->use_empty();
  }
  ~VPReplicateRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPReplicateSC;
  }
  static inline bool classof(const VPValue *V) {
    return V->getVPValueID() == VPValue::VPVReplicateSC;
  }

  /// Generate replicas of the desired Ingredient. Replicas will be generated
  /// for all parts and lanes unless a specific part and lane are specified in
  /// the \p State.
  void execute(VPTransformState &State) override;

  void setAlsoPack(bool Pack) { AlsoPack = Pack; }

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  bool isUniform() const { return IsUniform; }

  bool isPacked() const { return AlsoPack; }

  bool isPredicated() const { return IsPredicated; }

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return isUniform();
  }
};
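
// Worked example (not part of the original header): with VF=4 and UF=2, a
// non-uniform replicate recipe emits 4*2 = 8 scalar clones of its ingredient,
// one per lane and part, while a uniform one emits a single clone per part
// (lane 0 only), i.e. 2 clones.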

/// A recipe for generating conditional branches on the bits of a mask.
class VPBranchOnMaskRecipe : public VPRecipeBase {
public:
  VPBranchOnMaskRecipe(VPValue *BlockInMask)
      : VPRecipeBase(VPBranchOnMaskSC, {}) {
    if (BlockInMask) // nullptr means all-one mask.
      addOperand(BlockInMask);
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPBranchOnMaskSC;
  }

  /// Generate the extraction of the appropriate bit from the block mask and
  /// the conditional branch.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override {
    O << Indent << "BRANCH-ON-MASK ";
    if (VPValue *Mask = getMask())
      Mask->printAsOperand(O, SlotTracker);
    else
      O << " All-One";
  }
#endif

  /// Return the mask used by this recipe. Note that a full mask is represented
  /// by a nullptr.
  VPValue *getMask() const {
    assert(getNumOperands() <= 1 && "should have either 0 or 1 operands");
    // Mask is optional.
    return getNumOperands() == 1 ? getOperand(0) : nullptr;
  }
};

/// VPPredInstPHIRecipe is a recipe for generating the phi nodes needed when
/// control converges back from a Branch-on-Mask. The phi nodes are needed in
/// order to merge values that are set under such a branch and feed their uses.
/// The phi nodes can be scalar or vector depending on the users of the value.
/// This recipe works in concert with VPBranchOnMaskRecipe.
class VPPredInstPHIRecipe : public VPRecipeBase, public VPValue {
public:
  /// Construct a VPPredInstPHIRecipe given \p PredV, whose value needs phi
  /// nodes after merging back from a Branch-on-Mask.
  VPPredInstPHIRecipe(VPValue *PredV)
      : VPRecipeBase(VPPredInstPHISC, PredV),
        VPValue(VPValue::VPVPredInstPHI, nullptr, this) {}
  ~VPPredInstPHIRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPPredInstPHISC;
  }

  /// Generates phi nodes for live-outs as needed to retain SSA form.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif
};

/// A Recipe for widening load/store operations.
/// The recipe uses the following VPValues:
/// - For load: Address, optional mask
/// - For store: Address, stored value, optional mask
/// TODO: We currently execute only per-part unless a specific instance is
/// provided.
class VPWidenMemoryInstructionRecipe : public VPRecipeBase, public VPValue {
  Instruction &Ingredient;

  // Whether the loaded-from / stored-to addresses are consecutive.
  bool Consecutive;

  // Whether the consecutive loaded/stored addresses are in reverse order.
  bool Reverse;

  void setMask(VPValue *Mask) {
    if (!Mask)
      return;
    addOperand(Mask);
  }

  bool isMasked() const {
    return isStore() ? getNumOperands() == 3 : getNumOperands() == 2;
  }

public:
  VPWidenMemoryInstructionRecipe(LoadInst &Load, VPValue *Addr, VPValue *Mask,
                                 bool Consecutive, bool Reverse)
      : VPRecipeBase(VPWidenMemoryInstructionSC, {Addr}),
        VPValue(VPValue::VPVMemoryInstructionSC, &Load, this), Ingredient(Load),
        Consecutive(Consecutive), Reverse(Reverse) {
    assert((Consecutive || !Reverse) && "Reverse implies consecutive");
    setMask(Mask);
  }

  VPWidenMemoryInstructionRecipe(StoreInst &Store, VPValue *Addr,
                                 VPValue *StoredValue, VPValue *Mask,
                                 bool Consecutive, bool Reverse)
      : VPRecipeBase(VPWidenMemoryInstructionSC, {Addr, StoredValue}),
        VPValue(VPValue::VPVMemoryInstructionSC, &Store, this),
        Ingredient(Store), Consecutive(Consecutive), Reverse(Reverse) {
    assert((Consecutive || !Reverse) && "Reverse implies consecutive");
    setMask(Mask);
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPWidenMemoryInstructionSC;
  }

  /// Return the address accessed by this recipe.
  VPValue *getAddr() const {
    return getOperand(0); // Address is the 1st, mandatory operand.
  }

  /// Return the mask used by this recipe. Note that a full mask is represented
  /// by a nullptr.
  VPValue *getMask() const {
    // Mask is optional and therefore the last operand.
    return isMasked() ? getOperand(getNumOperands() - 1) : nullptr;
  }

  /// Returns true if this recipe is a store.
  bool isStore() const { return isa<StoreInst>(Ingredient); }

  /// Return the value stored by this recipe.
  VPValue *getStoredValue() const {
    assert(isStore() && "Stored value only available for store instructions");
    return getOperand(1); // Stored value is the 2nd, mandatory operand.
  }

  // Return whether the loaded-from / stored-to addresses are consecutive.
  bool isConsecutive() const { return Consecutive; }

  // Return whether the consecutive loaded/stored addresses are in reverse
  // order.
  bool isReverse() const { return Reverse; }

  /// Generate the wide load/store.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    // Widened, consecutive memory operations only demand the first lane of
    // their address.
    return Op == getAddr() && isConsecutive();
  }
};
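
// Illustrative sketch (not part of the original header): the mask, when
// present, is always the last operand, giving the operand counts tested by
// isMasked() above:
//
//   load:  {Addr}              or {Addr, Mask}              -> 1 or 2 operands
//   store: {Addr, StoredValue} or {Addr, StoredValue, Mask} -> 2 or 3 operands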

/// Canonical scalar induction phi of the vector loop, starting at the
/// specified start value (either 0 or the resume value when vectorizing the
/// epilogue loop). VPWidenCanonicalIVRecipe represents the vector version of
/// the canonical induction variable.
class VPCanonicalIVPHIRecipe : public VPHeaderPHIRecipe {
  DebugLoc DL;

public:
  VPCanonicalIVPHIRecipe(VPValue *StartV, DebugLoc DL)
      : VPHeaderPHIRecipe(VPValue::VPVCanonicalIVPHISC, VPCanonicalIVPHISC,
                          nullptr, StartV),
        DL(DL) {}
  ~VPCanonicalIVPHIRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPCanonicalIVPHISC;
  }

  /// Generate the canonical scalar induction phi of the vector loop.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns the scalar type of the induction.
  const Type *getScalarType() const {
    return getOperand(0)->getLiveInIRValue()->getType();
  }

  /// Returns true if the recipe only uses the first lane of operand \p Op.
  bool onlyFirstLaneUsed(const VPValue *Op) const override {
    assert(is_contained(operands(), Op) &&
           "Op must be an operand of the recipe");
    return true;
  }
};

/// A Recipe for widening the canonical induction variable of the vector loop.
class VPWidenCanonicalIVRecipe : public VPRecipeBase, public VPValue {
public:
  VPWidenCanonicalIVRecipe(VPCanonicalIVPHIRecipe *CanonicalIV)
      : VPRecipeBase(VPWidenCanonicalIVSC, {CanonicalIV}),
        VPValue(VPValue::VPVWidenCanonicalIVSC, nullptr, this) {}
  ~VPWidenCanonicalIVRecipe() override = default;

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPDef *D) {
    return D->getVPDefID() == VPRecipeBase::VPWidenCanonicalIVSC;
  }

  /// Extra classof implementations to allow directly casting from VPUser ->
  /// VPWidenCanonicalIVRecipe.
  static inline bool classof(const VPUser *U) {
    auto *R = dyn_cast<VPRecipeBase>(U);
    return R && R->getVPDefID() == VPRecipeBase::VPWidenCanonicalIVSC;
  }
  static inline bool classof(const VPRecipeBase *R) {
    return R->getVPDefID() == VPRecipeBase::VPWidenCanonicalIVSC;
  }

  /// Generate a canonical vector induction variable of the vector loop, with
  /// start = {<Part*VF, Part*VF+1, ..., Part*VF+VF-1> for 0 <= Part < UF}, and
  /// step = <VF*UF, VF*UF, ..., VF*UF>.
  void execute(VPTransformState &State) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print the recipe.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
#endif

  /// Returns the scalar type of the induction.
  const Type *getScalarType() const {
    return cast<VPCanonicalIVPHIRecipe>(getOperand(0)->getDef())
        ->getScalarType();
  }
};
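
// Worked example (not part of the original header): with VF=4 and UF=2 the
// widened canonical IV starts as the part-vectors <0,1,2,3> and <4,5,6,7>,
// and each vector-loop iteration advances both by the splat step <8,8,8,8>
// (VF*UF).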

/// VPBasicBlock serves as the leaf of the Hierarchical Control-Flow Graph. It
/// holds a sequence of zero or more VPRecipes, each representing a sequence of
/// output IR instructions. All PHI-like recipes must come before any non-PHI
/// recipes.
class VPBasicBlock : public VPBlockBase {
public:
  using RecipeListTy = iplist<VPRecipeBase>;

private:
  /// The VPRecipes held in the order of output instructions to generate.
  RecipeListTy Recipes;

public:
  VPBasicBlock(const Twine &Name = "", VPRecipeBase *Recipe = nullptr)
      : VPBlockBase(VPBasicBlockSC, Name.str()) {
    if (Recipe)
      appendRecipe(Recipe);
  }

  ~VPBasicBlock() override {
    while (!Recipes.empty())
      Recipes.pop_back();
  }

  /// Instruction iterators...
  using iterator = RecipeListTy::iterator;
  using const_iterator = RecipeListTy::const_iterator;
  using reverse_iterator = RecipeListTy::reverse_iterator;
  using const_reverse_iterator = RecipeListTy::const_reverse_iterator;

  //===--------------------------------------------------------------------===//
  /// Recipe iterator methods
  ///
  inline iterator begin() { return Recipes.begin(); }
  inline const_iterator begin() const { return Recipes.begin(); }
  inline iterator end() { return Recipes.end(); }
  inline const_iterator end() const { return Recipes.end(); }

  inline reverse_iterator rbegin() { return Recipes.rbegin(); }
  inline const_reverse_iterator rbegin() const { return Recipes.rbegin(); }
  inline reverse_iterator rend() { return Recipes.rend(); }
  inline const_reverse_iterator rend() const { return Recipes.rend(); }

  inline size_t size() const { return Recipes.size(); }
  inline bool empty() const { return Recipes.empty(); }
  inline const VPRecipeBase &front() const { return Recipes.front(); }
  inline VPRecipeBase &front() { return Recipes.front(); }
  inline const VPRecipeBase &back() const { return Recipes.back(); }
  inline VPRecipeBase &back() { return Recipes.back(); }

  /// Returns a reference to the list of recipes.
  RecipeListTy &getRecipeList() { return Recipes; }

  /// Returns a pointer to a member of the recipe list.
  static RecipeListTy VPBasicBlock::*getSublistAccess(VPRecipeBase *) {
    return &VPBasicBlock::Recipes;
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPBlockBase *V) {
    return V->getVPBlockID() == VPBlockBase::VPBasicBlockSC;
  }

  void insert(VPRecipeBase *Recipe, iterator InsertPt) {
    assert(Recipe && "No recipe to append.");
    assert(!Recipe->Parent && "Recipe already in VPlan");
    Recipe->Parent = this;
    Recipes.insert(InsertPt, Recipe);
  }

  /// Augment the existing recipes of a VPBasicBlock with an additional
  /// \p Recipe as the last recipe.
  void appendRecipe(VPRecipeBase *Recipe) { insert(Recipe, end()); }

  /// The method which generates the output IR instructions that correspond to
  /// this VPBasicBlock, thereby "executing" the VPlan.
  void execute(struct VPTransformState *State) override;

  /// Return the position of the first non-phi node recipe in the block.
  iterator getFirstNonPhi();

  /// Returns an iterator range over the PHI-like recipes in the block.
  iterator_range<iterator> phis() {
    return make_range(begin(), getFirstNonPhi());
  }

  void dropAllReferences(VPValue *NewValue) override;

  /// Split current block at \p SplitAt by inserting a new block between the
  /// current block and its successors and moving all recipes starting at
  /// SplitAt to the new block. Returns the new block.
  VPBasicBlock *splitAt(iterator SplitAt);

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print this VPBasicBlock to \p O, prefixing all lines with \p Indent. \p
  /// SlotTracker is used to print unnamed VPValues using consecutive numbers.
  ///
  /// Note that the numbering is applied to the whole VPlan, so printing
  /// individual blocks is consistent with the whole VPlan printing.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
  using VPBlockBase::print; // Get the print(raw_ostream &O) version.
#endif

private:
  /// Create an IR BasicBlock to hold the output instructions generated by this
  /// VPBasicBlock, and return it. Update the CFGState accordingly.
  BasicBlock *createEmptyBasicBlock(VPTransformState::CFGState &CFG);
};
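
// Illustrative sketch (not part of the original header): since all phi-like
// recipes precede the others, the two groups can be visited separately. For a
// hypothetical VPBasicBlock *VPBB:
//
//   for (VPRecipeBase &PhiR : VPBB->phis())
//     ; // phi-like recipes only
//   for (VPRecipeBase &R : make_range(VPBB->getFirstNonPhi(), VPBB->end()))
//     ; // all remaining recipes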

/// VPRegionBlock represents a collection of VPBasicBlocks and VPRegionBlocks
/// which form a Single-Entry-Single-Exit subgraph of the output IR CFG.
/// A VPRegionBlock may indicate that its contents are to be replicated several
/// times. This is designed to support predicated scalarization, in which a
/// scalar if-then code structure needs to be generated VF * UF times. Having
/// this replication indicator helps to keep a single model for multiple
/// candidate VF's. The actual replication takes place only once the desired VF
/// and UF have been determined.
class VPRegionBlock : public VPBlockBase {
  /// Hold the Single Entry of the SESE region modelled by the VPRegionBlock.
  VPBlockBase *Entry;

  /// Hold the Single Exit of the SESE region modelled by the VPRegionBlock.
  VPBlockBase *Exit;

  /// An indicator whether this region is to generate multiple replicated
  /// instances of output IR corresponding to its VPBlockBases.
  bool IsReplicator;

public:
  VPRegionBlock(VPBlockBase *Entry, VPBlockBase *Exit,
                const std::string &Name = "", bool IsReplicator = false)
      : VPBlockBase(VPRegionBlockSC, Name), Entry(Entry), Exit(Exit),
        IsReplicator(IsReplicator) {
    assert(Entry->getPredecessors().empty() && "Entry block has predecessors.");
    assert(Exit->getSuccessors().empty() && "Exit block has successors.");
    Entry->setParent(this);
    Exit->setParent(this);
  }
  VPRegionBlock(const std::string &Name = "", bool IsReplicator = false)
      : VPBlockBase(VPRegionBlockSC, Name), Entry(nullptr), Exit(nullptr),
        IsReplicator(IsReplicator) {}

  ~VPRegionBlock() override {
    if (Entry) {
      VPValue DummyValue;
      Entry->dropAllReferences(&DummyValue);
      deleteCFG(Entry);
    }
  }

  /// Method to support type inquiry through isa, cast, and dyn_cast.
  static inline bool classof(const VPBlockBase *V) {
    return V->getVPBlockID() == VPBlockBase::VPRegionBlockSC;
  }

  const VPBlockBase *getEntry() const { return Entry; }
  VPBlockBase *getEntry() { return Entry; }

  /// Set \p EntryBlock as the entry VPBlockBase of this VPRegionBlock. \p
  /// EntryBlock must have no predecessors.
  void setEntry(VPBlockBase *EntryBlock) {
    assert(EntryBlock->getPredecessors().empty() &&
           "Entry block cannot have predecessors.");
    Entry = EntryBlock;
    EntryBlock->setParent(this);
  }

  // FIXME: DominatorTreeBase is doing 'A->getParent()->front()'. 'front' is a
  // specific interface of llvm::Function, instead of using
  // GraphTraits::getEntryNode. We should add a new template parameter to
  // DominatorTreeBase representing the Graph type.
  VPBlockBase &front() const { return *Entry; }

  const VPBlockBase *getExit() const { return Exit; }
  VPBlockBase *getExit() { return Exit; }

  /// Set \p ExitBlock as the exit VPBlockBase of this VPRegionBlock. \p
  /// ExitBlock must have no successors.
  void setExit(VPBlockBase *ExitBlock) {
    assert(ExitBlock->getSuccessors().empty() &&
           "Exit block cannot have successors.");
    Exit = ExitBlock;
    ExitBlock->setParent(this);
  }

  /// An indicator whether this region is to generate multiple replicated
  /// instances of output IR corresponding to its VPBlockBases.
  bool isReplicator() const { return IsReplicator; }

  /// The method which generates the output IR instructions that correspond to
  /// this VPRegionBlock, thereby "executing" the VPlan.
  void execute(struct VPTransformState *State) override;

  void dropAllReferences(VPValue *NewValue) override;

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print this VPRegionBlock to \p O (recursively), prefixing all lines with
  /// \p Indent. \p SlotTracker is used to print unnamed VPValues using
  /// consecutive numbers.
  ///
  /// Note that the numbering is applied to the whole VPlan, so printing
  /// individual regions is consistent with the whole VPlan printing.
  void print(raw_ostream &O, const Twine &Indent,
             VPSlotTracker &SlotTracker) const override;
  using VPBlockBase::print; // Get the print(raw_ostream &O) version.
#endif
};

//===----------------------------------------------------------------------===//
// GraphTraits specializations for VPlan Hierarchical Control-Flow Graphs     //
//===----------------------------------------------------------------------===//

// The following set of template specializations implement GraphTraits to treat
// any VPBlockBase as a node in a graph of VPBlockBases. It's important to note
// that VPBlockBase traits don't recurse into VPRegionBlocks, i.e., if the
// VPBlockBase is a VPRegionBlock, this specialization provides access to its
// successors/predecessors but not to the blocks inside the region.

template <> struct GraphTraits<VPBlockBase *> {
  using NodeRef = VPBlockBase *;
  using ChildIteratorType = SmallVectorImpl<VPBlockBase *>::iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }

  static inline ChildIteratorType child_begin(NodeRef N) {
    return N->getSuccessors().begin();
  }

  static inline ChildIteratorType child_end(NodeRef N) {
    return N->getSuccessors().end();
  }
};

template <> struct GraphTraits<const VPBlockBase *> {
  using NodeRef = const VPBlockBase *;
  using ChildIteratorType = SmallVectorImpl<VPBlockBase *>::const_iterator;

  static NodeRef getEntryNode(NodeRef N) { return N; }

  static inline ChildIteratorType child_begin(NodeRef N) {
    return N->getSuccessors().begin();
  }

  static inline ChildIteratorType child_end(NodeRef N) {
    return N->getSuccessors().end();
  }
};

// Inverse order specialization for VPBlockBases. Predecessors are used instead
// of successors for the inverse traversal.
template <> struct GraphTraits<Inverse<VPBlockBase *>> {
  using NodeRef = VPBlockBase *;
  using ChildIteratorType = SmallVectorImpl<VPBlockBase *>::iterator;

  static NodeRef getEntryNode(Inverse<NodeRef> B) { return B.Graph; }

  static inline ChildIteratorType child_begin(NodeRef N) {
    return N->getPredecessors().begin();
  }

  static inline ChildIteratorType child_end(NodeRef N) {
    return N->getPredecessors().end();
  }
};

// The following set of template specializations implement GraphTraits to
// treat VPRegionBlock as a graph and recurse inside its nodes. It's important
// to note that the blocks inside the VPRegionBlock are treated as VPBlockBases
// (i.e., no dyn_cast is performed, the VPBlockBase specialization is used), so
// there won't be automatic recursion into other VPBlockBases that turn out to
// be VPRegionBlocks.

template <>
struct GraphTraits<VPRegionBlock *> : public GraphTraits<VPBlockBase *> {
  using GraphRef = VPRegionBlock *;
  using nodes_iterator = df_iterator<NodeRef>;

  static NodeRef getEntryNode(GraphRef N) { return N->getEntry(); }

  static nodes_iterator nodes_begin(GraphRef N) {
    return nodes_iterator::begin(N->getEntry());
  }

  static nodes_iterator nodes_end(GraphRef N) {
    // df_iterator::end() returns an empty iterator so the node used doesn't
    // matter.
    return nodes_iterator::end(N);
  }
};

template <>
struct GraphTraits<const VPRegionBlock *>
    : public GraphTraits<const VPBlockBase *> {
  using GraphRef = const VPRegionBlock *;
  using nodes_iterator = df_iterator<NodeRef>;

  static NodeRef getEntryNode(GraphRef N) { return N->getEntry(); }

  static nodes_iterator nodes_begin(GraphRef N) {
    return nodes_iterator::begin(N->getEntry());
  }

  static nodes_iterator nodes_end(GraphRef N) {
    // df_iterator::end() returns an empty iterator so the node used doesn't
    // matter.
    return nodes_iterator::end(N);
  }
};

template <>
struct GraphTraits<Inverse<VPRegionBlock *>>
    : public GraphTraits<Inverse<VPBlockBase *>> {
  using GraphRef = VPRegionBlock *;
  using nodes_iterator = df_iterator<NodeRef>;

  static NodeRef getEntryNode(Inverse<GraphRef> N) {
    return N.Graph->getExit();
  }

  static nodes_iterator nodes_begin(GraphRef N) {
    return nodes_iterator::begin(N->getExit());
  }

  static nodes_iterator nodes_end(GraphRef N) {
    // df_iterator::end() returns an empty iterator so the node used doesn't
    // matter.
    return nodes_iterator::end(N);
  }
};

/// Iterator to traverse all successors of a VPBlockBase node. This includes
/// the entry node of VPRegionBlocks. Exit blocks of a region implicitly have
/// their parent region's successors. This ensures all blocks in a region are
/// visited before any blocks in a successor region when doing a reverse
/// post-order traversal of the graph.
template <typename BlockPtrTy>
class VPAllSuccessorsIterator
    : public iterator_facade_base<VPAllSuccessorsIterator<BlockPtrTy>,
                                  std::forward_iterator_tag, VPBlockBase> {
  BlockPtrTy Block;

  /// Index of the current successor. For VPBasicBlock nodes, this is simply
  /// the index into the successor array. For VPRegionBlock, SuccessorIdx == 0
  /// is used for the region's entry block, and SuccessorIdx - 1 are the
  /// indices into the successor array.
  size_t SuccessorIdx;

  static BlockPtrTy getBlockWithSuccs(BlockPtrTy Current) {
    while (Current && Current->getNumSuccessors() == 0)
      Current = Current->getParent();
    return Current;
  }

  /// Templated helper to dereference successor \p SuccIdx of \p Block. Used by
  /// both the const and non-const operator* implementations.
  template <typename T1> static T1 deref(T1 Block, unsigned SuccIdx) {
    if (auto *R = dyn_cast<VPRegionBlock>(Block)) {
      if (SuccIdx == 0)
        return R->getEntry();
      SuccIdx--;
    }

    // For exit blocks, use the next parent region with successors.
    return getBlockWithSuccs(Block)->getSuccessors()[SuccIdx];
  }

public:
  VPAllSuccessorsIterator(BlockPtrTy Block, size_t Idx = 0)
      : Block(Block), SuccessorIdx(Idx) {}
  VPAllSuccessorsIterator(const VPAllSuccessorsIterator &Other)
      : Block(Other.Block), SuccessorIdx(Other.SuccessorIdx) {}

  VPAllSuccessorsIterator &operator=(const VPAllSuccessorsIterator &R) {
    Block = R.Block;
    SuccessorIdx = R.SuccessorIdx;
    return *this;
  }

  static VPAllSuccessorsIterator end(BlockPtrTy Block) {
    BlockPtrTy ParentWithSuccs = getBlockWithSuccs(Block);
    unsigned NumSuccessors = ParentWithSuccs
                                 ? ParentWithSuccs->getNumSuccessors()
                                 : Block->getNumSuccessors();

    if (auto *R = dyn_cast<VPRegionBlock>(Block))
      return {R, NumSuccessors + 1};
    return {Block, NumSuccessors};
  }

  bool operator==(const VPAllSuccessorsIterator &R) const {
    return Block == R.Block && SuccessorIdx == R.SuccessorIdx;
  }

  const VPBlockBase *operator*() const { return deref(Block, SuccessorIdx); }

  BlockPtrTy operator*() { return deref(Block, SuccessorIdx); }

  VPAllSuccessorsIterator &operator++() {
    SuccessorIdx++;
    return *this;
  }

  VPAllSuccessorsIterator operator++(int X) {
    VPAllSuccessorsIterator Orig = *this;
    SuccessorIdx++;
    return Orig;
  }
};
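
// Worked example (not part of the original header): for a VPRegionBlock R with
// successors S0 and S1, dereferencing at SuccessorIdx 0 yields R->getEntry(),
// index 1 yields S0 and index 2 yields S1, so end(R) is {R, 3}. For a region's
// exit block, which has no successors of its own, getBlockWithSuccs() walks up
// to the enclosing region and that region's successors are used instead.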

/// Helper for GraphTraits specialization that traverses through VPRegionBlocks.
template <typename BlockTy> class VPBlockRecursiveTraversalWrapper {
  BlockTy Entry;

public:
  VPBlockRecursiveTraversalWrapper(BlockTy Entry) : Entry(Entry) {}
  BlockTy getEntry() { return Entry; }
};

/// GraphTraits specialization to recursively traverse VPBlockBase nodes,
/// including traversing through VPRegionBlocks. Exit blocks of a region
/// implicitly have their parent region's successors. This ensures all blocks
/// in a region are visited before any blocks in a successor region when doing
/// a reverse post-order traversal of the graph.
template <>
struct GraphTraits<VPBlockRecursiveTraversalWrapper<VPBlockBase *>> {
  using NodeRef = VPBlockBase *;
  using ChildIteratorType = VPAllSuccessorsIterator<VPBlockBase *>;

  static NodeRef
  getEntryNode(VPBlockRecursiveTraversalWrapper<VPBlockBase *> N) {
    return N.getEntry();
  }

  static inline ChildIteratorType child_begin(NodeRef N) {
    return ChildIteratorType(N);
  }

  static inline ChildIteratorType child_end(NodeRef N) {
    return ChildIteratorType::end(N);
  }
};

template <>
struct GraphTraits<VPBlockRecursiveTraversalWrapper<const VPBlockBase *>> {
  using NodeRef = const VPBlockBase *;
  using ChildIteratorType = VPAllSuccessorsIterator<const VPBlockBase *>;

  static NodeRef
  getEntryNode(VPBlockRecursiveTraversalWrapper<const VPBlockBase *> N) {
    return N.getEntry();
  }

  static inline ChildIteratorType child_begin(NodeRef N) {
    return ChildIteratorType(N);
  }

  static inline ChildIteratorType child_end(NodeRef N) {
    return ChildIteratorType::end(N);
  }
};
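
// Illustrative sketch (not part of the original header): these traits let the
// generic graph algorithms walk a whole plan while descending into regions,
// e.g. a reverse post-order traversal over every block of a hypothetical
// `Plan` (ReversePostOrderTraversal is from llvm/ADT/PostOrderIterator.h):
//
//   ReversePostOrderTraversal<VPBlockRecursiveTraversalWrapper<VPBlockBase *>>
//       RPOT(Plan.getEntry());
//   for (VPBlockBase *VPB : RPOT)
//     ; // all blocks of a region are visited before any successor region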

/// VPlan models a candidate for vectorization, encoding various decisions
/// taken to produce efficient output IR, including which branches,
/// basic-blocks and output IR instructions to generate, and their cost. VPlan
/// holds a Hierarchical-CFG of VPBasicBlocks and VPRegionBlocks rooted at an
/// Entry VPBlock.
class VPlan {
  friend class VPlanPrinter;
  friend class VPSlotTracker;

  /// Hold the single entry to the Hierarchical CFG of the VPlan.
  VPBlockBase *Entry;

  /// Holds the VFs applicable to this VPlan.
  SmallSetVector<ElementCount, 2> VFs;

  /// Holds the name of the VPlan, for printing.
  std::string Name;

  /// Holds all the external definitions created for this VPlan.
  // TODO: Introduce a specific representation for external definitions in
  // VPlan. External definitions must be immutable and hold a pointer to their
  // underlying IR that will be used to implement their structural comparison
  // (operators '==' and '<').
  SetVector<VPValue *> VPExternalDefs;

  /// Represents the trip count of the original loop, for folding
  /// the tail.
  VPValue *TripCount = nullptr;

  /// Represents the backedge taken count of the original loop, for folding
  /// the tail. It equals TripCount - 1.
  VPValue *BackedgeTakenCount = nullptr;

  /// Represents the vector trip count.
  VPValue VectorTripCount;

  /// Holds a mapping between Values and their corresponding VPValue inside
  /// VPlan.
  Value2VPValueTy Value2VPValue;

  /// Contains all VPValues that have been allocated by addVPValue directly and
  /// need to be freed when the plan's destructor is called.
  SmallVector<VPValue *, 16> VPValuesToFree;

  /// Holds the VPLoopInfo analysis for this VPlan.
  VPLoopInfo VPLInfo;

  /// Indicates whether it is safe to use the Value2VPValue mapping or if the
  /// mapping cannot be used any longer, because it is stale.
  bool Value2VPValueEnabled = true;

public:
  VPlan(VPBlockBase *Entry = nullptr) : Entry(Entry) {
    if (Entry)
      Entry->setPlan(this);
  }

  ~VPlan() {
    if (Entry) {
      VPValue DummyValue;
      for (VPBlockBase *Block : depth_first(Entry))
        Block->dropAllReferences(&DummyValue);

      VPBlockBase::deleteCFG(Entry);
    }
    for (VPValue *VPV : VPValuesToFree)
      delete VPV;
    if (TripCount)
      delete TripCount;
    if (BackedgeTakenCount)
      delete BackedgeTakenCount;
    for (VPValue *Def : VPExternalDefs)
      delete Def;
  }

  /// Prepare the plan for execution, setting up the required live-in values.
  void prepareToExecute(Value *TripCount, Value *VectorTripCount,
                        Value *CanonicalIVStartValue, VPTransformState &State);

  /// Generate the IR code for this VPlan.
  void execute(struct VPTransformState *State);

  VPBlockBase *getEntry() { return Entry; }
  const VPBlockBase *getEntry() const { return Entry; }

  VPBlockBase *setEntry(VPBlockBase *Block) {
    Entry = Block;
    Block->setPlan(this);
    return Entry;
  }

  /// The trip count of the original loop.
  VPValue *getOrCreateTripCount() {
    if (!TripCount)
      TripCount = new VPValue();
    return TripCount;
  }

  /// The backedge taken count of the original loop.
  VPValue *getOrCreateBackedgeTakenCount() {
    if (!BackedgeTakenCount)
      BackedgeTakenCount = new VPValue();
    return BackedgeTakenCount;
  }

  /// The vector trip count.
  VPValue &getVectorTripCount() { return VectorTripCount; }

  /// Mark the plan to indicate that using Value2VPValue is not safe any
  /// longer, because it may be stale.
  void disableValue2VPValue() { Value2VPValueEnabled = false; }

  void addVF(ElementCount VF) { VFs.insert(VF); }

  bool hasVF(ElementCount VF) { return VFs.count(VF); }

  const std::string &getName() const { return Name; }

  void setName(const Twine &newName) { Name = newName.str(); }

  /// Add \p VPVal to the pool of external definitions if it's not already
  /// in the pool.
  void addExternalDef(VPValue *VPVal) { VPExternalDefs.insert(VPVal); }

  void addVPValue(Value *V) {
    assert(Value2VPValueEnabled &&
           "IR value to VPValue mapping may be out of date!");
    assert(V && "Trying to add a null Value to VPlan");
    assert(!Value2VPValue.count(V) && "Value already exists in VPlan");
    VPValue *VPV = new VPValue(V);
    Value2VPValue[V] = VPV;
    VPValuesToFree.push_back(VPV);
  }

  void addVPValue(Value *V, VPValue *VPV) {
    assert(Value2VPValueEnabled && "Value2VPValue mapping may be out of date!");
    assert(V && "Trying to add a null Value to VPlan");
    assert(!Value2VPValue.count(V) && "Value already exists in VPlan");
    Value2VPValue[V] = VPV;
  }

  /// Returns the VPValue for \p V. \p OverrideAllowed can be used to disable
  /// checking whether it is safe to query VPValues using IR Values.
  VPValue *getVPValue(Value *V, bool OverrideAllowed = false) {
    assert((OverrideAllowed || isa<Constant>(V) || Value2VPValueEnabled) &&
           "Value2VPValue mapping may be out of date!");
    assert(V && "Trying to get the VPValue of a null Value");
    assert(Value2VPValue.count(V) && "Value does not exist in VPlan");
    return Value2VPValue[V];
  }

  /// Gets the VPValue or adds a new one (if none exists yet) for \p V. \p
  /// OverrideAllowed can be used to disable checking whether it is safe to
  /// query VPValues using IR Values.
  VPValue *getOrAddVPValue(Value *V, bool OverrideAllowed = false) {
    assert((OverrideAllowed || isa<Constant>(V) || Value2VPValueEnabled) &&
           "Value2VPValue mapping may be out of date!");
    assert(V && "Trying to get or add the VPValue of a null Value");
    if (!Value2VPValue.count(V))
      addVPValue(V);
    return getVPValue(V);
  }

  void removeVPValueFor(Value *V) {
    assert(Value2VPValueEnabled &&
           "IR value to VPValue mapping may be out of date!");
    Value2VPValue.erase(V);
  }

  /// Return the VPLoopInfo analysis for this VPlan.
  VPLoopInfo &getVPLoopInfo() { return VPLInfo; }
  const VPLoopInfo &getVPLoopInfo() const { return VPLInfo; }

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print this VPlan to \p O.
  void print(raw_ostream &O) const;

  /// Print this VPlan in DOT format to \p O.
  void printDOT(raw_ostream &O) const;

  /// Dump the plan to stderr (for debugging).
  LLVM_DUMP_METHOD void dump() const;
#endif

  /// Returns a range mapping the values in the range \p Operands to their
  /// corresponding VPValues.
  iterator_range<mapped_iterator<Use *, std::function<VPValue *(Value *)>>>
  mapToVPValues(User::op_range Operands) {
    std::function<VPValue *(Value *)> Fn = [this](Value *Op) {
      return getOrAddVPValue(Op);
    };
    return map_range(Operands, Fn);
  }

  /// Returns true if \p VPV is uniform after vectorization.
  bool isUniformAfterVectorization(VPValue *VPV) const {
    auto RepR = dyn_cast_or_null<VPReplicateRecipe>(VPV->getDef());
    return !VPV->getDef() || (RepR && RepR->isUniform());
  }

  /// Returns the VPRegionBlock of the vector loop.
  VPRegionBlock *getVectorLoopRegion() {
    return cast<VPRegionBlock>(getEntry());
  }

  /// Returns the canonical induction recipe of the vector loop.
  VPCanonicalIVPHIRecipe *getCanonicalIV() {
    VPBasicBlock *EntryVPBB = getVectorLoopRegion()->getEntryBasicBlock();
    if (EntryVPBB->empty()) {
      // VPlan native path.
      EntryVPBB = cast<VPBasicBlock>(EntryVPBB->getSingleSuccessor());
    }
    return cast<VPCanonicalIVPHIRecipe>(&*EntryVPBB->begin());
  }

private:
  /// Add to the given dominator tree the header block and every new basic
  /// block that was created between it and the latch block, inclusive.
  static void updateDominatorTree(DominatorTree *DT, BasicBlock *LoopLatchBB,
                                  BasicBlock *LoopPreHeaderBB,
                                  BasicBlock *LoopExitBB);
};
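
// Illustrative sketch (not part of the original header): recipe construction
// typically funnels IR operands through the plan's Value->VPValue mapping.
// For a hypothetical Instruction *I and VPlan &Plan:
//
//   SmallVector<VPValue *, 4> Ops;
//   for (Value *Op : I->operands())
//     Ops.push_back(Plan.getOrAddVPValue(Op));
//
// mapToVPValues() exposes the same mapping lazily as a range.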
  2048. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  2049. /// VPlanPrinter prints a given VPlan to a given output stream. The printing is
  2050. /// indented and follows the dot format.
  2051. class VPlanPrinter {
  2052. raw_ostream &OS;
  2053. const VPlan &Plan;
  2054. unsigned Depth = 0;
  2055. unsigned TabWidth = 2;
  2056. std::string Indent;
  2057. unsigned BID = 0;
  2058. SmallDenseMap<const VPBlockBase *, unsigned> BlockID;
  2059. VPSlotTracker SlotTracker;
  2060. /// Handle indentation.
  2061. void bumpIndent(int b) { Indent = std::string((Depth += b) * TabWidth, ' '); }
  2062. /// Print a given \p Block of the Plan.
  2063. void dumpBlock(const VPBlockBase *Block);
  2064. /// Print the information related to the CFG edges going out of a given
  2065. /// \p Block, followed by printing the successor blocks themselves.
  2066. void dumpEdges(const VPBlockBase *Block);
  2067. /// Print a given \p BasicBlock, including its VPRecipes, followed by printing
  2068. /// its successor blocks.
  2069. void dumpBasicBlock(const VPBasicBlock *BasicBlock);
  2070. /// Print a given \p Region of the Plan.
  2071. void dumpRegion(const VPRegionBlock *Region);
  2072. unsigned getOrCreateBID(const VPBlockBase *Block) {
  2073. return BlockID.count(Block) ? BlockID[Block] : BlockID[Block] = BID++;
  2074. }
  2075. Twine getOrCreateName(const VPBlockBase *Block);
  2076. Twine getUID(const VPBlockBase *Block);
  2077. /// Print the information related to a CFG edge between two VPBlockBases.
  2078. void drawEdge(const VPBlockBase *From, const VPBlockBase *To, bool Hidden,
  2079. const Twine &Label);
  2080. public:
  2081. VPlanPrinter(raw_ostream &O, const VPlan &P)
  2082. : OS(O), Plan(P), SlotTracker(&P) {}
  2083. LLVM_DUMP_METHOD void dump();
  2084. };
  2085. struct VPlanIngredient {
  2086. const Value *V;
  2087. VPlanIngredient(const Value *V) : V(V) {}
  2088. void print(raw_ostream &O) const;
  2089. };
  2090. inline raw_ostream &operator<<(raw_ostream &OS, const VPlanIngredient &I) {
  2091. I.print(OS);
  2092. return OS;
  2093. }
  2094. inline raw_ostream &operator<<(raw_ostream &OS, const VPlan &Plan) {
  2095. Plan.print(OS);
  2096. return OS;
  2097. }
  2098. #endif

//===----------------------------------------------------------------------===//
// VPlan Utilities
//===----------------------------------------------------------------------===//

/// Class that provides utilities for VPBlockBases in VPlan.
class VPBlockUtils {
public:
  VPBlockUtils() = delete;

  /// Insert disconnected VPBlockBase \p NewBlock after \p BlockPtr. Add \p
  /// NewBlock as successor of \p BlockPtr and \p BlockPtr as predecessor of \p
  /// NewBlock, and propagate \p BlockPtr parent to \p NewBlock. \p BlockPtr's
  /// successors are moved from \p BlockPtr to \p NewBlock and \p BlockPtr's
  /// conditional bit is propagated to \p NewBlock. \p NewBlock must have
  /// neither successors nor predecessors.
  static void insertBlockAfter(VPBlockBase *NewBlock, VPBlockBase *BlockPtr) {
    assert(NewBlock->getSuccessors().empty() &&
           NewBlock->getPredecessors().empty() &&
           "Can't insert new block with predecessors or successors.");
    NewBlock->setParent(BlockPtr->getParent());
    SmallVector<VPBlockBase *> Succs(BlockPtr->successors());
    for (VPBlockBase *Succ : Succs) {
      disconnectBlocks(BlockPtr, Succ);
      connectBlocks(NewBlock, Succ);
    }
    NewBlock->setCondBit(BlockPtr->getCondBit());
    BlockPtr->setCondBit(nullptr);
    connectBlocks(BlockPtr, NewBlock);
  }
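
  // Illustrative sketch only: splicing a new block into an existing edge.
  // Given Pred -> Succ (hypothetical names), the calls below yield
  // Pred -> NewBlock -> Succ, with NewBlock inheriting Pred's parent and
  // conditional bit:
  //
  //   auto *NewBlock = new VPBasicBlock("new.block");
  //   VPBlockUtils::insertBlockAfter(NewBlock, Pred);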

  /// Insert disconnected VPBlockBases \p IfTrue and \p IfFalse after \p
  /// BlockPtr. Add \p IfTrue and \p IfFalse as successors of \p BlockPtr and
  /// \p BlockPtr as predecessor of \p IfTrue and \p IfFalse. Propagate \p
  /// BlockPtr parent to \p IfTrue and \p IfFalse. \p Condition is set as the
  /// successor selector. \p BlockPtr must have no successors and \p IfTrue and
  /// \p IfFalse must have neither successors nor predecessors.
  static void insertTwoBlocksAfter(VPBlockBase *IfTrue, VPBlockBase *IfFalse,
                                   VPValue *Condition, VPBlockBase *BlockPtr) {
    assert(IfTrue->getSuccessors().empty() &&
           "Can't insert IfTrue with successors.");
    assert(IfFalse->getSuccessors().empty() &&
           "Can't insert IfFalse with successors.");
    BlockPtr->setTwoSuccessors(IfTrue, IfFalse, Condition);
    IfTrue->setPredecessors({BlockPtr});
    IfFalse->setPredecessors({BlockPtr});
    IfTrue->setParent(BlockPtr->getParent());
    IfFalse->setParent(BlockPtr->getParent());
  }
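
  // Illustrative sketch only: turning a straight-line block into a branch.
  // Assuming `Cond` is a VPValue available at `BlockPtr` and `Then`/`Else`
  // are fresh, disconnected blocks (all hypothetical names), this produces
  // BlockPtr -> {Then, Else} with `Cond` as the successor selector:
  //
  //   VPBlockUtils::insertTwoBlocksAfter(Then, Else, Cond, BlockPtr);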

  /// Connect VPBlockBases \p From and \p To bi-directionally. Append \p To to
  /// the successors of \p From and \p From to the predecessors of \p To. Both
  /// VPBlockBases must have the same parent, which can be null. Both
  /// VPBlockBases can be already connected to other VPBlockBases.
  static void connectBlocks(VPBlockBase *From, VPBlockBase *To) {
    assert((From->getParent() == To->getParent()) &&
           "Can't connect two blocks with different parents");
    assert(From->getNumSuccessors() < 2 &&
           "Blocks can't have more than two successors.");
    From->appendSuccessor(To);
    To->appendPredecessor(From);
  }

  /// Disconnect VPBlockBases \p From and \p To bi-directionally. Remove \p To
  /// from the successors of \p From and \p From from the predecessors of \p To.
  static void disconnectBlocks(VPBlockBase *From, VPBlockBase *To) {
    assert(To && "Successor to disconnect is null.");
    From->removeSuccessor(To);
    To->removePredecessor(From);
  }
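
  // Illustrative sketch only: connectBlocks and disconnectBlocks typically
  // come in pairs when an edge is redirected, e.g. rerouting Pred -> OldSucc
  // to Pred -> NewSucc (hypothetical names):
  //
  //   VPBlockUtils::disconnectBlocks(Pred, OldSucc);
  //   VPBlockUtils::connectBlocks(Pred, NewSucc);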

  /// Try to merge \p Block into its single predecessor, if \p Block is a
  /// VPBasicBlock and its predecessor has a single successor. Returns a
  /// pointer to the predecessor \p Block was merged into or nullptr otherwise.
  static VPBasicBlock *tryToMergeBlockIntoPredecessor(VPBlockBase *Block) {
    auto *VPBB = dyn_cast<VPBasicBlock>(Block);
    auto *PredVPBB =
        dyn_cast_or_null<VPBasicBlock>(Block->getSinglePredecessor());
    if (!VPBB || !PredVPBB || PredVPBB->getNumSuccessors() != 1)
      return nullptr;

    // Move all recipes into the predecessor, then rewire the CFG around the
    // now-empty block and delete it.
    for (VPRecipeBase &R : make_early_inc_range(*VPBB))
      R.moveBefore(*PredVPBB, PredVPBB->end());
    VPBlockUtils::disconnectBlocks(PredVPBB, VPBB);
    auto *ParentRegion = cast<VPRegionBlock>(Block->getParent());
    if (ParentRegion->getExit() == Block)
      ParentRegion->setExit(PredVPBB);
    SmallVector<VPBlockBase *> Successors(Block->successors());
    for (auto *Succ : Successors) {
      VPBlockUtils::disconnectBlocks(Block, Succ);
      VPBlockUtils::connectBlocks(PredVPBB, Succ);
    }
    delete Block;
    return PredVPBB;
  }
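
  // Illustrative sketch only: folding a block chain after a transform.
  // Assuming `VPBB` is a hypothetical VPBasicBlock whose single predecessor
  // has a single successor:
  //
  //   if (VPBasicBlock *Pred = tryToMergeBlockIntoPredecessor(VPBB)) {
  //     // VPBB has been deleted; its recipes now live at the end of Pred.
  //   }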

  /// Returns true if the edge \p FromBlock -> \p ToBlock is a back-edge.
  static bool isBackEdge(const VPBlockBase *FromBlock,
                         const VPBlockBase *ToBlock, const VPLoopInfo *VPLI) {
    assert(FromBlock->getParent() == ToBlock->getParent() &&
           FromBlock->getParent() && "Must be in same region");
    const VPLoop *FromLoop = VPLI->getLoopFor(FromBlock);
    const VPLoop *ToLoop = VPLI->getLoopFor(ToBlock);
    if (!FromLoop || !ToLoop || FromLoop != ToLoop)
      return false;

    // A back-edge is a branch from the loop latch to its header.
    return ToLoop->isLoopLatch(FromBlock) && ToBlock == ToLoop->getHeader();
  }

  /// Returns true if \p Block is a loop latch.
  static bool blockIsLoopLatch(const VPBlockBase *Block,
                               const VPLoopInfo *VPLInfo) {
    if (const VPLoop *ParentVPL = VPLInfo->getLoopFor(Block))
      return ParentVPL->isLoopLatch(Block);

    return false;
  }

  /// Count and return the number of successors of \p PredBlock excluding any
  /// backedges.
  static unsigned countSuccessorsNoBE(VPBlockBase *PredBlock,
                                      VPLoopInfo *VPLI) {
    unsigned Count = 0;
    for (VPBlockBase *SuccBlock : PredBlock->getSuccessors()) {
      if (!VPBlockUtils::isBackEdge(PredBlock, SuccBlock, VPLI))
        Count++;
    }
    return Count;
  }

  /// Return an iterator range over \p Range which only includes \p BlockTy
  /// blocks. The accesses are cast to \p BlockTy.
  template <typename BlockTy, typename T>
  static auto blocksOnly(const T &Range) {
    // Create BaseTy with correct const-ness based on BlockTy.
    using BaseTy =
        typename std::conditional<std::is_const<BlockTy>::value,
                                  const VPBlockBase, VPBlockBase>::type;

    // We need to first create an iterator range over (const) BlockTy & instead
    // of (const) BlockTy * for filter_range to work properly.
    auto Mapped =
        map_range(Range, [](BaseTy *Block) -> BaseTy & { return *Block; });
    auto Filter = make_filter_range(
        Mapped, [](BaseTy &Block) { return isa<BlockTy>(&Block); });
    return map_range(Filter, [](BaseTy &Block) -> BlockTy * {
      return cast<BlockTy>(&Block);
    });
  }
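
  // Illustrative sketch only: iterating over just the VPBasicBlocks among a
  // block's successors (`Block` is a hypothetical VPBlockBase *):
  //
  //   for (VPBasicBlock *VPBB :
  //        VPBlockUtils::blocksOnly<VPBasicBlock>(Block->getSuccessors()))
  //     dbgs() << VPBB->getName() << "\n";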
};

class VPInterleavedAccessInfo {
  DenseMap<VPInstruction *, InterleaveGroup<VPInstruction> *>
      InterleaveGroupMap;

  /// Type for mapping of instruction based interleave groups to VPInstruction
  /// interleave groups.
  using Old2NewTy = DenseMap<InterleaveGroup<Instruction> *,
                             InterleaveGroup<VPInstruction> *>;

  /// Recursively traverse \p Region and populate VPlan based interleave groups
  /// based on \p IAI.
  void visitRegion(VPRegionBlock *Region, Old2NewTy &Old2New,
                   InterleavedAccessInfo &IAI);

  /// Recursively traverse \p Block and populate VPlan based interleave groups
  /// based on \p IAI.
  void visitBlock(VPBlockBase *Block, Old2NewTy &Old2New,
                  InterleavedAccessInfo &IAI);

public:
  VPInterleavedAccessInfo(VPlan &Plan, InterleavedAccessInfo &IAI);

  ~VPInterleavedAccessInfo() {
    SmallPtrSet<InterleaveGroup<VPInstruction> *, 4> DelSet;
    // Avoid releasing a pointer twice.
    for (auto &I : InterleaveGroupMap)
      DelSet.insert(I.second);
    for (auto *Ptr : DelSet)
      delete Ptr;
  }

  /// Get the interleave group that \p Instr belongs to.
  ///
  /// \returns nullptr if \p Instr does not belong to any interleave group.
  InterleaveGroup<VPInstruction> *
  getInterleaveGroup(VPInstruction *Instr) const {
    return InterleaveGroupMap.lookup(Instr);
  }
};
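
// Illustrative sketch only: building the VPlan-level view of an existing
// InterleavedAccessInfo analysis and querying a VPInstruction's group
// (`Plan`, `IAI` and `VPInst` are hypothetical names):
//
//   VPInterleavedAccessInfo VPIAI(Plan, IAI);
//   if (auto *IG = VPIAI.getInterleaveGroup(VPInst))
//     dbgs() << "interleave factor: " << IG->getFactor() << "\n";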

/// Class that maps (parts of) an existing VPlan to trees of combined
/// VPInstructions.
class VPlanSlp {
  enum class OpMode { Failed, Load, Opcode };

  /// A DenseMapInfo implementation for using SmallVector<VPValue *, 4> as
  /// DenseMap keys.
  struct BundleDenseMapInfo {
    static SmallVector<VPValue *, 4> getEmptyKey() {
      return {reinterpret_cast<VPValue *>(-1)};
    }

    static SmallVector<VPValue *, 4> getTombstoneKey() {
      return {reinterpret_cast<VPValue *>(-2)};
    }

    static unsigned getHashValue(const SmallVector<VPValue *, 4> &V) {
      return static_cast<unsigned>(hash_combine_range(V.begin(), V.end()));
    }

    static bool isEqual(const SmallVector<VPValue *, 4> &LHS,
                        const SmallVector<VPValue *, 4> &RHS) {
      return LHS == RHS;
    }
  };

  /// Mapping of values in the original VPlan to a combined VPInstruction.
  DenseMap<SmallVector<VPValue *, 4>, VPInstruction *, BundleDenseMapInfo>
      BundleToCombined;

  VPInterleavedAccessInfo &IAI;

  /// Basic block to operate on. For now, only instructions in a single BB are
  /// considered.
  const VPBasicBlock &BB;

  /// Indicates whether we managed to combine all visited instructions or not.
  bool CompletelySLP = true;

  /// Width of the widest combined bundle in bits.
  unsigned WidestBundleBits = 0;

  using MultiNodeOpTy =
      typename std::pair<VPInstruction *, SmallVector<VPValue *, 4>>;

  // Input operand bundles for the current multi node. Each multi node operand
  // bundle contains values not matching the multi node's opcode. They will be
  // reordered in reorderMultiNodeOps, once we have completed building a multi
  // node.
  SmallVector<MultiNodeOpTy, 4> MultiNodeOps;

  /// Indicates whether we are building a multi node currently.
  bool MultiNodeActive = false;

  /// Check if we can vectorize \p Operands together.
  bool areVectorizable(ArrayRef<VPValue *> Operands) const;

  /// Add combined instruction \p New for the bundle \p Operands.
  void addCombined(ArrayRef<VPValue *> Operands, VPInstruction *New);

  /// Indicate we hit a bundle we failed to combine. Returns nullptr for now.
  VPInstruction *markFailed();

  /// Reorder operands in the multi node to maximize sequential memory access
  /// and commutative operations.
  SmallVector<MultiNodeOpTy, 4> reorderMultiNodeOps();

  /// Choose the best candidate to use for the lane after \p Last. The set of
  /// candidates to choose from are values with an opcode matching \p Last's
  /// or loads consecutive to \p Last.
  std::pair<OpMode, VPValue *> getBest(OpMode Mode, VPValue *Last,
                                       SmallPtrSetImpl<VPValue *> &Candidates,
                                       VPInterleavedAccessInfo &IAI);

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  /// Print bundle \p Values to dbgs().
  void dumpBundle(ArrayRef<VPValue *> Values);
#endif

public:
  VPlanSlp(VPInterleavedAccessInfo &IAI, VPBasicBlock &BB) : IAI(IAI), BB(BB) {}

  ~VPlanSlp() = default;

  /// Tries to build an SLP tree rooted at \p Operands and returns a
  /// VPInstruction combining \p Operands, if they can be combined.
  VPInstruction *buildGraph(ArrayRef<VPValue *> Operands);

  /// Return the width of the widest combined bundle in bits.
  unsigned getWidestBundleBits() const { return WidestBundleBits; }

  /// Return true if all visited instructions can be combined.
  bool isCompletelySLP() const { return CompletelySLP; }
};
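
// Illustrative sketch only: attempting to combine two store roots in a basic
// block into wider VPInstructions (`VPIAI`, `VPBB`, `Store1` and `Store2`
// are hypothetical names):
//
//   VPlanSlp Slp(VPIAI, *VPBB);
//   SmallVector<VPValue *, 4> Roots = {Store1, Store2};
//   if (VPInstruction *Combined = Slp.buildGraph(Roots))
//     dbgs() << "widest bundle: " << Slp.getWidestBundleBits() << " bits\n";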

namespace vputils {

/// Returns true if only the first lane of \p Def is used.
bool onlyFirstLaneUsed(VPValue *Def);

} // end namespace vputils

} // end namespace llvm

#endif // LLVM_TRANSFORMS_VECTORIZE_VPLAN_H