#pragma once
#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunused-parameter"
#endif

//===- llvm/Instructions.h - Instruction subclass definitions ---*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file exposes the class definitions of all of the subclasses of the
// Instruction class. This is meant to be an easy way to get access to all
// instruction subclasses.
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_IR_INSTRUCTIONS_H
#define LLVM_IR_INSTRUCTIONS_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/Bitfields.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Twine.h"
#include "llvm/ADT/iterator.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/CFG.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/OperandTraits.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/User.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/AtomicOrdering.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iterator>

namespace llvm {

class APInt;
class ConstantInt;
class DataLayout;

//===----------------------------------------------------------------------===//
// AllocaInst Class
//===----------------------------------------------------------------------===//

/// an instruction to allocate memory on the stack
class AllocaInst : public UnaryInstruction {
  Type *AllocatedType;

  using AlignmentField = AlignmentBitfieldElementT<0>;
  using UsedWithInAllocaField = BoolBitfieldElementT<AlignmentField::NextBit>;
  using SwiftErrorField = BoolBitfieldElementT<UsedWithInAllocaField::NextBit>;
  static_assert(Bitfield::areContiguous<AlignmentField, UsedWithInAllocaField,
                                        SwiftErrorField>(),
                "Bitfields must be contiguous");

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  AllocaInst *cloneImpl() const;

public:
  explicit AllocaInst(Type *Ty, unsigned AddrSpace, Value *ArraySize,
                      const Twine &Name, Instruction *InsertBefore);
  AllocaInst(Type *Ty, unsigned AddrSpace, Value *ArraySize,
             const Twine &Name, BasicBlock *InsertAtEnd);
  AllocaInst(Type *Ty, unsigned AddrSpace, const Twine &Name,
             Instruction *InsertBefore);
  AllocaInst(Type *Ty, unsigned AddrSpace,
             const Twine &Name, BasicBlock *InsertAtEnd);
  AllocaInst(Type *Ty, unsigned AddrSpace, Value *ArraySize, Align Align,
             const Twine &Name = "", Instruction *InsertBefore = nullptr);
  AllocaInst(Type *Ty, unsigned AddrSpace, Value *ArraySize, Align Align,
             const Twine &Name, BasicBlock *InsertAtEnd);

  /// Return true if there is an allocation size parameter to the allocation
  /// instruction that is not 1.
  bool isArrayAllocation() const;

  /// Get the number of elements allocated. For a simple allocation of a single
  /// element, this will return a constant 1 value.
  const Value *getArraySize() const { return getOperand(0); }
  Value *getArraySize() { return getOperand(0); }

  /// Overload to return most specific pointer type.
  PointerType *getType() const {
    return cast<PointerType>(Instruction::getType());
  }

  /// Return the address space for the allocation.
  unsigned getAddressSpace() const {
    return getType()->getAddressSpace();
  }

  /// Get allocation size in bits. Returns None if size can't be determined,
  /// e.g. in case of a VLA.
  Optional<TypeSize> getAllocationSizeInBits(const DataLayout &DL) const;

  /// Return the type that is being allocated by the instruction.
  Type *getAllocatedType() const { return AllocatedType; }
  /// for use only in special circumstances that need to generically
  /// transform a whole instruction (eg: IR linking and vectorization).
  void setAllocatedType(Type *Ty) { AllocatedType = Ty; }

  /// Return the alignment of the memory that is being allocated by the
  /// instruction.
  Align getAlign() const {
    return Align(1ULL << getSubclassData<AlignmentField>());
  }
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }
  // FIXME: Remove this once the transition to Align is over.
  uint64_t getAlignment() const { return getAlign().value(); }

  /// Return true if this alloca is in the entry block of the function and is a
  /// constant size. If so, the code generator will fold it into the
  /// prolog/epilog code, so it is basically free.
  bool isStaticAlloca() const;

  /// Return true if this alloca is used as an inalloca argument to a call. Such
  /// allocas are never considered static even if they are in the entry block.
  bool isUsedWithInAlloca() const {
    return getSubclassData<UsedWithInAllocaField>();
  }

  /// Specify whether this alloca is used to represent the arguments to a call.
  void setUsedWithInAlloca(bool V) {
    setSubclassData<UsedWithInAllocaField>(V);
  }

  /// Return true if this alloca is used as a swifterror argument to a call.
  bool isSwiftError() const { return getSubclassData<SwiftErrorField>(); }
  /// Specify whether this alloca is used to represent a swifterror.
  void setSwiftError(bool V) { setSubclassData<SwiftErrorField>(V); }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return (I->getOpcode() == Instruction::Alloca);
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }
};
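
// Illustrative usage sketch (editor's note, not part of the original header):
// creating a fixed-size stack slot with one of the constructors above. The
// names `Int32Ty` and `InsertPt` are assumed to come from the caller's context.
//
//   AllocaInst *Slot = new AllocaInst(Int32Ty, /*AddrSpace=*/0,
//                                     /*ArraySize=*/nullptr, Align(4),
//                                     "slot", InsertPt);
//   if (Slot->isStaticAlloca())
//     ; // constant size and in the entry block: essentially free to allocate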

//===----------------------------------------------------------------------===//
// LoadInst Class
//===----------------------------------------------------------------------===//

/// An instruction for reading from memory. This uses the SubclassData field in
/// Value to store whether or not the load is volatile.
class LoadInst : public UnaryInstruction {
  using VolatileField = BoolBitfieldElementT<0>;
  using AlignmentField = AlignmentBitfieldElementT<VolatileField::NextBit>;
  using OrderingField = AtomicOrderingBitfieldElementT<AlignmentField::NextBit>;
  static_assert(
      Bitfield::areContiguous<VolatileField, AlignmentField, OrderingField>(),
      "Bitfields must be contiguous");

  void AssertOK();

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  LoadInst *cloneImpl() const;

public:
  LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr,
           Instruction *InsertBefore);
  LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, BasicBlock *InsertAtEnd);
  LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
           Instruction *InsertBefore);
  LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
           BasicBlock *InsertAtEnd);
  LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
           Align Align, Instruction *InsertBefore = nullptr);
  LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
           Align Align, BasicBlock *InsertAtEnd);
  LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
           Align Align, AtomicOrdering Order,
           SyncScope::ID SSID = SyncScope::System,
           Instruction *InsertBefore = nullptr);
  LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
           Align Align, AtomicOrdering Order, SyncScope::ID SSID,
           BasicBlock *InsertAtEnd);

  /// Return true if this is a load from a volatile memory location.
  bool isVolatile() const { return getSubclassData<VolatileField>(); }
  /// Specify whether this is a volatile load or not.
  void setVolatile(bool V) { setSubclassData<VolatileField>(V); }

  /// Return the alignment of the access that is being performed.
  /// FIXME: Remove this function once the transition to Align is over.
  /// Use getAlign() instead.
  uint64_t getAlignment() const { return getAlign().value(); }

  /// Return the alignment of the access that is being performed.
  Align getAlign() const {
    return Align(1ULL << (getSubclassData<AlignmentField>()));
  }
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }

  /// Returns the ordering constraint of this load instruction.
  AtomicOrdering getOrdering() const {
    return getSubclassData<OrderingField>();
  }
  /// Sets the ordering constraint of this load instruction. May not be Release
  /// or AcquireRelease.
  void setOrdering(AtomicOrdering Ordering) {
    setSubclassData<OrderingField>(Ordering);
  }

  /// Returns the synchronization scope ID of this load instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }
  /// Sets the synchronization scope ID of this load instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  /// Sets the ordering constraint and the synchronization scope ID of this load
  /// instruction.
  void setAtomic(AtomicOrdering Ordering,
                 SyncScope::ID SSID = SyncScope::System) {
    setOrdering(Ordering);
    setSyncScopeID(SSID);
  }

  bool isSimple() const { return !isAtomic() && !isVolatile(); }

  bool isUnordered() const {
    return (getOrdering() == AtomicOrdering::NotAtomic ||
            getOrdering() == AtomicOrdering::Unordered) &&
           !isVolatile();
  }

  Value *getPointerOperand() { return getOperand(0); }
  const Value *getPointerOperand() const { return getOperand(0); }
  static unsigned getPointerOperandIndex() { return 0U; }
  Type *getPointerOperandType() const { return getPointerOperand()->getType(); }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperandType()->getPointerAddressSpace();
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Load;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this load instruction. Not quite enough
  /// room in SubClassData for everything, so synchronization scope ID gets its
  /// own field.
  SyncScope::ID SSID;
};
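
// Illustrative usage sketch (editor's note, not part of the original header):
// building an acquire load with the atomic constructor above. `Int32Ty`, `Ptr`,
// and `InsertPt` are assumed to come from the caller's context.
//
//   LoadInst *LI = new LoadInst(Int32Ty, Ptr, "val", /*isVolatile=*/false,
//                               Align(4), AtomicOrdering::Acquire,
//                               SyncScope::System, InsertPt);
//   // An Acquire load is atomic but not "unordered", so:
//   //   LI->isSimple()    == false
//   //   LI->isUnordered() == false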

//===----------------------------------------------------------------------===//
// StoreInst Class
//===----------------------------------------------------------------------===//

/// An instruction for storing to memory.
class StoreInst : public Instruction {
  using VolatileField = BoolBitfieldElementT<0>;
  using AlignmentField = AlignmentBitfieldElementT<VolatileField::NextBit>;
  using OrderingField = AtomicOrderingBitfieldElementT<AlignmentField::NextBit>;
  static_assert(
      Bitfield::areContiguous<VolatileField, AlignmentField, OrderingField>(),
      "Bitfields must be contiguous");

  void AssertOK();

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  StoreInst *cloneImpl() const;

public:
  StoreInst(Value *Val, Value *Ptr, Instruction *InsertBefore);
  StoreInst(Value *Val, Value *Ptr, BasicBlock *InsertAtEnd);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Instruction *InsertBefore);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, BasicBlock *InsertAtEnd);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Align Align,
            Instruction *InsertBefore = nullptr);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Align Align,
            BasicBlock *InsertAtEnd);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Align Align,
            AtomicOrdering Order, SyncScope::ID SSID = SyncScope::System,
            Instruction *InsertBefore = nullptr);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Align Align,
            AtomicOrdering Order, SyncScope::ID SSID, BasicBlock *InsertAtEnd);

  // allocate space for exactly two operands
  void *operator new(size_t S) { return User::operator new(S, 2); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  /// Return true if this is a store to a volatile memory location.
  bool isVolatile() const { return getSubclassData<VolatileField>(); }
  /// Specify whether this is a volatile store or not.
  void setVolatile(bool V) { setSubclassData<VolatileField>(V); }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Return the alignment of the access that is being performed
  /// FIXME: Remove this function once the transition to Align is over.
  /// Use getAlign() instead.
  uint64_t getAlignment() const { return getAlign().value(); }

  Align getAlign() const {
    return Align(1ULL << (getSubclassData<AlignmentField>()));
  }
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }

  /// Returns the ordering constraint of this store instruction.
  AtomicOrdering getOrdering() const {
    return getSubclassData<OrderingField>();
  }
  /// Sets the ordering constraint of this store instruction. May not be
  /// Acquire or AcquireRelease.
  void setOrdering(AtomicOrdering Ordering) {
    setSubclassData<OrderingField>(Ordering);
  }

  /// Returns the synchronization scope ID of this store instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }
  /// Sets the synchronization scope ID of this store instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  /// Sets the ordering constraint and the synchronization scope ID of this
  /// store instruction.
  void setAtomic(AtomicOrdering Ordering,
                 SyncScope::ID SSID = SyncScope::System) {
    setOrdering(Ordering);
    setSyncScopeID(SSID);
  }

  bool isSimple() const { return !isAtomic() && !isVolatile(); }

  bool isUnordered() const {
    return (getOrdering() == AtomicOrdering::NotAtomic ||
            getOrdering() == AtomicOrdering::Unordered) &&
           !isVolatile();
  }

  Value *getValueOperand() { return getOperand(0); }
  const Value *getValueOperand() const { return getOperand(0); }

  Value *getPointerOperand() { return getOperand(1); }
  const Value *getPointerOperand() const { return getOperand(1); }
  static unsigned getPointerOperandIndex() { return 1U; }
  Type *getPointerOperandType() const { return getPointerOperand()->getType(); }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperandType()->getPointerAddressSpace();
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Store;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this store instruction. Not quite enough
  /// room in SubClassData for everything, so synchronization scope ID gets its
  /// own field.
  SyncScope::ID SSID;
};

template <>
struct OperandTraits<StoreInst> : public FixedNumOperandTraits<StoreInst, 2> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(StoreInst, Value)
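
// Illustrative usage sketch (editor's note, not part of the original header):
// a plain aligned store and its two operands. `Val`, `Ptr`, and `InsertPt` are
// assumed to come from the caller's context.
//
//   StoreInst *SI = new StoreInst(Val, Ptr, /*isVolatile=*/false, Align(8),
//                                 InsertPt);
//   Value *Stored = SI->getValueOperand();   // operand 0
//   Value *Dest   = SI->getPointerOperand(); // operand 1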

//===----------------------------------------------------------------------===//
// FenceInst Class
//===----------------------------------------------------------------------===//

/// An instruction for ordering other memory operations.
class FenceInst : public Instruction {
  using OrderingField = AtomicOrderingBitfieldElementT<0>;

  void Init(AtomicOrdering Ordering, SyncScope::ID SSID);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  FenceInst *cloneImpl() const;

public:
  // Ordering may only be Acquire, Release, AcquireRelease, or
  // SequentiallyConsistent.
  FenceInst(LLVMContext &C, AtomicOrdering Ordering,
            SyncScope::ID SSID = SyncScope::System,
            Instruction *InsertBefore = nullptr);
  FenceInst(LLVMContext &C, AtomicOrdering Ordering, SyncScope::ID SSID,
            BasicBlock *InsertAtEnd);

  // allocate space for exactly zero operands
  void *operator new(size_t S) { return User::operator new(S, 0); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  /// Returns the ordering constraint of this fence instruction.
  AtomicOrdering getOrdering() const {
    return getSubclassData<OrderingField>();
  }
  /// Sets the ordering constraint of this fence instruction. May only be
  /// Acquire, Release, AcquireRelease, or SequentiallyConsistent.
  void setOrdering(AtomicOrdering Ordering) {
    setSubclassData<OrderingField>(Ordering);
  }

  /// Returns the synchronization scope ID of this fence instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }
  /// Sets the synchronization scope ID of this fence instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Fence;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this fence instruction. Not quite enough
  /// room in SubClassData for everything, so synchronization scope ID gets its
  /// own field.
  SyncScope::ID SSID;
};
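
// Illustrative usage sketch (editor's note, not part of the original header):
// a sequentially consistent, system-wide fence. `Ctx` and `InsertPt` are
// assumed to come from the caller's context.
//
//   FenceInst *FI = new FenceInst(Ctx, AtomicOrdering::SequentiallyConsistent,
//                                 SyncScope::System, InsertPt);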

//===----------------------------------------------------------------------===//
// AtomicCmpXchgInst Class
//===----------------------------------------------------------------------===//

/// An instruction that atomically checks whether a
/// specified value is in a memory location, and, if it is, stores a new value
/// there. The value returned by this instruction is a pair containing the
/// original value as first element, and an i1 indicating success (true) or
/// failure (false) as second element.
///
class AtomicCmpXchgInst : public Instruction {
  void Init(Value *Ptr, Value *Cmp, Value *NewVal, Align Align,
            AtomicOrdering SuccessOrdering, AtomicOrdering FailureOrdering,
            SyncScope::ID SSID);

  template <unsigned Offset>
  using AtomicOrderingBitfieldElement =
      typename Bitfield::Element<AtomicOrdering, Offset, 3,
                                 AtomicOrdering::LAST>;

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  AtomicCmpXchgInst *cloneImpl() const;

public:
  AtomicCmpXchgInst(Value *Ptr, Value *Cmp, Value *NewVal, Align Alignment,
                    AtomicOrdering SuccessOrdering,
                    AtomicOrdering FailureOrdering, SyncScope::ID SSID,
                    Instruction *InsertBefore = nullptr);
  AtomicCmpXchgInst(Value *Ptr, Value *Cmp, Value *NewVal, Align Alignment,
                    AtomicOrdering SuccessOrdering,
                    AtomicOrdering FailureOrdering, SyncScope::ID SSID,
                    BasicBlock *InsertAtEnd);

  // allocate space for exactly three operands
  void *operator new(size_t S) { return User::operator new(S, 3); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  using VolatileField = BoolBitfieldElementT<0>;
  using WeakField = BoolBitfieldElementT<VolatileField::NextBit>;
  using SuccessOrderingField =
      AtomicOrderingBitfieldElementT<WeakField::NextBit>;
  using FailureOrderingField =
      AtomicOrderingBitfieldElementT<SuccessOrderingField::NextBit>;
  using AlignmentField =
      AlignmentBitfieldElementT<FailureOrderingField::NextBit>;
  static_assert(
      Bitfield::areContiguous<VolatileField, WeakField, SuccessOrderingField,
                              FailureOrderingField, AlignmentField>(),
      "Bitfields must be contiguous");

  /// Return the alignment of the memory that is being allocated by the
  /// instruction.
  Align getAlign() const {
    return Align(1ULL << getSubclassData<AlignmentField>());
  }
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }

  /// Return true if this is a cmpxchg from a volatile memory
  /// location.
  ///
  bool isVolatile() const { return getSubclassData<VolatileField>(); }
  /// Specify whether this is a volatile cmpxchg.
  ///
  void setVolatile(bool V) { setSubclassData<VolatileField>(V); }

  /// Return true if this cmpxchg may spuriously fail.
  bool isWeak() const { return getSubclassData<WeakField>(); }
  void setWeak(bool IsWeak) { setSubclassData<WeakField>(IsWeak); }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  static bool isValidSuccessOrdering(AtomicOrdering Ordering) {
    return Ordering != AtomicOrdering::NotAtomic &&
           Ordering != AtomicOrdering::Unordered;
  }
  static bool isValidFailureOrdering(AtomicOrdering Ordering) {
    return Ordering != AtomicOrdering::NotAtomic &&
           Ordering != AtomicOrdering::Unordered &&
           Ordering != AtomicOrdering::AcquireRelease &&
           Ordering != AtomicOrdering::Release;
  }

  /// Returns the success ordering constraint of this cmpxchg instruction.
  AtomicOrdering getSuccessOrdering() const {
    return getSubclassData<SuccessOrderingField>();
  }
  /// Sets the success ordering constraint of this cmpxchg instruction.
  void setSuccessOrdering(AtomicOrdering Ordering) {
    assert(isValidSuccessOrdering(Ordering) &&
           "invalid CmpXchg success ordering");
    setSubclassData<SuccessOrderingField>(Ordering);
  }

  /// Returns the failure ordering constraint of this cmpxchg instruction.
  AtomicOrdering getFailureOrdering() const {
    return getSubclassData<FailureOrderingField>();
  }
  /// Sets the failure ordering constraint of this cmpxchg instruction.
  void setFailureOrdering(AtomicOrdering Ordering) {
    assert(isValidFailureOrdering(Ordering) &&
           "invalid CmpXchg failure ordering");
    setSubclassData<FailureOrderingField>(Ordering);
  }

  /// Returns a single ordering which is at least as strong as both the
  /// success and failure orderings for this cmpxchg.
  AtomicOrdering getMergedOrdering() const {
    if (getFailureOrdering() == AtomicOrdering::SequentiallyConsistent)
      return AtomicOrdering::SequentiallyConsistent;
    if (getFailureOrdering() == AtomicOrdering::Acquire) {
      if (getSuccessOrdering() == AtomicOrdering::Monotonic)
        return AtomicOrdering::Acquire;
      if (getSuccessOrdering() == AtomicOrdering::Release)
        return AtomicOrdering::AcquireRelease;
    }
    return getSuccessOrdering();
  }

  /// Returns the synchronization scope ID of this cmpxchg instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }
  /// Sets the synchronization scope ID of this cmpxchg instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  Value *getPointerOperand() { return getOperand(0); }
  const Value *getPointerOperand() const { return getOperand(0); }
  static unsigned getPointerOperandIndex() { return 0U; }

  Value *getCompareOperand() { return getOperand(1); }
  const Value *getCompareOperand() const { return getOperand(1); }

  Value *getNewValOperand() { return getOperand(2); }
  const Value *getNewValOperand() const { return getOperand(2); }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperand()->getType()->getPointerAddressSpace();
  }

  /// Returns the strongest permitted ordering on failure, given the
  /// desired ordering on success.
  ///
  /// If the comparison in a cmpxchg operation fails, there is no atomic store
  /// so release semantics cannot be provided. So this function drops explicit
  /// Release requests from the AtomicOrdering. A SequentiallyConsistent
  /// operation would remain SequentiallyConsistent.
  static AtomicOrdering
  getStrongestFailureOrdering(AtomicOrdering SuccessOrdering) {
    switch (SuccessOrdering) {
    default:
      llvm_unreachable("invalid cmpxchg success ordering");
    case AtomicOrdering::Release:
    case AtomicOrdering::Monotonic:
      return AtomicOrdering::Monotonic;
    case AtomicOrdering::AcquireRelease:
    case AtomicOrdering::Acquire:
      return AtomicOrdering::Acquire;
    case AtomicOrdering::SequentiallyConsistent:
      return AtomicOrdering::SequentiallyConsistent;
    }
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::AtomicCmpXchg;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this cmpxchg instruction. Not quite
  /// enough room in SubClassData for everything, so synchronization scope ID
  /// gets its own field.
  SyncScope::ID SSID;
};

template <>
struct OperandTraits<AtomicCmpXchgInst> :
    public FixedNumOperandTraits<AtomicCmpXchgInst, 3> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(AtomicCmpXchgInst, Value)
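
// Illustrative usage sketch (editor's note, not part of the original header):
// a weak cmpxchg whose failure ordering is derived from the success ordering
// via getStrongestFailureOrdering above. `Ptr`, `Expected`, `Desired`, and
// `InsertPt` are assumed to come from the caller's context.
//
//   AtomicOrdering Success = AtomicOrdering::AcquireRelease;
//   AtomicOrdering Failure =
//       AtomicCmpXchgInst::getStrongestFailureOrdering(Success); // Acquire
//   auto *CX = new AtomicCmpXchgInst(Ptr, Expected, Desired, Align(4), Success,
//                                    Failure, SyncScope::System, InsertPt);
//   CX->setWeak(true); // the compare may spuriously fail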

//===----------------------------------------------------------------------===//
// AtomicRMWInst Class
//===----------------------------------------------------------------------===//

/// an instruction that atomically reads a memory location,
/// combines it with another value, and then stores the result back. Returns
/// the old value.
///
class AtomicRMWInst : public Instruction {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  AtomicRMWInst *cloneImpl() const;

public:
  /// This enumeration lists the possible modifications atomicrmw can make. In
  /// the descriptions, 'p' is the pointer to the instruction's memory location,
  /// 'old' is the initial value of *p, and 'v' is the other value passed to the
  /// instruction. These instructions always return 'old'.
  enum BinOp : unsigned {
    /// *p = v
    Xchg,
    /// *p = old + v
    Add,
    /// *p = old - v
    Sub,
    /// *p = old & v
    And,
    /// *p = ~(old & v)
    Nand,
    /// *p = old | v
    Or,
    /// *p = old ^ v
    Xor,
    /// *p = old >signed v ? old : v
    Max,
    /// *p = old <signed v ? old : v
    Min,
    /// *p = old >unsigned v ? old : v
    UMax,
    /// *p = old <unsigned v ? old : v
    UMin,
    /// *p = old + v
    FAdd,
    /// *p = old - v
    FSub,

    FIRST_BINOP = Xchg,
    LAST_BINOP = FSub,
    BAD_BINOP
  };

private:
  template <unsigned Offset>
  using AtomicOrderingBitfieldElement =
      typename Bitfield::Element<AtomicOrdering, Offset, 3,
                                 AtomicOrdering::LAST>;

  template <unsigned Offset>
  using BinOpBitfieldElement =
      typename Bitfield::Element<BinOp, Offset, 4, BinOp::LAST_BINOP>;

public:
  AtomicRMWInst(BinOp Operation, Value *Ptr, Value *Val, Align Alignment,
                AtomicOrdering Ordering, SyncScope::ID SSID,
                Instruction *InsertBefore = nullptr);
  AtomicRMWInst(BinOp Operation, Value *Ptr, Value *Val, Align Alignment,
                AtomicOrdering Ordering, SyncScope::ID SSID,
                BasicBlock *InsertAtEnd);

  // allocate space for exactly two operands
  void *operator new(size_t S) { return User::operator new(S, 2); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  using VolatileField = BoolBitfieldElementT<0>;
  using AtomicOrderingField =
      AtomicOrderingBitfieldElementT<VolatileField::NextBit>;
  using OperationField = BinOpBitfieldElement<AtomicOrderingField::NextBit>;
  using AlignmentField = AlignmentBitfieldElementT<OperationField::NextBit>;
  static_assert(Bitfield::areContiguous<VolatileField, AtomicOrderingField,
                                        OperationField, AlignmentField>(),
                "Bitfields must be contiguous");

  BinOp getOperation() const { return getSubclassData<OperationField>(); }

  static StringRef getOperationName(BinOp Op);

  static bool isFPOperation(BinOp Op) {
    switch (Op) {
    case AtomicRMWInst::FAdd:
    case AtomicRMWInst::FSub:
      return true;
    default:
      return false;
    }
  }

  void setOperation(BinOp Operation) {
    setSubclassData<OperationField>(Operation);
  }

  /// Return the alignment of the memory that is being allocated by the
  /// instruction.
  Align getAlign() const {
    return Align(1ULL << getSubclassData<AlignmentField>());
  }
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }

  /// Return true if this is a RMW on a volatile memory location.
  ///
  bool isVolatile() const { return getSubclassData<VolatileField>(); }
  /// Specify whether this is a volatile RMW or not.
  ///
  void setVolatile(bool V) { setSubclassData<VolatileField>(V); }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Returns the ordering constraint of this rmw instruction.
  AtomicOrdering getOrdering() const {
    return getSubclassData<AtomicOrderingField>();
  }
  /// Sets the ordering constraint of this rmw instruction.
  void setOrdering(AtomicOrdering Ordering) {
    assert(Ordering != AtomicOrdering::NotAtomic &&
           "atomicrmw instructions can only be atomic.");
    setSubclassData<AtomicOrderingField>(Ordering);
  }

  /// Returns the synchronization scope ID of this rmw instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }
  /// Sets the synchronization scope ID of this rmw instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  Value *getPointerOperand() { return getOperand(0); }
  const Value *getPointerOperand() const { return getOperand(0); }
  static unsigned getPointerOperandIndex() { return 0U; }

  Value *getValOperand() { return getOperand(1); }
  const Value *getValOperand() const { return getOperand(1); }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperand()->getType()->getPointerAddressSpace();
  }

  bool isFloatingPointOperation() const {
    return isFPOperation(getOperation());
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::AtomicRMW;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  void Init(BinOp Operation, Value *Ptr, Value *Val, Align Align,
            AtomicOrdering Ordering, SyncScope::ID SSID);

  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this rmw instruction. Not quite enough
  /// room in SubClassData for everything, so synchronization scope ID gets its
  /// own field.
  SyncScope::ID SSID;
};

template <>
struct OperandTraits<AtomicRMWInst>
    : public FixedNumOperandTraits<AtomicRMWInst, 2> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(AtomicRMWInst, Value)
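
// Illustrative usage sketch (editor's note, not part of the original header):
// an atomic fetch-add. `Ptr`, `IncrVal`, and `InsertPt` are assumed to come
// from the caller's context.
//
//   auto *RMW = new AtomicRMWInst(AtomicRMWInst::Add, Ptr, IncrVal, Align(4),
//                                 AtomicOrdering::Monotonic, SyncScope::System,
//                                 InsertPt);
//   // The instruction yields the value *Ptr held before the update, and
//   // RMW->isFloatingPointOperation() is false for BinOp::Add.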

//===----------------------------------------------------------------------===//
// GetElementPtrInst Class
//===----------------------------------------------------------------------===//

// checkGEPType - Simple wrapper function to give a better assertion failure
// message on bad indexes for a gep instruction.
//
inline Type *checkGEPType(Type *Ty) {
  assert(Ty && "Invalid GetElementPtrInst indices for type!");
  return Ty;
}

/// an instruction for type-safe pointer arithmetic to
/// access elements of arrays and structs
///
class GetElementPtrInst : public Instruction {
  Type *SourceElementType;
  Type *ResultElementType;

  GetElementPtrInst(const GetElementPtrInst &GEPI);
  776. /// Constructors - Create a getelementptr instruction with a base pointer an
  777. /// list of indices. The first ctor can optionally insert before an existing
  778. /// instruction, the second appends the new instruction to the specified
  779. /// BasicBlock.
  780. inline GetElementPtrInst(Type *PointeeType, Value *Ptr,
  781. ArrayRef<Value *> IdxList, unsigned Values,
  782. const Twine &NameStr, Instruction *InsertBefore);
  783. inline GetElementPtrInst(Type *PointeeType, Value *Ptr,
  784. ArrayRef<Value *> IdxList, unsigned Values,
  785. const Twine &NameStr, BasicBlock *InsertAtEnd);
  786. void init(Value *Ptr, ArrayRef<Value *> IdxList, const Twine &NameStr);
  787. protected:
  788. // Note: Instruction needs to be a friend here to call cloneImpl.
  789. friend class Instruction;
  790. GetElementPtrInst *cloneImpl() const;
  791. public:
  792. static GetElementPtrInst *Create(Type *PointeeType, Value *Ptr,
  793. ArrayRef<Value *> IdxList,
  794. const Twine &NameStr = "",
  795. Instruction *InsertBefore = nullptr) {
  796. unsigned Values = 1 + unsigned(IdxList.size());
  797. assert(PointeeType && "Must specify element type");
  798. assert(cast<PointerType>(Ptr->getType()->getScalarType())
  799. ->isOpaqueOrPointeeTypeMatches(PointeeType));
  800. return new (Values) GetElementPtrInst(PointeeType, Ptr, IdxList, Values,
  801. NameStr, InsertBefore);
  802. }
  803. static GetElementPtrInst *Create(Type *PointeeType, Value *Ptr,
  804. ArrayRef<Value *> IdxList,
  805. const Twine &NameStr,
  806. BasicBlock *InsertAtEnd) {
  807. unsigned Values = 1 + unsigned(IdxList.size());
  808. assert(PointeeType && "Must specify element type");
  809. assert(cast<PointerType>(Ptr->getType()->getScalarType())
  810. ->isOpaqueOrPointeeTypeMatches(PointeeType));
  811. return new (Values) GetElementPtrInst(PointeeType, Ptr, IdxList, Values,
  812. NameStr, InsertAtEnd);
  813. }
  814. /// Create an "inbounds" getelementptr. See the documentation for the
  815. /// "inbounds" flag in LangRef.html for details.
  816. static GetElementPtrInst *
  817. CreateInBounds(Type *PointeeType, Value *Ptr, ArrayRef<Value *> IdxList,
  818. const Twine &NameStr = "",
  819. Instruction *InsertBefore = nullptr) {
  820. GetElementPtrInst *GEP =
  821. Create(PointeeType, Ptr, IdxList, NameStr, InsertBefore);
  822. GEP->setIsInBounds(true);
  823. return GEP;
  824. }
  825. static GetElementPtrInst *CreateInBounds(Type *PointeeType, Value *Ptr,
  826. ArrayRef<Value *> IdxList,
  827. const Twine &NameStr,
  828. BasicBlock *InsertAtEnd) {
  829. GetElementPtrInst *GEP =
  830. Create(PointeeType, Ptr, IdxList, NameStr, InsertAtEnd);
  831. GEP->setIsInBounds(true);
  832. return GEP;
  833. }
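  // Example (illustrative sketch): building an inbounds GEP that addresses
  // element 2 of a [4 x i32] array through the factory above. `Ctx`,
  // `ArrayPtr` and `InsertBefore` are hypothetical values supplied by the
  // caller.
  //
  //   Type *I32 = Type::getInt32Ty(Ctx);
  //   Value *Idx[] = {ConstantInt::get(Type::getInt64Ty(Ctx), 0),
  //                   ConstantInt::get(Type::getInt64Ty(Ctx), 2)};
  //   GetElementPtrInst *GEP = GetElementPtrInst::CreateInBounds(
  //       ArrayType::get(I32, 4), ArrayPtr, Idx, "elt.addr", InsertBefore);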
  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  Type *getSourceElementType() const { return SourceElementType; }

  void setSourceElementType(Type *Ty) { SourceElementType = Ty; }
  void setResultElementType(Type *Ty) { ResultElementType = Ty; }

  Type *getResultElementType() const {
    assert(cast<PointerType>(getType()->getScalarType())
               ->isOpaqueOrPointeeTypeMatches(ResultElementType));
    return ResultElementType;
  }

  /// Returns the address space of this instruction's pointer type.
  unsigned getAddressSpace() const {
    // Note that this is always the same as the pointer operand's address space
    // and that is cheaper to compute, so cheat here.
    return getPointerAddressSpace();
  }

  /// Returns the result type of a getelementptr with the given source
  /// element type and indexes.
  ///
  /// Null is returned if the indices are invalid for the specified
  /// source element type.
  static Type *getIndexedType(Type *Ty, ArrayRef<Value *> IdxList);
  static Type *getIndexedType(Type *Ty, ArrayRef<Constant *> IdxList);
  static Type *getIndexedType(Type *Ty, ArrayRef<uint64_t> IdxList);

  /// Return the type of the element at the given index of an indexable
  /// type. This is equivalent to "getIndexedType(Agg, {Zero, Idx})".
  ///
  /// Returns null if the type can't be indexed, or the given index is not
  /// legal for the given type.
  static Type *getTypeAtIndex(Type *Ty, Value *Idx);
  static Type *getTypeAtIndex(Type *Ty, uint64_t Idx);

  inline op_iterator       idx_begin()       { return op_begin()+1; }
  inline const_op_iterator idx_begin() const { return op_begin()+1; }
  inline op_iterator       idx_end()         { return op_end(); }
  inline const_op_iterator idx_end()   const { return op_end(); }

  inline iterator_range<op_iterator> indices() {
    return make_range(idx_begin(), idx_end());
  }

  inline iterator_range<const_op_iterator> indices() const {
    return make_range(idx_begin(), idx_end());
  }

  Value *getPointerOperand() {
    return getOperand(0);
  }
  const Value *getPointerOperand() const {
    return getOperand(0);
  }
  static unsigned getPointerOperandIndex() {
    return 0U; // get index for modifying correct operand.
  }

  /// Method to return the pointer operand as a
  /// PointerType.
  Type *getPointerOperandType() const {
    return getPointerOperand()->getType();
  }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperandType()->getPointerAddressSpace();
  }

  /// Returns the pointer type returned by the GEP
  /// instruction, which may be a vector of pointers.
  static Type *getGEPReturnType(Type *ElTy, Value *Ptr,
                                ArrayRef<Value *> IdxList) {
    PointerType *OrigPtrTy = cast<PointerType>(Ptr->getType()->getScalarType());
    unsigned AddrSpace = OrigPtrTy->getAddressSpace();
    Type *ResultElemTy = checkGEPType(getIndexedType(ElTy, IdxList));
    Type *PtrTy = OrigPtrTy->isOpaque()
                      ? PointerType::get(OrigPtrTy->getContext(), AddrSpace)
                      : PointerType::get(ResultElemTy, AddrSpace);
    // Vector GEP
    if (auto *PtrVTy = dyn_cast<VectorType>(Ptr->getType())) {
      ElementCount EltCount = PtrVTy->getElementCount();
      return VectorType::get(PtrTy, EltCount);
    }
    for (Value *Index : IdxList)
      if (auto *IndexVTy = dyn_cast<VectorType>(Index->getType())) {
        ElementCount EltCount = IndexVTy->getElementCount();
        return VectorType::get(PtrTy, EltCount);
      }
    // Scalar GEP
    return PtrTy;
  }

  unsigned getNumIndices() const { // Note: always non-negative
    return getNumOperands() - 1;
  }

  bool hasIndices() const {
    return getNumOperands() > 1;
  }

  /// Return true if all of the indices of this GEP are
  /// zeros. If so, the result pointer and the first operand have the same
  /// value, just potentially different types.
  bool hasAllZeroIndices() const;

  /// Return true if all of the indices of this GEP are
  /// constant integers. If so, the result pointer and the first operand have
  /// a constant offset between them.
  bool hasAllConstantIndices() const;

  /// Set or clear the inbounds flag on this GEP instruction.
  /// See LangRef.html for the meaning of inbounds on a getelementptr.
  void setIsInBounds(bool b = true);

  /// Determine whether the GEP has the inbounds flag.
  bool isInBounds() const;

  /// Accumulate the constant address offset of this GEP if possible.
  ///
  /// This routine accepts an APInt into which it will accumulate the constant
  /// offset of this GEP if the GEP is in fact constant. If the GEP is not
  /// all-constant, it returns false and the value of the offset APInt is
  /// undefined (it is *not* preserved!). The APInt passed into this routine
  /// must be at least as wide as the IntPtr type for the address space of
  /// the base GEP pointer.
  bool accumulateConstantOffset(const DataLayout &DL, APInt &Offset) const;
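  // Example (illustrative sketch): calling accumulateConstantOffset with an
  // APInt sized for the GEP's address space, per the requirement above. The
  // names `GEP`, `DL` and `useConstantByteOffset` are hypothetical.
  //
  //   APInt Offset(DL.getIndexSizeInBits(GEP->getPointerAddressSpace()), 0);
  //   if (GEP->accumulateConstantOffset(DL, Offset))
  //     useConstantByteOffset(Offset); // all indices were constant integers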
  bool collectOffset(const DataLayout &DL, unsigned BitWidth,
                     MapVector<Value *, APInt> &VariableOffsets,
                     APInt &ConstantOffset) const;

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return (I->getOpcode() == Instruction::GetElementPtr);
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};

template <>
struct OperandTraits<GetElementPtrInst> :
  public VariadicOperandTraits<GetElementPtrInst, 1> {
};

GetElementPtrInst::GetElementPtrInst(Type *PointeeType, Value *Ptr,
                                     ArrayRef<Value *> IdxList, unsigned Values,
                                     const Twine &NameStr,
                                     Instruction *InsertBefore)
    : Instruction(getGEPReturnType(PointeeType, Ptr, IdxList), GetElementPtr,
                  OperandTraits<GetElementPtrInst>::op_end(this) - Values,
                  Values, InsertBefore),
      SourceElementType(PointeeType),
      ResultElementType(getIndexedType(PointeeType, IdxList)) {
  assert(cast<PointerType>(getType()->getScalarType())
             ->isOpaqueOrPointeeTypeMatches(ResultElementType));
  init(Ptr, IdxList, NameStr);
}

GetElementPtrInst::GetElementPtrInst(Type *PointeeType, Value *Ptr,
                                     ArrayRef<Value *> IdxList, unsigned Values,
                                     const Twine &NameStr,
                                     BasicBlock *InsertAtEnd)
    : Instruction(getGEPReturnType(PointeeType, Ptr, IdxList), GetElementPtr,
                  OperandTraits<GetElementPtrInst>::op_end(this) - Values,
                  Values, InsertAtEnd),
      SourceElementType(PointeeType),
      ResultElementType(getIndexedType(PointeeType, IdxList)) {
  assert(cast<PointerType>(getType()->getScalarType())
             ->isOpaqueOrPointeeTypeMatches(ResultElementType));
  init(Ptr, IdxList, NameStr);
}

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(GetElementPtrInst, Value)
//===----------------------------------------------------------------------===//
//                               ICmpInst Class
//===----------------------------------------------------------------------===//

/// This instruction compares its operands according to the predicate given
/// to the constructor. It only operates on integers or pointers. The operands
/// must be identical types.
/// Represent an integer comparison operator.
class ICmpInst: public CmpInst {
  void AssertOK() {
    assert(isIntPredicate() &&
           "Invalid ICmp predicate value");
    assert(getOperand(0)->getType() == getOperand(1)->getType() &&
           "Both operands to ICmp instruction are not of the same type!");
    // Check that the operands are the right type
    assert((getOperand(0)->getType()->isIntOrIntVectorTy() ||
            getOperand(0)->getType()->isPtrOrPtrVectorTy()) &&
           "Invalid operand types for ICmp instruction");
  }

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical ICmpInst
  ICmpInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics.
  ICmpInst(
    Instruction *InsertBefore, ///< Where to insert
    Predicate pred,            ///< The predicate to use for the comparison
    Value *LHS,                ///< The left-hand-side of the expression
    Value *RHS,                ///< The right-hand-side of the expression
    const Twine &NameStr = ""  ///< Name of the instruction
  ) : CmpInst(makeCmpResultType(LHS->getType()),
              Instruction::ICmp, pred, LHS, RHS, NameStr,
              InsertBefore) {
#ifndef NDEBUG
    AssertOK();
#endif
  }

  /// Constructor with insert-at-end semantics.
  ICmpInst(
    BasicBlock &InsertAtEnd,   ///< Block to insert into.
    Predicate pred,            ///< The predicate to use for the comparison
    Value *LHS,                ///< The left-hand-side of the expression
    Value *RHS,                ///< The right-hand-side of the expression
    const Twine &NameStr = ""  ///< Name of the instruction
  ) : CmpInst(makeCmpResultType(LHS->getType()),
              Instruction::ICmp, pred, LHS, RHS, NameStr,
              &InsertAtEnd) {
#ifndef NDEBUG
    AssertOK();
#endif
  }

  /// Constructor with no-insertion semantics
  ICmpInst(
    Predicate pred,            ///< The predicate to use for the comparison
    Value *LHS,                ///< The left-hand-side of the expression
    Value *RHS,                ///< The right-hand-side of the expression
    const Twine &NameStr = ""  ///< Name of the instruction
  ) : CmpInst(makeCmpResultType(LHS->getType()),
              Instruction::ICmp, pred, LHS, RHS, NameStr) {
#ifndef NDEBUG
    AssertOK();
#endif
  }

  /// For example, EQ->EQ, SLE->SLE, UGT->SGT, etc.
  /// @returns the predicate that would be the result if the operand were
  /// regarded as signed.
  /// Return the signed version of the predicate
  Predicate getSignedPredicate() const {
    return getSignedPredicate(getPredicate());
  }

  /// This is a static version that you can use without an instruction.
  /// Return the signed version of the predicate.
  static Predicate getSignedPredicate(Predicate pred);

  /// For example, EQ->EQ, SLE->ULE, UGT->UGT, etc.
  /// @returns the predicate that would be the result if the operand were
  /// regarded as unsigned.
  /// Return the unsigned version of the predicate
  Predicate getUnsignedPredicate() const {
    return getUnsignedPredicate(getPredicate());
  }

  /// This is a static version that you can use without an instruction.
  /// Return the unsigned version of the predicate.
  static Predicate getUnsignedPredicate(Predicate pred);

  /// Return true if this predicate is either EQ or NE. This also
  /// tests for commutativity.
  static bool isEquality(Predicate P) {
    return P == ICMP_EQ || P == ICMP_NE;
  }

  /// Return true if this predicate is either EQ or NE. This also
  /// tests for commutativity.
  bool isEquality() const {
    return isEquality(getPredicate());
  }

  /// @returns true if the predicate of this ICmpInst is commutative
  /// Determine if this relation is commutative.
  bool isCommutative() const { return isEquality(); }

  /// Return true if the predicate is relational (not EQ or NE).
  ///
  bool isRelational() const {
    return !isEquality();
  }

  /// Return true if the predicate is relational (not EQ or NE).
  ///
  static bool isRelational(Predicate P) {
    return !isEquality(P);
  }

  /// Return true if the predicate is SGT or UGT.
  ///
  static bool isGT(Predicate P) {
    return P == ICMP_SGT || P == ICMP_UGT;
  }

  /// Return true if the predicate is SLT or ULT.
  ///
  static bool isLT(Predicate P) {
    return P == ICMP_SLT || P == ICMP_ULT;
  }

  /// Return true if the predicate is SGE or UGE.
  ///
  static bool isGE(Predicate P) {
    return P == ICMP_SGE || P == ICMP_UGE;
  }

  /// Return true if the predicate is SLE or ULE.
  ///
  static bool isLE(Predicate P) {
    return P == ICMP_SLE || P == ICMP_ULE;
  }

  /// Returns the sequence of all ICmp predicates.
  ///
  static auto predicates() { return ICmpPredicates(); }

  /// Exchange the two operands to this instruction in such a way that it does
  /// not modify the semantics of the instruction. The predicate value may be
  /// changed to retain the same result if the predicate is order dependent
  /// (e.g. ult).
  /// Swap operands and adjust predicate.
  void swapOperands() {
    setPredicate(getSwappedPredicate());
    Op<0>().swap(Op<1>());
  }

  /// Return result of `LHS Pred RHS` comparison.
  static bool compare(const APInt &LHS, const APInt &RHS,
                      ICmpInst::Predicate Pred);

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::ICmp;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
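// Example (illustrative sketch): evaluating an integer predicate on constants
// with the static compare() helper declared above, and mapping an unsigned
// predicate to its signed counterpart. The values shown are hypothetical.
//
//   APInt A(32, 2), B(32, 7);
//   bool LT = ICmpInst::compare(A, B, ICmpInst::ICMP_ULT);         // true
//   auto SPred = ICmpInst::getSignedPredicate(ICmpInst::ICMP_ULT); // ICMP_SLT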
//===----------------------------------------------------------------------===//
//                               FCmpInst Class
//===----------------------------------------------------------------------===//

/// This instruction compares its operands according to the predicate given
/// to the constructor. It only operates on floating point values or packed
/// vectors of floating point values. The operands must be identical types.
/// Represents a floating point comparison operator.
class FCmpInst: public CmpInst {
  void AssertOK() {
    assert(isFPPredicate() && "Invalid FCmp predicate value");
    assert(getOperand(0)->getType() == getOperand(1)->getType() &&
           "Both operands to FCmp instruction are not of the same type!");
    // Check that the operands are the right type
    assert(getOperand(0)->getType()->isFPOrFPVectorTy() &&
           "Invalid operand types for FCmp instruction");
  }

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical FCmpInst
  FCmpInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics.
  FCmpInst(
    Instruction *InsertBefore, ///< Where to insert
    Predicate pred,            ///< The predicate to use for the comparison
    Value *LHS,                ///< The left-hand-side of the expression
    Value *RHS,                ///< The right-hand-side of the expression
    const Twine &NameStr = ""  ///< Name of the instruction
  ) : CmpInst(makeCmpResultType(LHS->getType()),
              Instruction::FCmp, pred, LHS, RHS, NameStr,
              InsertBefore) {
    AssertOK();
  }

  /// Constructor with insert-at-end semantics.
  FCmpInst(
    BasicBlock &InsertAtEnd,   ///< Block to insert into.
    Predicate pred,            ///< The predicate to use for the comparison
    Value *LHS,                ///< The left-hand-side of the expression
    Value *RHS,                ///< The right-hand-side of the expression
    const Twine &NameStr = ""  ///< Name of the instruction
  ) : CmpInst(makeCmpResultType(LHS->getType()),
              Instruction::FCmp, pred, LHS, RHS, NameStr,
              &InsertAtEnd) {
    AssertOK();
  }

  /// Constructor with no-insertion semantics
  FCmpInst(
    Predicate Pred,            ///< The predicate to use for the comparison
    Value *LHS,                ///< The left-hand-side of the expression
    Value *RHS,                ///< The right-hand-side of the expression
    const Twine &NameStr = "", ///< Name of the instruction
    Instruction *FlagsSource = nullptr
  ) : CmpInst(makeCmpResultType(LHS->getType()), Instruction::FCmp, Pred, LHS,
              RHS, NameStr, nullptr, FlagsSource) {
    AssertOK();
  }

  /// @returns true if the predicate of this instruction is EQ or NE.
  /// Determine if this is an equality predicate.
  static bool isEquality(Predicate Pred) {
    return Pred == FCMP_OEQ || Pred == FCMP_ONE || Pred == FCMP_UEQ ||
           Pred == FCMP_UNE;
  }

  /// @returns true if the predicate of this instruction is EQ or NE.
  /// Determine if this is an equality predicate.
  bool isEquality() const { return isEquality(getPredicate()); }

  /// @returns true if the predicate of this instruction is commutative.
  /// Determine if this is a commutative predicate.
  bool isCommutative() const {
    return isEquality() ||
           getPredicate() == FCMP_FALSE ||
           getPredicate() == FCMP_TRUE ||
           getPredicate() == FCMP_ORD ||
           getPredicate() == FCMP_UNO;
  }

  /// @returns true if the predicate is relational (not EQ or NE).
  /// Determine if this is a relational predicate.
  bool isRelational() const { return !isEquality(); }

  /// Exchange the two operands to this instruction in such a way that it does
  /// not modify the semantics of the instruction. The predicate value may be
  /// changed to retain the same result if the predicate is order dependent
  /// (e.g. ult).
  /// Swap operands and adjust predicate.
  void swapOperands() {
    setPredicate(getSwappedPredicate());
    Op<0>().swap(Op<1>());
  }

  /// Returns the sequence of all FCmp predicates.
  ///
  static auto predicates() { return FCmpPredicates(); }

  /// Return result of `LHS Pred RHS` comparison.
  static bool compare(const APFloat &LHS, const APFloat &RHS,
                      FCmpInst::Predicate Pred);

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::FCmp;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
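// Example (illustrative sketch): constant-folding a floating-point predicate
// with the static compare() helper declared above. The values are
// hypothetical.
//
//   APFloat X(1.0), Y(2.0);
//   bool Ordered = FCmpInst::compare(X, Y, FCmpInst::FCMP_OLT); // true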
//===----------------------------------------------------------------------===//
/// This class represents a function call, abstracting a target
/// machine's calling convention. This class uses the low bit of the
/// SubClassData field to indicate whether or not this is a tail call. The
/// rest of the bits hold the calling convention of the call.
///
class CallInst : public CallBase {
  CallInst(const CallInst &CI);

  /// Construct a CallInst given a range of arguments.
  inline CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                  ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr,
                  Instruction *InsertBefore);

  inline CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                  const Twine &NameStr, Instruction *InsertBefore)
      : CallInst(Ty, Func, Args, None, NameStr, InsertBefore) {}

  /// Construct a CallInst given a range of arguments.
  inline CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                  ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr,
                  BasicBlock *InsertAtEnd);

  explicit CallInst(FunctionType *Ty, Value *F, const Twine &NameStr,
                    Instruction *InsertBefore);

  CallInst(FunctionType *ty, Value *F, const Twine &NameStr,
           BasicBlock *InsertAtEnd);

  void init(FunctionType *FTy, Value *Func, ArrayRef<Value *> Args,
            ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr);
  void init(FunctionType *FTy, Value *Func, const Twine &NameStr);

  /// Compute the number of operands to allocate.
  static int ComputeNumOperands(int NumArgs, int NumBundleInputs = 0) {
    // We need one operand for the called function, plus the input operand
    // counts provided.
    return 1 + NumArgs + NumBundleInputs;
  }
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  CallInst *cloneImpl() const;

public:
  static CallInst *Create(FunctionType *Ty, Value *F, const Twine &NameStr = "",
                          Instruction *InsertBefore = nullptr) {
    return new (ComputeNumOperands(0)) CallInst(Ty, F, NameStr, InsertBefore);
  }

  static CallInst *Create(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                          const Twine &NameStr,
                          Instruction *InsertBefore = nullptr) {
    return new (ComputeNumOperands(Args.size()))
        CallInst(Ty, Func, Args, None, NameStr, InsertBefore);
  }

  static CallInst *Create(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                          ArrayRef<OperandBundleDef> Bundles = None,
                          const Twine &NameStr = "",
                          Instruction *InsertBefore = nullptr) {
    const int NumOperands =
        ComputeNumOperands(Args.size(), CountBundleInputs(Bundles));
    const unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);

    return new (NumOperands, DescriptorBytes)
        CallInst(Ty, Func, Args, Bundles, NameStr, InsertBefore);
  }

  static CallInst *Create(FunctionType *Ty, Value *F, const Twine &NameStr,
                          BasicBlock *InsertAtEnd) {
    return new (ComputeNumOperands(0)) CallInst(Ty, F, NameStr, InsertAtEnd);
  }

  static CallInst *Create(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                          const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return new (ComputeNumOperands(Args.size()))
        CallInst(Ty, Func, Args, None, NameStr, InsertAtEnd);
  }

  static CallInst *Create(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                          ArrayRef<OperandBundleDef> Bundles,
                          const Twine &NameStr, BasicBlock *InsertAtEnd) {
    const int NumOperands =
        ComputeNumOperands(Args.size(), CountBundleInputs(Bundles));
    const unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);

    return new (NumOperands, DescriptorBytes)
        CallInst(Ty, Func, Args, Bundles, NameStr, InsertAtEnd);
  }

  static CallInst *Create(FunctionCallee Func, const Twine &NameStr = "",
                          Instruction *InsertBefore = nullptr) {
    return Create(Func.getFunctionType(), Func.getCallee(), NameStr,
                  InsertBefore);
  }

  static CallInst *Create(FunctionCallee Func, ArrayRef<Value *> Args,
                          ArrayRef<OperandBundleDef> Bundles = None,
                          const Twine &NameStr = "",
                          Instruction *InsertBefore = nullptr) {
    return Create(Func.getFunctionType(), Func.getCallee(), Args, Bundles,
                  NameStr, InsertBefore);
  }

  static CallInst *Create(FunctionCallee Func, ArrayRef<Value *> Args,
                          const Twine &NameStr,
                          Instruction *InsertBefore = nullptr) {
    return Create(Func.getFunctionType(), Func.getCallee(), Args, NameStr,
                  InsertBefore);
  }

  static CallInst *Create(FunctionCallee Func, const Twine &NameStr,
                          BasicBlock *InsertAtEnd) {
    return Create(Func.getFunctionType(), Func.getCallee(), NameStr,
                  InsertAtEnd);
  }

  static CallInst *Create(FunctionCallee Func, ArrayRef<Value *> Args,
                          const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return Create(Func.getFunctionType(), Func.getCallee(), Args, NameStr,
                  InsertAtEnd);
  }

  static CallInst *Create(FunctionCallee Func, ArrayRef<Value *> Args,
                          ArrayRef<OperandBundleDef> Bundles,
                          const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return Create(Func.getFunctionType(), Func.getCallee(), Args, Bundles,
                  NameStr, InsertAtEnd);
  }
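  // Example (illustrative sketch): creating a call through the FunctionCallee
  // overloads above. `M`, `Ctx`, `Arg`, `InsertBefore` and the "my_hook"
  // function name are hypothetical values supplied by the caller.
  //
  //   FunctionCallee Callee = M.getOrInsertFunction(
  //       "my_hook", Type::getVoidTy(Ctx), Type::getInt32Ty(Ctx));
  //   CallInst *Call = CallInst::Create(Callee, {Arg}, "", InsertBefore);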
  /// Create a clone of \p CI with a different set of operand bundles and
  /// insert it before \p InsertPt.
  ///
  /// The returned call instruction is identical to \p CI in every way except
  /// that the operand bundles for the new instruction are set to the operand
  /// bundles in \p Bundles.
  static CallInst *Create(CallInst *CI, ArrayRef<OperandBundleDef> Bundles,
                          Instruction *InsertPt = nullptr);

  /// Generate the IR for a call to malloc:
  /// 1. Compute the malloc call's argument as the specified type's size,
  ///    possibly multiplied by the array size if the array size is not
  ///    constant 1.
  /// 2. Call malloc with that argument.
  /// 3. Bitcast the result of the malloc call to the specified type.
  static Instruction *CreateMalloc(Instruction *InsertBefore, Type *IntPtrTy,
                                   Type *AllocTy, Value *AllocSize,
                                   Value *ArraySize = nullptr,
                                   Function *MallocF = nullptr,
                                   const Twine &Name = "");
  static Instruction *CreateMalloc(BasicBlock *InsertAtEnd, Type *IntPtrTy,
                                   Type *AllocTy, Value *AllocSize,
                                   Value *ArraySize = nullptr,
                                   Function *MallocF = nullptr,
                                   const Twine &Name = "");
  static Instruction *CreateMalloc(Instruction *InsertBefore, Type *IntPtrTy,
                                   Type *AllocTy, Value *AllocSize,
                                   Value *ArraySize = nullptr,
                                   ArrayRef<OperandBundleDef> Bundles = None,
                                   Function *MallocF = nullptr,
                                   const Twine &Name = "");
  static Instruction *CreateMalloc(BasicBlock *InsertAtEnd, Type *IntPtrTy,
                                   Type *AllocTy, Value *AllocSize,
                                   Value *ArraySize = nullptr,
                                   ArrayRef<OperandBundleDef> Bundles = None,
                                   Function *MallocF = nullptr,
                                   const Twine &Name = "");

  /// Generate the IR for a call to the builtin free function.
  static Instruction *CreateFree(Value *Source, Instruction *InsertBefore);
  static Instruction *CreateFree(Value *Source, BasicBlock *InsertAtEnd);
  static Instruction *CreateFree(Value *Source,
                                 ArrayRef<OperandBundleDef> Bundles,
                                 Instruction *InsertBefore);
  static Instruction *CreateFree(Value *Source,
                                 ArrayRef<OperandBundleDef> Bundles,
                                 BasicBlock *InsertAtEnd);

  // Note that 'musttail' implies 'tail'.
  enum TailCallKind : unsigned {
    TCK_None = 0,
    TCK_Tail = 1,
    TCK_MustTail = 2,
    TCK_NoTail = 3,
    TCK_LAST = TCK_NoTail
  };

  using TailCallKindField = Bitfield::Element<TailCallKind, 0, 2, TCK_LAST>;
  static_assert(
      Bitfield::areContiguous<TailCallKindField, CallBase::CallingConvField>(),
      "Bitfields must be contiguous");

  TailCallKind getTailCallKind() const {
    return getSubclassData<TailCallKindField>();
  }

  bool isTailCall() const {
    TailCallKind Kind = getTailCallKind();
    return Kind == TCK_Tail || Kind == TCK_MustTail;
  }

  bool isMustTailCall() const { return getTailCallKind() == TCK_MustTail; }

  bool isNoTailCall() const { return getTailCallKind() == TCK_NoTail; }

  void setTailCallKind(TailCallKind TCK) {
    setSubclassData<TailCallKindField>(TCK);
  }

  void setTailCall(bool IsTc = true) {
    setTailCallKind(IsTc ? TCK_Tail : TCK_None);
  }

  /// Return true if the call can return twice
  bool canReturnTwice() const { return hasFnAttr(Attribute::ReturnsTwice); }
  void setCanReturnTwice() { addFnAttr(Attribute::ReturnsTwice); }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Call;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

  /// Updates profile metadata by scaling it by \p S / \p T.
  void updateProfWeight(uint64_t S, uint64_t T);

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }
};
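// Example (illustrative sketch): marking an existing call as a must-tail call
// via the TailCallKind API above. `Call` is a hypothetical CallInst*.
//
//   Call->setTailCallKind(CallInst::TCK_MustTail);
//   assert(Call->isTailCall() && Call->isMustTailCall());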
CallInst::CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                   ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr,
                   BasicBlock *InsertAtEnd)
    : CallBase(Ty->getReturnType(), Instruction::Call,
               OperandTraits<CallBase>::op_end(this) -
                   (Args.size() + CountBundleInputs(Bundles) + 1),
               unsigned(Args.size() + CountBundleInputs(Bundles) + 1),
               InsertAtEnd) {
  init(Ty, Func, Args, Bundles, NameStr);
}

CallInst::CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                   ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr,
                   Instruction *InsertBefore)
    : CallBase(Ty->getReturnType(), Instruction::Call,
               OperandTraits<CallBase>::op_end(this) -
                   (Args.size() + CountBundleInputs(Bundles) + 1),
               unsigned(Args.size() + CountBundleInputs(Bundles) + 1),
               InsertBefore) {
  init(Ty, Func, Args, Bundles, NameStr);
}
//===----------------------------------------------------------------------===//
//                               SelectInst Class
//===----------------------------------------------------------------------===//

/// This class represents the LLVM 'select' instruction.
///
class SelectInst : public Instruction {
  SelectInst(Value *C, Value *S1, Value *S2, const Twine &NameStr,
             Instruction *InsertBefore)
    : Instruction(S1->getType(), Instruction::Select,
                  &Op<0>(), 3, InsertBefore) {
    init(C, S1, S2);
    setName(NameStr);
  }

  SelectInst(Value *C, Value *S1, Value *S2, const Twine &NameStr,
             BasicBlock *InsertAtEnd)
    : Instruction(S1->getType(), Instruction::Select,
                  &Op<0>(), 3, InsertAtEnd) {
    init(C, S1, S2);
    setName(NameStr);
  }

  void init(Value *C, Value *S1, Value *S2) {
    assert(!areInvalidOperands(C, S1, S2) && "Invalid operands for select");
    Op<0>() = C;
    Op<1>() = S1;
    Op<2>() = S2;
  }

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  SelectInst *cloneImpl() const;

public:
  static SelectInst *Create(Value *C, Value *S1, Value *S2,
                            const Twine &NameStr = "",
                            Instruction *InsertBefore = nullptr,
                            Instruction *MDFrom = nullptr) {
    SelectInst *Sel = new(3) SelectInst(C, S1, S2, NameStr, InsertBefore);
    if (MDFrom)
      Sel->copyMetadata(*MDFrom);
    return Sel;
  }

  static SelectInst *Create(Value *C, Value *S1, Value *S2,
                            const Twine &NameStr,
                            BasicBlock *InsertAtEnd) {
    return new(3) SelectInst(C, S1, S2, NameStr, InsertAtEnd);
  }

  const Value *getCondition() const { return Op<0>(); }
  const Value *getTrueValue() const { return Op<1>(); }
  const Value *getFalseValue() const { return Op<2>(); }
  Value *getCondition() { return Op<0>(); }
  Value *getTrueValue() { return Op<1>(); }
  Value *getFalseValue() { return Op<2>(); }
  void setCondition(Value *V) { Op<0>() = V; }
  void setTrueValue(Value *V) { Op<1>() = V; }
  void setFalseValue(Value *V) { Op<2>() = V; }

  /// Swap the true and false values of the select instruction.
  /// This doesn't swap prof metadata.
  void swapValues() { Op<1>().swap(Op<2>()); }

  /// Return a string if the specified operands are invalid
  /// for a select operation, otherwise return null.
  static const char *areInvalidOperands(Value *Cond, Value *True, Value *False);

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  OtherOps getOpcode() const {
    return static_cast<OtherOps>(Instruction::getOpcode());
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Select;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};

template <>
struct OperandTraits<SelectInst> : public FixedNumOperandTraits<SelectInst, 3> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(SelectInst, Value)
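// Example (illustrative sketch): validating operands and building a select.
// `Cond`, `A`, `B` and `InsertBefore` are hypothetical values of matching
// types supplied by the caller.
//
//   assert(!SelectInst::areInvalidOperands(Cond, A, B));
//   SelectInst *Sel = SelectInst::Create(Cond, A, B, "min", InsertBefore);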
//===----------------------------------------------------------------------===//
//                                VAArgInst Class
//===----------------------------------------------------------------------===//

/// This class represents the va_arg llvm instruction, which returns
/// an argument of the specified type given a va_list and increments that list
///
class VAArgInst : public UnaryInstruction {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  VAArgInst *cloneImpl() const;

public:
  VAArgInst(Value *List, Type *Ty, const Twine &NameStr = "",
            Instruction *InsertBefore = nullptr)
      : UnaryInstruction(Ty, VAArg, List, InsertBefore) {
    setName(NameStr);
  }

  VAArgInst(Value *List, Type *Ty, const Twine &NameStr,
            BasicBlock *InsertAtEnd)
      : UnaryInstruction(Ty, VAArg, List, InsertAtEnd) {
    setName(NameStr);
  }

  Value *getPointerOperand() { return getOperand(0); }
  const Value *getPointerOperand() const { return getOperand(0); }
  static unsigned getPointerOperandIndex() { return 0U; }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == VAArg;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
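// Example (illustrative sketch): reading the next i32 from a va_list pointer.
// `VAListPtr`, `Ctx` and `InsertBefore` are hypothetical.
//
//   auto *VA = new VAArgInst(VAListPtr, Type::getInt32Ty(Ctx), "arg",
//                            InsertBefore);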
//===----------------------------------------------------------------------===//
//                           ExtractElementInst Class
//===----------------------------------------------------------------------===//

/// This instruction extracts a single (scalar)
/// element from a VectorType value
///
class ExtractElementInst : public Instruction {
  ExtractElementInst(Value *Vec, Value *Idx, const Twine &NameStr = "",
                     Instruction *InsertBefore = nullptr);
  ExtractElementInst(Value *Vec, Value *Idx, const Twine &NameStr,
                     BasicBlock *InsertAtEnd);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  ExtractElementInst *cloneImpl() const;

public:
  static ExtractElementInst *Create(Value *Vec, Value *Idx,
                                    const Twine &NameStr = "",
                                    Instruction *InsertBefore = nullptr) {
    return new(2) ExtractElementInst(Vec, Idx, NameStr, InsertBefore);
  }

  static ExtractElementInst *Create(Value *Vec, Value *Idx,
                                    const Twine &NameStr,
                                    BasicBlock *InsertAtEnd) {
    return new(2) ExtractElementInst(Vec, Idx, NameStr, InsertAtEnd);
  }

  /// Return true if an extractelement instruction can be
  /// formed with the specified operands.
  static bool isValidOperands(const Value *Vec, const Value *Idx);

  Value *getVectorOperand() { return Op<0>(); }
  Value *getIndexOperand() { return Op<1>(); }
  const Value *getVectorOperand() const { return Op<0>(); }
  const Value *getIndexOperand() const { return Op<1>(); }

  VectorType *getVectorOperandType() const {
    return cast<VectorType>(getVectorOperand()->getType());
  }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::ExtractElement;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};

template <>
struct OperandTraits<ExtractElementInst> :
  public FixedNumOperandTraits<ExtractElementInst, 2> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ExtractElementInst, Value)
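// Example (illustrative sketch): extracting lane 0 of a vector value after
// checking the operands. `Vec`, `Ctx` and `InsertBefore` are hypothetical.
//
//   Value *Zero = ConstantInt::get(Type::getInt64Ty(Ctx), 0);
//   assert(ExtractElementInst::isValidOperands(Vec, Zero));
//   Value *Lane0 =
//       ExtractElementInst::Create(Vec, Zero, "lane0", InsertBefore);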
//===----------------------------------------------------------------------===//
//                           InsertElementInst Class
//===----------------------------------------------------------------------===//

/// This instruction inserts a single (scalar)
/// element into a VectorType value
///
class InsertElementInst : public Instruction {
  InsertElementInst(Value *Vec, Value *NewElt, Value *Idx,
                    const Twine &NameStr = "",
                    Instruction *InsertBefore = nullptr);
  InsertElementInst(Value *Vec, Value *NewElt, Value *Idx, const Twine &NameStr,
                    BasicBlock *InsertAtEnd);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  InsertElementInst *cloneImpl() const;

public:
  static InsertElementInst *Create(Value *Vec, Value *NewElt, Value *Idx,
                                   const Twine &NameStr = "",
                                   Instruction *InsertBefore = nullptr) {
    return new(3) InsertElementInst(Vec, NewElt, Idx, NameStr, InsertBefore);
  }

  static InsertElementInst *Create(Value *Vec, Value *NewElt, Value *Idx,
                                   const Twine &NameStr,
                                   BasicBlock *InsertAtEnd) {
    return new(3) InsertElementInst(Vec, NewElt, Idx, NameStr, InsertAtEnd);
  }

  /// Return true if an insertelement instruction can be
  /// formed with the specified operands.
  static bool isValidOperands(const Value *Vec, const Value *NewElt,
                              const Value *Idx);

  /// Overload to return most specific vector type.
  ///
  VectorType *getType() const {
    return cast<VectorType>(Instruction::getType());
  }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::InsertElement;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};

template <>
struct OperandTraits<InsertElementInst> :
  public FixedNumOperandTraits<InsertElementInst, 3> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(InsertElementInst, Value)
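// Example (illustrative sketch): writing a new element into lane 1 of a
// vector. `Vec`, `Elt`, `Ctx` and `InsertBefore` are hypothetical.
//
//   Value *One = ConstantInt::get(Type::getInt64Ty(Ctx), 1);
//   assert(InsertElementInst::isValidOperands(Vec, Elt, One));
//   Value *NewVec =
//       InsertElementInst::Create(Vec, Elt, One, "vec.upd", InsertBefore);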
//===----------------------------------------------------------------------===//
//                           ShuffleVectorInst Class
//===----------------------------------------------------------------------===//

constexpr int UndefMaskElem = -1;

/// This instruction constructs a fixed permutation of two
/// input vectors.
///
/// For each element of the result vector, the shuffle mask selects an element
/// from one of the input vectors to copy to the result. Non-negative elements
/// in the mask represent an index into the concatenated pair of input vectors.
/// UndefMaskElem (-1) specifies that the result element is undefined.
///
/// For scalable vectors, all the elements of the mask must be 0 or -1. This
/// requirement may be relaxed in the future.
class ShuffleVectorInst : public Instruction {
  SmallVector<int, 4> ShuffleMask;
  Constant *ShuffleMaskForBitcode;

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  ShuffleVectorInst *cloneImpl() const;

public:
  ShuffleVectorInst(Value *V1, Value *Mask, const Twine &NameStr = "",
                    Instruction *InsertBefore = nullptr);
  ShuffleVectorInst(Value *V1, Value *Mask, const Twine &NameStr,
                    BasicBlock *InsertAtEnd);
  ShuffleVectorInst(Value *V1, ArrayRef<int> Mask, const Twine &NameStr = "",
                    Instruction *InsertBefore = nullptr);
  ShuffleVectorInst(Value *V1, ArrayRef<int> Mask, const Twine &NameStr,
                    BasicBlock *InsertAtEnd);
  ShuffleVectorInst(Value *V1, Value *V2, Value *Mask,
                    const Twine &NameStr = "",
                    Instruction *InsertBefore = nullptr);
  ShuffleVectorInst(Value *V1, Value *V2, Value *Mask,
                    const Twine &NameStr, BasicBlock *InsertAtEnd);
  ShuffleVectorInst(Value *V1, Value *V2, ArrayRef<int> Mask,
                    const Twine &NameStr = "",
                    Instruction *InsertBefore = nullptr);
  ShuffleVectorInst(Value *V1, Value *V2, ArrayRef<int> Mask,
                    const Twine &NameStr, BasicBlock *InsertAtEnd);

  void *operator new(size_t S) { return User::operator new(S, 2); }
  void operator delete(void *Ptr) { return User::operator delete(Ptr); }

  /// Swap the operands and adjust the mask to preserve the semantics
  /// of the instruction.
  void commute();

  /// Return true if a shufflevector instruction can be
  /// formed with the specified operands.
  static bool isValidOperands(const Value *V1, const Value *V2,
                              const Value *Mask);
  static bool isValidOperands(const Value *V1, const Value *V2,
                              ArrayRef<int> Mask);

  /// Overload to return most specific vector type.
  ///
  VectorType *getType() const {
    return cast<VectorType>(Instruction::getType());
  }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Return the shuffle mask value of this instruction for the given element
  /// index. Return UndefMaskElem if the element is undef.
  int getMaskValue(unsigned Elt) const { return ShuffleMask[Elt]; }

  /// Convert the input shuffle mask operand to a vector of integers. Undefined
  /// elements of the mask are returned as UndefMaskElem.
  static void getShuffleMask(const Constant *Mask,
                             SmallVectorImpl<int> &Result);

  /// Return the mask for this instruction as a vector of integers. Undefined
  /// elements of the mask are returned as UndefMaskElem.
  void getShuffleMask(SmallVectorImpl<int> &Result) const {
    Result.assign(ShuffleMask.begin(), ShuffleMask.end());
  }

  /// Return the mask for this instruction, for use in bitcode.
  ///
  /// TODO: This is temporary until we decide a new bitcode encoding for
  /// shufflevector.
  Constant *getShuffleMaskForBitcode() const { return ShuffleMaskForBitcode; }

  static Constant *convertShuffleMaskForBitcode(ArrayRef<int> Mask,
                                                Type *ResultTy);

  void setShuffleMask(ArrayRef<int> Mask);

  ArrayRef<int> getShuffleMask() const { return ShuffleMask; }
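  // Example (illustrative sketch): building a two-source shuffle from an
  // explicit integer mask and reading the mask back. `A` and `B` are
  // hypothetical vector values of the same type; `InsertBefore` is
  // hypothetical as well.
  //
  //   int MaskVals[] = {0, 4, 2, 6};
  //   auto *Shuf = new ShuffleVectorInst(A, B, MaskVals, "trn1", InsertBefore);
  //   ArrayRef<int> Mask = Shuf->getShuffleMask(); // {0, 4, 2, 6}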
  /// Return true if this shuffle returns a vector with a different number of
  /// elements than its source vectors.
  /// Examples: shufflevector <4 x n> A, <4 x n> B, <1,2,3>
  ///           shufflevector <4 x n> A, <4 x n> B, <1,2,3,4,5>
  bool changesLength() const {
    unsigned NumSourceElts = cast<VectorType>(Op<0>()->getType())
                                 ->getElementCount()
                                 .getKnownMinValue();
    unsigned NumMaskElts = ShuffleMask.size();
    return NumSourceElts != NumMaskElts;
  }

  /// Return true if this shuffle returns a vector with a greater number of
  /// elements than its source vectors.
  /// Example: shufflevector <2 x n> A, <2 x n> B, <1,2,3>
  bool increasesLength() const {
    unsigned NumSourceElts = cast<VectorType>(Op<0>()->getType())
                                 ->getElementCount()
                                 .getKnownMinValue();
    unsigned NumMaskElts = ShuffleMask.size();
    return NumSourceElts < NumMaskElts;
  }

  /// Return true if this shuffle mask chooses elements from exactly one source
  /// vector.
  /// Example: <7,5,undef,7>
  /// This assumes that vector operands are the same length as the mask.
  static bool isSingleSourceMask(ArrayRef<int> Mask);
  static bool isSingleSourceMask(const Constant *Mask) {
    assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
    SmallVector<int, 16> MaskAsInts;
    getShuffleMask(Mask, MaskAsInts);
    return isSingleSourceMask(MaskAsInts);
  }

  /// Return true if this shuffle chooses elements from exactly one source
  /// vector without changing the length of that vector.
  /// Example: shufflevector <4 x n> A, <4 x n> B, <3,0,undef,3>
  /// TODO: Optionally allow length-changing shuffles.
  bool isSingleSource() const {
    return !changesLength() && isSingleSourceMask(ShuffleMask);
  }

  /// Return true if this shuffle mask chooses elements from exactly one source
  /// vector without lane crossings. A shuffle using this mask is not
  /// necessarily a no-op because it may change the number of elements from its
  /// input vectors or it may provide demanded bits knowledge via undef lanes.
  /// Example: <undef,undef,2,3>
  static bool isIdentityMask(ArrayRef<int> Mask);
  static bool isIdentityMask(const Constant *Mask) {
    assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
    SmallVector<int, 16> MaskAsInts;
    getShuffleMask(Mask, MaskAsInts);
    return isIdentityMask(MaskAsInts);
  }

  /// Return true if this shuffle chooses elements from exactly one source
  /// vector without lane crossings and does not change the number of elements
  /// from its input vectors.
  /// Example: shufflevector <4 x n> A, <4 x n> B, <4,undef,6,undef>
  bool isIdentity() const {
    return !changesLength() && isIdentityMask(ShuffleMask);
  }

  /// Return true if this shuffle lengthens exactly one source vector with
  /// undefs in the high elements.
  bool isIdentityWithPadding() const;

  /// Return true if this shuffle extracts the first N elements of exactly one
  /// source vector.
  bool isIdentityWithExtract() const;

  /// Return true if this shuffle concatenates its 2 source vectors. This
  /// returns false if either input is undefined. In that case, the shuffle is
  /// better classified as an identity with padding operation.
  bool isConcat() const;

  /// Return true if this shuffle mask chooses elements from its source vectors
  /// without lane crossings. A shuffle using this mask would be
  /// equivalent to a vector select with a constant condition operand.
  /// Example: <4,1,6,undef>
  /// This returns false if the mask does not choose from both input vectors.
  /// In that case, the shuffle is better classified as an identity shuffle.
  /// This assumes that vector operands are the same length as the mask
  /// (a length-changing shuffle can never be equivalent to a vector select).
  static bool isSelectMask(ArrayRef<int> Mask);
  static bool isSelectMask(const Constant *Mask) {
    assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
    SmallVector<int, 16> MaskAsInts;
    getShuffleMask(Mask, MaskAsInts);
    return isSelectMask(MaskAsInts);
  }
  /// Return true if this shuffle chooses elements from its source vectors
  /// without lane crossings and all operands have the same number of elements.
  /// In other words, this shuffle is equivalent to a vector select with a
  /// constant condition operand.
  /// Example: shufflevector <4 x n> A, <4 x n> B, <undef,1,6,3>
  /// This returns false if the mask does not choose from both input vectors.
  /// In that case, the shuffle is better classified as an identity shuffle.
  /// TODO: Optionally allow length-changing shuffles.
  bool isSelect() const {
    return !changesLength() && isSelectMask(ShuffleMask);
  }

  /// Return true if this shuffle mask swaps the order of elements from exactly
  /// one source vector.
  /// Example: <7,6,undef,4>
  /// This assumes that vector operands are the same length as the mask.
  static bool isReverseMask(ArrayRef<int> Mask);
  static bool isReverseMask(const Constant *Mask) {
    assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
    SmallVector<int, 16> MaskAsInts;
    getShuffleMask(Mask, MaskAsInts);
    return isReverseMask(MaskAsInts);
  }

  /// Return true if this shuffle swaps the order of elements from exactly
  /// one source vector.
  /// Example: shufflevector <4 x n> A, <4 x n> B, <3,undef,1,undef>
  /// TODO: Optionally allow length-changing shuffles.
  bool isReverse() const {
    return !changesLength() && isReverseMask(ShuffleMask);
  }

  /// Return true if this shuffle mask chooses all elements with the same value
  /// as the first element of exactly one source vector.
  /// Example: <4,undef,undef,4>
  /// This assumes that vector operands are the same length as the mask.
  static bool isZeroEltSplatMask(ArrayRef<int> Mask);
  static bool isZeroEltSplatMask(const Constant *Mask) {
    assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
    SmallVector<int, 16> MaskAsInts;
    getShuffleMask(Mask, MaskAsInts);
    return isZeroEltSplatMask(MaskAsInts);
  }

  /// Return true if all elements of this shuffle are the same value as the
  /// first element of exactly one source vector without changing the length
  /// of that vector.
  /// Example: shufflevector <4 x n> A, <4 x n> B, <undef,0,undef,0>
  /// TODO: Optionally allow length-changing shuffles.
  /// TODO: Optionally allow splats from other elements.
  bool isZeroEltSplat() const {
    return !changesLength() && isZeroEltSplatMask(ShuffleMask);
  }

  /// Return true if this shuffle mask is a transpose mask.
  /// Transpose vector masks transpose a 2xn matrix. They read corresponding
  /// even- or odd-numbered vector elements from two n-dimensional source
  /// vectors and write each result into consecutive elements of an
  /// n-dimensional destination vector. Two shuffles are necessary to complete
  /// the transpose, one for the even elements and another for the odd elements.
  /// This description closely follows how the TRN1 and TRN2 AArch64
  /// instructions operate.
  ///
  /// For example, a simple 2x2 matrix can be transposed with:
  ///
  ///   ; Original matrix
  ///   m0 = < a, b >
  ///   m1 = < c, d >
  ///
  ///   ; Transposed matrix
  ///   t0 = < a, c > = shufflevector m0, m1, < 0, 2 >
  ///   t1 = < b, d > = shufflevector m0, m1, < 1, 3 >
  ///
  /// For matrices having greater than n columns, the resulting nx2 transposed
  /// matrix is stored in two result vectors such that one vector contains
  /// interleaved elements from all the even-numbered rows and the other vector
  /// contains interleaved elements from all the odd-numbered rows. For example,
  /// a 2x4 matrix can be transposed with:
  ///
  ///   ; Original matrix
  ///   m0 = < a, b, c, d >
  ///   m1 = < e, f, g, h >
  ///
  ///   ; Transposed matrix
  ///   t0 = < a, e, c, g > = shufflevector m0, m1 < 0, 4, 2, 6 >
  ///   t1 = < b, f, d, h > = shufflevector m0, m1 < 1, 5, 3, 7 >
  static bool isTransposeMask(ArrayRef<int> Mask);
  static bool isTransposeMask(const Constant *Mask) {
    assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
    SmallVector<int, 16> MaskAsInts;
    getShuffleMask(Mask, MaskAsInts);
    return isTransposeMask(MaskAsInts);
  }
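  // Example (illustrative sketch): the even-lane half of the 2x4 transpose
  // described in the comment above, expressed as a mask query.
  //
  //   int Even[] = {0, 4, 2, 6};
  //   bool IsTrn = ShuffleVectorInst::isTransposeMask(Even); // true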
  1922. /// Return true if this shuffle transposes the elements of its inputs without
  1923. /// changing the length of the vectors. This operation may also be known as a
  1924. /// merge or interleave. See the description for isTransposeMask() for the
  1925. /// exact specification.
  1926. /// Example: shufflevector <4 x n> A, <4 x n> B, <0,4,2,6>
  1927. bool isTranspose() const {
  1928. return !changesLength() && isTransposeMask(ShuffleMask);
  1929. }
  1930. /// Return true if this shuffle mask is an extract subvector mask.
  1931. /// A valid extract subvector mask returns a smaller vector from a single
  1932. /// source operand. The base extraction index is returned as well.
  1933. static bool isExtractSubvectorMask(ArrayRef<int> Mask, int NumSrcElts,
  1934. int &Index);
  1935. static bool isExtractSubvectorMask(const Constant *Mask, int NumSrcElts,
  1936. int &Index) {
  1937. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1938. // Not possible to express a shuffle mask for a scalable vector for this
  1939. // case.
  1940. if (isa<ScalableVectorType>(Mask->getType()))
  1941. return false;
  1942. SmallVector<int, 16> MaskAsInts;
  1943. getShuffleMask(Mask, MaskAsInts);
  1944. return isExtractSubvectorMask(MaskAsInts, NumSrcElts, Index);
  1945. }
  1946. /// Return true if this shuffle mask is an extract subvector mask.
  1947. bool isExtractSubvectorMask(int &Index) const {
  1948. // Not possible to express a shuffle mask for a scalable vector for this
  1949. // case.
  1950. if (isa<ScalableVectorType>(getType()))
  1951. return false;
  1952. int NumSrcElts =
  1953. cast<FixedVectorType>(Op<0>()->getType())->getNumElements();
  1954. return isExtractSubvectorMask(ShuffleMask, NumSrcElts, Index);
  1955. }
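// Illustrative sketch (hypothetical values, not taken from this file): for a
// shuffle of two 8-element sources, the mask <4,5,6,7> extracts the upper
// half of the first operand, so the static helper reports Index = 4.
//
//   int Index;
//   SmallVector<int, 4> Mask = {4, 5, 6, 7};
//   bool IsExtract =
//       ShuffleVectorInst::isExtractSubvectorMask(Mask, /*NumSrcElts=*/8, Index);
//   // IsExtract == true, Index == 4.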
  1956. /// Return true if this shuffle mask is an insert subvector mask.
  1957. /// A valid insert subvector mask inserts the lowest elements of a second
1958. /// source operand into an in-place first source operand.
1959. /// Both the subvector width and the insertion index are returned.
  1960. static bool isInsertSubvectorMask(ArrayRef<int> Mask, int NumSrcElts,
  1961. int &NumSubElts, int &Index);
  1962. static bool isInsertSubvectorMask(const Constant *Mask, int NumSrcElts,
  1963. int &NumSubElts, int &Index) {
  1964. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1965. // Not possible to express a shuffle mask for a scalable vector for this
  1966. // case.
  1967. if (isa<ScalableVectorType>(Mask->getType()))
  1968. return false;
  1969. SmallVector<int, 16> MaskAsInts;
  1970. getShuffleMask(Mask, MaskAsInts);
  1971. return isInsertSubvectorMask(MaskAsInts, NumSrcElts, NumSubElts, Index);
  1972. }
  1973. /// Return true if this shuffle mask is an insert subvector mask.
  1974. bool isInsertSubvectorMask(int &NumSubElts, int &Index) const {
  1975. // Not possible to express a shuffle mask for a scalable vector for this
  1976. // case.
  1977. if (isa<ScalableVectorType>(getType()))
  1978. return false;
  1979. int NumSrcElts =
  1980. cast<FixedVectorType>(Op<0>()->getType())->getNumElements();
  1981. return isInsertSubvectorMask(ShuffleMask, NumSrcElts, NumSubElts, Index);
  1982. }
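// Illustrative sketch (hypothetical values): for two 8-element sources, the
// mask <0,1,8,9,4,5,6,7> keeps the first operand in place except for lanes
// 2 and 3, which take the lowest two elements of the second operand.
//
//   int NumSubElts, Index;
//   SmallVector<int, 8> Mask = {0, 1, 8, 9, 4, 5, 6, 7};
//   bool IsInsert = ShuffleVectorInst::isInsertSubvectorMask(
//       Mask, /*NumSrcElts=*/8, NumSubElts, Index);
//   // IsInsert == true, NumSubElts == 2, Index == 2.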
  1983. /// Return true if this shuffle mask replicates each of the \p VF elements
  1984. /// in a vector \p ReplicationFactor times.
  1985. /// For example, the mask for \p ReplicationFactor=3 and \p VF=4 is:
  1986. /// <0,0,0,1,1,1,2,2,2,3,3,3>
  1987. static bool isReplicationMask(ArrayRef<int> Mask, int &ReplicationFactor,
  1988. int &VF);
  1989. static bool isReplicationMask(const Constant *Mask, int &ReplicationFactor,
  1990. int &VF) {
  1991. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1992. // Not possible to express a shuffle mask for a scalable vector for this
  1993. // case.
  1994. if (isa<ScalableVectorType>(Mask->getType()))
  1995. return false;
  1996. SmallVector<int, 16> MaskAsInts;
  1997. getShuffleMask(Mask, MaskAsInts);
  1998. return isReplicationMask(MaskAsInts, ReplicationFactor, VF);
  1999. }
  2000. /// Return true if this shuffle mask is a replication mask.
  2001. bool isReplicationMask(int &ReplicationFactor, int &VF) const;
  2002. /// Change values in a shuffle permute mask assuming the two vector operands
  2003. /// of length InVecNumElts have swapped position.
  2004. static void commuteShuffleMask(MutableArrayRef<int> Mask,
  2005. unsigned InVecNumElts) {
  2006. for (int &Idx : Mask) {
  2007. if (Idx == -1)
  2008. continue;
  2009. Idx = Idx < (int)InVecNumElts ? Idx + InVecNumElts : Idx - InVecNumElts;
  2010. assert(Idx >= 0 && Idx < (int)InVecNumElts * 2 &&
  2011. "shufflevector mask index out of range");
  2012. }
  2013. }
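// Worked example (hypothetical mask): with InVecNumElts = 4, commuting swaps
// which source each index refers to.
//
//   SmallVector<int, 4> Mask = {0, 5, 2, 7};   // A[0], B[1], A[2], B[3]
//   ShuffleVectorInst::commuteShuffleMask(Mask, /*InVecNumElts=*/4);
//   // Mask is now {4, 1, 6, 3}, which selects the same values once the
//   // operands appear in the swapped order (B, A).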
  2014. // Methods for support type inquiry through isa, cast, and dyn_cast:
  2015. static bool classof(const Instruction *I) {
  2016. return I->getOpcode() == Instruction::ShuffleVector;
  2017. }
  2018. static bool classof(const Value *V) {
  2019. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2020. }
  2021. };
  2022. template <>
  2023. struct OperandTraits<ShuffleVectorInst>
  2024. : public FixedNumOperandTraits<ShuffleVectorInst, 2> {};
  2025. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ShuffleVectorInst, Value)
  2026. //===----------------------------------------------------------------------===//
  2027. // ExtractValueInst Class
  2028. //===----------------------------------------------------------------------===//
  2029. /// This instruction extracts a struct member or array
  2030. /// element value from an aggregate value.
  2031. ///
  2032. class ExtractValueInst : public UnaryInstruction {
  2033. SmallVector<unsigned, 4> Indices;
  2034. ExtractValueInst(const ExtractValueInst &EVI);
2035. /// Constructors - Create an extractvalue instruction with a base aggregate
  2036. /// value and a list of indices. The first ctor can optionally insert before
  2037. /// an existing instruction, the second appends the new instruction to the
  2038. /// specified BasicBlock.
  2039. inline ExtractValueInst(Value *Agg,
  2040. ArrayRef<unsigned> Idxs,
  2041. const Twine &NameStr,
  2042. Instruction *InsertBefore);
  2043. inline ExtractValueInst(Value *Agg,
  2044. ArrayRef<unsigned> Idxs,
  2045. const Twine &NameStr, BasicBlock *InsertAtEnd);
  2046. void init(ArrayRef<unsigned> Idxs, const Twine &NameStr);
  2047. protected:
  2048. // Note: Instruction needs to be a friend here to call cloneImpl.
  2049. friend class Instruction;
  2050. ExtractValueInst *cloneImpl() const;
  2051. public:
  2052. static ExtractValueInst *Create(Value *Agg,
  2053. ArrayRef<unsigned> Idxs,
  2054. const Twine &NameStr = "",
  2055. Instruction *InsertBefore = nullptr) {
  2056. return new
  2057. ExtractValueInst(Agg, Idxs, NameStr, InsertBefore);
  2058. }
  2059. static ExtractValueInst *Create(Value *Agg,
  2060. ArrayRef<unsigned> Idxs,
  2061. const Twine &NameStr,
  2062. BasicBlock *InsertAtEnd) {
  2063. return new ExtractValueInst(Agg, Idxs, NameStr, InsertAtEnd);
  2064. }
  2065. /// Returns the type of the element that would be extracted
  2066. /// with an extractvalue instruction with the specified parameters.
  2067. ///
  2068. /// Null is returned if the indices are invalid for the specified type.
  2069. static Type *getIndexedType(Type *Agg, ArrayRef<unsigned> Idxs);
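// Illustrative sketch (hypothetical types): for an aggregate typed as
// { i32, [4 x float] }, the index list {1, 2} first selects the array field
// and then its element 2, so the helper would return the float type; an
// invalid index list yields null instead.
//
//   // Type *EltTy = ExtractValueInst::getIndexedType(AggTy, {1, 2});
//   // EltTy is float for the layout above; nullptr if the indices are invalid.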
  2070. using idx_iterator = const unsigned*;
  2071. inline idx_iterator idx_begin() const { return Indices.begin(); }
  2072. inline idx_iterator idx_end() const { return Indices.end(); }
  2073. inline iterator_range<idx_iterator> indices() const {
  2074. return make_range(idx_begin(), idx_end());
  2075. }
  2076. Value *getAggregateOperand() {
  2077. return getOperand(0);
  2078. }
  2079. const Value *getAggregateOperand() const {
  2080. return getOperand(0);
  2081. }
  2082. static unsigned getAggregateOperandIndex() {
  2083. return 0U; // get index for modifying correct operand
  2084. }
  2085. ArrayRef<unsigned> getIndices() const {
  2086. return Indices;
  2087. }
  2088. unsigned getNumIndices() const {
  2089. return (unsigned)Indices.size();
  2090. }
  2091. bool hasIndices() const {
  2092. return true;
  2093. }
  2094. // Methods for support type inquiry through isa, cast, and dyn_cast:
  2095. static bool classof(const Instruction *I) {
  2096. return I->getOpcode() == Instruction::ExtractValue;
  2097. }
  2098. static bool classof(const Value *V) {
  2099. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2100. }
  2101. };
  2102. ExtractValueInst::ExtractValueInst(Value *Agg,
  2103. ArrayRef<unsigned> Idxs,
  2104. const Twine &NameStr,
  2105. Instruction *InsertBefore)
  2106. : UnaryInstruction(checkGEPType(getIndexedType(Agg->getType(), Idxs)),
  2107. ExtractValue, Agg, InsertBefore) {
  2108. init(Idxs, NameStr);
  2109. }
  2110. ExtractValueInst::ExtractValueInst(Value *Agg,
  2111. ArrayRef<unsigned> Idxs,
  2112. const Twine &NameStr,
  2113. BasicBlock *InsertAtEnd)
  2114. : UnaryInstruction(checkGEPType(getIndexedType(Agg->getType(), Idxs)),
  2115. ExtractValue, Agg, InsertAtEnd) {
  2116. init(Idxs, NameStr);
  2117. }
  2118. //===----------------------------------------------------------------------===//
  2119. // InsertValueInst Class
  2120. //===----------------------------------------------------------------------===//
2121. /// This instruction inserts a struct field or array element
  2122. /// value into an aggregate value.
  2123. ///
  2124. class InsertValueInst : public Instruction {
  2125. SmallVector<unsigned, 4> Indices;
  2126. InsertValueInst(const InsertValueInst &IVI);
2127. /// Constructors - Create an insertvalue instruction with a base aggregate
  2128. /// value, a value to insert, and a list of indices. The first ctor can
  2129. /// optionally insert before an existing instruction, the second appends
  2130. /// the new instruction to the specified BasicBlock.
  2131. inline InsertValueInst(Value *Agg, Value *Val,
  2132. ArrayRef<unsigned> Idxs,
  2133. const Twine &NameStr,
  2134. Instruction *InsertBefore);
  2135. inline InsertValueInst(Value *Agg, Value *Val,
  2136. ArrayRef<unsigned> Idxs,
  2137. const Twine &NameStr, BasicBlock *InsertAtEnd);
  2138. /// Constructors - These two constructors are convenience methods because one
  2139. /// and two index insertvalue instructions are so common.
  2140. InsertValueInst(Value *Agg, Value *Val, unsigned Idx,
  2141. const Twine &NameStr = "",
  2142. Instruction *InsertBefore = nullptr);
  2143. InsertValueInst(Value *Agg, Value *Val, unsigned Idx, const Twine &NameStr,
  2144. BasicBlock *InsertAtEnd);
  2145. void init(Value *Agg, Value *Val, ArrayRef<unsigned> Idxs,
  2146. const Twine &NameStr);
  2147. protected:
  2148. // Note: Instruction needs to be a friend here to call cloneImpl.
  2149. friend class Instruction;
  2150. InsertValueInst *cloneImpl() const;
  2151. public:
  2152. // allocate space for exactly two operands
  2153. void *operator new(size_t S) { return User::operator new(S, 2); }
  2154. void operator delete(void *Ptr) { User::operator delete(Ptr); }
  2155. static InsertValueInst *Create(Value *Agg, Value *Val,
  2156. ArrayRef<unsigned> Idxs,
  2157. const Twine &NameStr = "",
  2158. Instruction *InsertBefore = nullptr) {
  2159. return new InsertValueInst(Agg, Val, Idxs, NameStr, InsertBefore);
  2160. }
  2161. static InsertValueInst *Create(Value *Agg, Value *Val,
  2162. ArrayRef<unsigned> Idxs,
  2163. const Twine &NameStr,
  2164. BasicBlock *InsertAtEnd) {
  2165. return new InsertValueInst(Agg, Val, Idxs, NameStr, InsertAtEnd);
  2166. }
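// Usage sketch (hypothetical values Agg and Elt, insertion point IP): update
// field 1 of a two-field struct value while leaving field 0 untouched.
//
//   // InsertValueInst *IVI =
//   //     InsertValueInst::Create(Agg, Elt, /*Idxs=*/{1}, "agg.upd", IP);
//   // IVI->getAggregateOperand() == Agg; IVI->getInsertedValueOperand() == Elt.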
  2167. /// Transparently provide more efficient getOperand methods.
  2168. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  2169. using idx_iterator = const unsigned*;
  2170. inline idx_iterator idx_begin() const { return Indices.begin(); }
  2171. inline idx_iterator idx_end() const { return Indices.end(); }
  2172. inline iterator_range<idx_iterator> indices() const {
  2173. return make_range(idx_begin(), idx_end());
  2174. }
  2175. Value *getAggregateOperand() {
  2176. return getOperand(0);
  2177. }
  2178. const Value *getAggregateOperand() const {
  2179. return getOperand(0);
  2180. }
  2181. static unsigned getAggregateOperandIndex() {
  2182. return 0U; // get index for modifying correct operand
  2183. }
  2184. Value *getInsertedValueOperand() {
  2185. return getOperand(1);
  2186. }
  2187. const Value *getInsertedValueOperand() const {
  2188. return getOperand(1);
  2189. }
  2190. static unsigned getInsertedValueOperandIndex() {
  2191. return 1U; // get index for modifying correct operand
  2192. }
  2193. ArrayRef<unsigned> getIndices() const {
  2194. return Indices;
  2195. }
  2196. unsigned getNumIndices() const {
  2197. return (unsigned)Indices.size();
  2198. }
  2199. bool hasIndices() const {
  2200. return true;
  2201. }
  2202. // Methods for support type inquiry through isa, cast, and dyn_cast:
  2203. static bool classof(const Instruction *I) {
  2204. return I->getOpcode() == Instruction::InsertValue;
  2205. }
  2206. static bool classof(const Value *V) {
  2207. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2208. }
  2209. };
  2210. template <>
  2211. struct OperandTraits<InsertValueInst> :
  2212. public FixedNumOperandTraits<InsertValueInst, 2> {
  2213. };
  2214. InsertValueInst::InsertValueInst(Value *Agg,
  2215. Value *Val,
  2216. ArrayRef<unsigned> Idxs,
  2217. const Twine &NameStr,
  2218. Instruction *InsertBefore)
  2219. : Instruction(Agg->getType(), InsertValue,
  2220. OperandTraits<InsertValueInst>::op_begin(this),
  2221. 2, InsertBefore) {
  2222. init(Agg, Val, Idxs, NameStr);
  2223. }
  2224. InsertValueInst::InsertValueInst(Value *Agg,
  2225. Value *Val,
  2226. ArrayRef<unsigned> Idxs,
  2227. const Twine &NameStr,
  2228. BasicBlock *InsertAtEnd)
  2229. : Instruction(Agg->getType(), InsertValue,
  2230. OperandTraits<InsertValueInst>::op_begin(this),
  2231. 2, InsertAtEnd) {
  2232. init(Agg, Val, Idxs, NameStr);
  2233. }
  2234. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(InsertValueInst, Value)
  2235. //===----------------------------------------------------------------------===//
  2236. // PHINode Class
  2237. //===----------------------------------------------------------------------===//
  2238. // PHINode - The PHINode class is used to represent the magical mystical PHI
2239. // node, which cannot exist in nature but can be synthesized in a computer
  2240. // scientist's overactive imagination.
  2241. //
  2242. class PHINode : public Instruction {
  2243. /// The number of operands actually allocated. NumOperands is
  2244. /// the number actually in use.
  2245. unsigned ReservedSpace;
  2246. PHINode(const PHINode &PN);
  2247. explicit PHINode(Type *Ty, unsigned NumReservedValues,
  2248. const Twine &NameStr = "",
  2249. Instruction *InsertBefore = nullptr)
  2250. : Instruction(Ty, Instruction::PHI, nullptr, 0, InsertBefore),
  2251. ReservedSpace(NumReservedValues) {
  2252. assert(!Ty->isTokenTy() && "PHI nodes cannot have token type!");
  2253. setName(NameStr);
  2254. allocHungoffUses(ReservedSpace);
  2255. }
  2256. PHINode(Type *Ty, unsigned NumReservedValues, const Twine &NameStr,
  2257. BasicBlock *InsertAtEnd)
  2258. : Instruction(Ty, Instruction::PHI, nullptr, 0, InsertAtEnd),
  2259. ReservedSpace(NumReservedValues) {
  2260. assert(!Ty->isTokenTy() && "PHI nodes cannot have token type!");
  2261. setName(NameStr);
  2262. allocHungoffUses(ReservedSpace);
  2263. }
  2264. protected:
  2265. // Note: Instruction needs to be a friend here to call cloneImpl.
  2266. friend class Instruction;
  2267. PHINode *cloneImpl() const;
  2268. // allocHungoffUses - this is more complicated than the generic
  2269. // User::allocHungoffUses, because we have to allocate Uses for the incoming
  2270. // values and pointers to the incoming blocks, all in one allocation.
  2271. void allocHungoffUses(unsigned N) {
  2272. User::allocHungoffUses(N, /* IsPhi */ true);
  2273. }
  2274. public:
  2275. /// Constructors - NumReservedValues is a hint for the number of incoming
  2276. /// edges that this phi node will have (use 0 if you really have no idea).
  2277. static PHINode *Create(Type *Ty, unsigned NumReservedValues,
  2278. const Twine &NameStr = "",
  2279. Instruction *InsertBefore = nullptr) {
  2280. return new PHINode(Ty, NumReservedValues, NameStr, InsertBefore);
  2281. }
  2282. static PHINode *Create(Type *Ty, unsigned NumReservedValues,
  2283. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  2284. return new PHINode(Ty, NumReservedValues, NameStr, InsertAtEnd);
  2285. }
  2286. /// Provide fast operand accessors
  2287. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  2288. // Block iterator interface. This provides access to the list of incoming
  2289. // basic blocks, which parallels the list of incoming values.
  2290. using block_iterator = BasicBlock **;
  2291. using const_block_iterator = BasicBlock * const *;
  2292. block_iterator block_begin() {
  2293. return reinterpret_cast<block_iterator>(op_begin() + ReservedSpace);
  2294. }
  2295. const_block_iterator block_begin() const {
  2296. return reinterpret_cast<const_block_iterator>(op_begin() + ReservedSpace);
  2297. }
  2298. block_iterator block_end() {
  2299. return block_begin() + getNumOperands();
  2300. }
  2301. const_block_iterator block_end() const {
  2302. return block_begin() + getNumOperands();
  2303. }
  2304. iterator_range<block_iterator> blocks() {
  2305. return make_range(block_begin(), block_end());
  2306. }
  2307. iterator_range<const_block_iterator> blocks() const {
  2308. return make_range(block_begin(), block_end());
  2309. }
  2310. op_range incoming_values() { return operands(); }
  2311. const_op_range incoming_values() const { return operands(); }
  2312. /// Return the number of incoming edges
  2313. ///
  2314. unsigned getNumIncomingValues() const { return getNumOperands(); }
  2315. /// Return incoming value number x
  2316. ///
  2317. Value *getIncomingValue(unsigned i) const {
  2318. return getOperand(i);
  2319. }
  2320. void setIncomingValue(unsigned i, Value *V) {
  2321. assert(V && "PHI node got a null value!");
  2322. assert(getType() == V->getType() &&
  2323. "All operands to PHI node must be the same type as the PHI node!");
  2324. setOperand(i, V);
  2325. }
  2326. static unsigned getOperandNumForIncomingValue(unsigned i) {
  2327. return i;
  2328. }
  2329. static unsigned getIncomingValueNumForOperand(unsigned i) {
  2330. return i;
  2331. }
  2332. /// Return incoming basic block number @p i.
  2333. ///
  2334. BasicBlock *getIncomingBlock(unsigned i) const {
  2335. return block_begin()[i];
  2336. }
  2337. /// Return incoming basic block corresponding
  2338. /// to an operand of the PHI.
  2339. ///
  2340. BasicBlock *getIncomingBlock(const Use &U) const {
  2341. assert(this == U.getUser() && "Iterator doesn't point to PHI's Uses?");
  2342. return getIncomingBlock(unsigned(&U - op_begin()));
  2343. }
  2344. /// Return incoming basic block corresponding
  2345. /// to value use iterator.
  2346. ///
  2347. BasicBlock *getIncomingBlock(Value::const_user_iterator I) const {
  2348. return getIncomingBlock(I.getUse());
  2349. }
  2350. void setIncomingBlock(unsigned i, BasicBlock *BB) {
  2351. assert(BB && "PHI node got a null basic block!");
  2352. block_begin()[i] = BB;
  2353. }
2354. /// Replace every incoming basic block \p Old with basic block \p New.
  2355. void replaceIncomingBlockWith(const BasicBlock *Old, BasicBlock *New) {
  2356. assert(New && Old && "PHI node got a null basic block!");
  2357. for (unsigned Op = 0, NumOps = getNumOperands(); Op != NumOps; ++Op)
  2358. if (getIncomingBlock(Op) == Old)
  2359. setIncomingBlock(Op, New);
  2360. }
  2361. /// Add an incoming value to the end of the PHI list
  2362. ///
  2363. void addIncoming(Value *V, BasicBlock *BB) {
  2364. if (getNumOperands() == ReservedSpace)
  2365. growOperands(); // Get more space!
  2366. // Initialize some new operands.
  2367. setNumHungOffUseOperands(getNumOperands() + 1);
  2368. setIncomingValue(getNumOperands() - 1, V);
  2369. setIncomingBlock(getNumOperands() - 1, BB);
  2370. }
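// Usage sketch (hypothetical blocks ThenBB/ElseBB, values ThenV/ElseV, and
// merge block MergeBB): reserve the expected number of edges up front, then
// register each (value, block) pair.
//
//   // PHINode *PN = PHINode::Create(ThenV->getType(), /*NumReservedValues=*/2,
//   //                               "merge", &MergeBB->front());
//   // PN->addIncoming(ThenV, ThenBB);
//   // PN->addIncoming(ElseV, ElseBB);
//   // PN->getIncomingValueForBlock(ElseBB) == ElseV;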
  2371. /// Remove an incoming value. This is useful if a
  2372. /// predecessor basic block is deleted. The value removed is returned.
  2373. ///
  2374. /// If the last incoming value for a PHI node is removed (and DeletePHIIfEmpty
  2375. /// is true), the PHI node is destroyed and any uses of it are replaced with
  2376. /// dummy values. The only time there should be zero incoming values to a PHI
  2377. /// node is when the block is dead, so this strategy is sound.
  2378. ///
  2379. Value *removeIncomingValue(unsigned Idx, bool DeletePHIIfEmpty = true);
  2380. Value *removeIncomingValue(const BasicBlock *BB, bool DeletePHIIfEmpty=true) {
  2381. int Idx = getBasicBlockIndex(BB);
  2382. assert(Idx >= 0 && "Invalid basic block argument to remove!");
  2383. return removeIncomingValue(Idx, DeletePHIIfEmpty);
  2384. }
  2385. /// Return the first index of the specified basic
  2386. /// block in the value list for this PHI. Returns -1 if no instance.
  2387. ///
  2388. int getBasicBlockIndex(const BasicBlock *BB) const {
  2389. for (unsigned i = 0, e = getNumOperands(); i != e; ++i)
  2390. if (block_begin()[i] == BB)
  2391. return i;
  2392. return -1;
  2393. }
  2394. Value *getIncomingValueForBlock(const BasicBlock *BB) const {
  2395. int Idx = getBasicBlockIndex(BB);
  2396. assert(Idx >= 0 && "Invalid basic block argument!");
  2397. return getIncomingValue(Idx);
  2398. }
2399. /// Set every incoming value for block \p BB to \p V.
  2400. void setIncomingValueForBlock(const BasicBlock *BB, Value *V) {
  2401. assert(BB && "PHI node got a null basic block!");
  2402. bool Found = false;
  2403. for (unsigned Op = 0, NumOps = getNumOperands(); Op != NumOps; ++Op)
  2404. if (getIncomingBlock(Op) == BB) {
  2405. Found = true;
  2406. setIncomingValue(Op, V);
  2407. }
  2408. (void)Found;
  2409. assert(Found && "Invalid basic block argument to set!");
  2410. }
  2411. /// If the specified PHI node always merges together the
  2412. /// same value, return the value, otherwise return null.
  2413. Value *hasConstantValue() const;
  2414. /// Whether the specified PHI node always merges
  2415. /// together the same value, assuming undefs are equal to a unique
  2416. /// non-undef value.
  2417. bool hasConstantOrUndefValue() const;
2418. /// Return true if the PHI node is complete, i.e. all of its parent's
2419. /// predecessors have an incoming value in this PHI; otherwise return false.
  2420. bool isComplete() const {
  2421. return llvm::all_of(predecessors(getParent()),
  2422. [this](const BasicBlock *Pred) {
  2423. return getBasicBlockIndex(Pred) >= 0;
  2424. });
  2425. }
  2426. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  2427. static bool classof(const Instruction *I) {
  2428. return I->getOpcode() == Instruction::PHI;
  2429. }
  2430. static bool classof(const Value *V) {
  2431. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2432. }
  2433. private:
  2434. void growOperands();
  2435. };
  2436. template <>
  2437. struct OperandTraits<PHINode> : public HungoffOperandTraits<2> {
  2438. };
  2439. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(PHINode, Value)
  2440. //===----------------------------------------------------------------------===//
  2441. // LandingPadInst Class
  2442. //===----------------------------------------------------------------------===//
  2443. //===---------------------------------------------------------------------------
  2444. /// The landingpad instruction holds all of the information
  2445. /// necessary to generate correct exception handling. The landingpad instruction
  2446. /// cannot be moved from the top of a landing pad block, which itself is
  2447. /// accessible only from the 'unwind' edge of an invoke. This uses the
  2448. /// SubclassData field in Value to store whether or not the landingpad is a
  2449. /// cleanup.
  2450. ///
  2451. class LandingPadInst : public Instruction {
  2452. using CleanupField = BoolBitfieldElementT<0>;
  2453. /// The number of operands actually allocated. NumOperands is
  2454. /// the number actually in use.
  2455. unsigned ReservedSpace;
  2456. LandingPadInst(const LandingPadInst &LP);
  2457. public:
  2458. enum ClauseType { Catch, Filter };
  2459. private:
  2460. explicit LandingPadInst(Type *RetTy, unsigned NumReservedValues,
  2461. const Twine &NameStr, Instruction *InsertBefore);
  2462. explicit LandingPadInst(Type *RetTy, unsigned NumReservedValues,
  2463. const Twine &NameStr, BasicBlock *InsertAtEnd);
  2464. // Allocate space for exactly zero operands.
  2465. void *operator new(size_t S) { return User::operator new(S); }
  2466. void growOperands(unsigned Size);
  2467. void init(unsigned NumReservedValues, const Twine &NameStr);
  2468. protected:
  2469. // Note: Instruction needs to be a friend here to call cloneImpl.
  2470. friend class Instruction;
  2471. LandingPadInst *cloneImpl() const;
  2472. public:
  2473. void operator delete(void *Ptr) { User::operator delete(Ptr); }
  2474. /// Constructors - NumReservedClauses is a hint for the number of incoming
  2475. /// clauses that this landingpad will have (use 0 if you really have no idea).
  2476. static LandingPadInst *Create(Type *RetTy, unsigned NumReservedClauses,
  2477. const Twine &NameStr = "",
  2478. Instruction *InsertBefore = nullptr);
  2479. static LandingPadInst *Create(Type *RetTy, unsigned NumReservedClauses,
  2480. const Twine &NameStr, BasicBlock *InsertAtEnd);
  2481. /// Provide fast operand accessors
  2482. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  2483. /// Return 'true' if this landingpad instruction is a
  2484. /// cleanup. I.e., it should be run when unwinding even if its landing pad
  2485. /// doesn't catch the exception.
  2486. bool isCleanup() const { return getSubclassData<CleanupField>(); }
  2487. /// Indicate that this landingpad instruction is a cleanup.
  2488. void setCleanup(bool V) { setSubclassData<CleanupField>(V); }
  2489. /// Add a catch or filter clause to the landing pad.
  2490. void addClause(Constant *ClauseVal);
  2491. /// Get the value of the clause at index Idx. Use isCatch/isFilter to
  2492. /// determine what type of clause this is.
  2493. Constant *getClause(unsigned Idx) const {
  2494. return cast<Constant>(getOperandList()[Idx]);
  2495. }
2496. /// Return 'true' if the clause at index Idx is a catch clause.
  2497. bool isCatch(unsigned Idx) const {
  2498. return !isa<ArrayType>(getOperandList()[Idx]->getType());
  2499. }
2500. /// Return 'true' if the clause at index Idx is a filter clause.
  2501. bool isFilter(unsigned Idx) const {
  2502. return isa<ArrayType>(getOperandList()[Idx]->getType());
  2503. }
  2504. /// Get the number of clauses for this landing pad.
  2505. unsigned getNumClauses() const { return getNumOperands(); }
  2506. /// Grow the size of the operand list to accommodate the new
  2507. /// number of clauses.
  2508. void reserveClauses(unsigned Size) { growOperands(Size); }
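// Usage sketch (hypothetical landingpad LP and helpers): walk the clause list
// and distinguish catch clauses from filter clauses with the accessors above.
//
//   // for (unsigned I = 0, E = LP->getNumClauses(); I != E; ++I) {
//   //   Constant *C = LP->getClause(I);
//   //   if (LP->isCatch(I))
//   //     handleCatch(C);    // hypothetical helper
//   //   else
//   //     handleFilter(C);   // hypothetical helper
//   // }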
  2509. // Methods for support type inquiry through isa, cast, and dyn_cast:
  2510. static bool classof(const Instruction *I) {
  2511. return I->getOpcode() == Instruction::LandingPad;
  2512. }
  2513. static bool classof(const Value *V) {
  2514. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2515. }
  2516. };
  2517. template <>
  2518. struct OperandTraits<LandingPadInst> : public HungoffOperandTraits<1> {
  2519. };
  2520. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(LandingPadInst, Value)
  2521. //===----------------------------------------------------------------------===//
  2522. // ReturnInst Class
  2523. //===----------------------------------------------------------------------===//
  2524. //===---------------------------------------------------------------------------
2525. /// Return a value (possibly void) from a function. Execution
  2526. /// does not continue in this function any longer.
  2527. ///
  2528. class ReturnInst : public Instruction {
  2529. ReturnInst(const ReturnInst &RI);
  2530. private:
  2531. // ReturnInst constructors:
  2532. // ReturnInst() - 'ret void' instruction
  2533. // ReturnInst( null) - 'ret void' instruction
  2534. // ReturnInst(Value* X) - 'ret X' instruction
  2535. // ReturnInst( null, Inst *I) - 'ret void' instruction, insert before I
  2536. // ReturnInst(Value* X, Inst *I) - 'ret X' instruction, insert before I
  2537. // ReturnInst( null, BB *B) - 'ret void' instruction, insert @ end of B
  2538. // ReturnInst(Value* X, BB *B) - 'ret X' instruction, insert @ end of B
  2539. //
  2540. // NOTE: If the Value* passed is of type void then the constructor behaves as
  2541. // if it was passed NULL.
  2542. explicit ReturnInst(LLVMContext &C, Value *retVal = nullptr,
  2543. Instruction *InsertBefore = nullptr);
  2544. ReturnInst(LLVMContext &C, Value *retVal, BasicBlock *InsertAtEnd);
  2545. explicit ReturnInst(LLVMContext &C, BasicBlock *InsertAtEnd);
  2546. protected:
  2547. // Note: Instruction needs to be a friend here to call cloneImpl.
  2548. friend class Instruction;
  2549. ReturnInst *cloneImpl() const;
  2550. public:
  2551. static ReturnInst* Create(LLVMContext &C, Value *retVal = nullptr,
  2552. Instruction *InsertBefore = nullptr) {
  2553. return new(!!retVal) ReturnInst(C, retVal, InsertBefore);
  2554. }
  2555. static ReturnInst* Create(LLVMContext &C, Value *retVal,
  2556. BasicBlock *InsertAtEnd) {
  2557. return new(!!retVal) ReturnInst(C, retVal, InsertAtEnd);
  2558. }
  2559. static ReturnInst* Create(LLVMContext &C, BasicBlock *InsertAtEnd) {
  2560. return new(0) ReturnInst(C, InsertAtEnd);
  2561. }
  2562. /// Provide fast operand accessors
  2563. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  2564. /// Convenience accessor. Returns null if there is no return value.
  2565. Value *getReturnValue() const {
  2566. return getNumOperands() != 0 ? getOperand(0) : nullptr;
  2567. }
  2568. unsigned getNumSuccessors() const { return 0; }
  2569. // Methods for support type inquiry through isa, cast, and dyn_cast:
  2570. static bool classof(const Instruction *I) {
  2571. return (I->getOpcode() == Instruction::Ret);
  2572. }
  2573. static bool classof(const Value *V) {
  2574. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2575. }
  2576. private:
  2577. BasicBlock *getSuccessor(unsigned idx) const {
  2578. llvm_unreachable("ReturnInst has no successors!");
  2579. }
  2580. void setSuccessor(unsigned idx, BasicBlock *B) {
  2581. llvm_unreachable("ReturnInst has no successors!");
  2582. }
  2583. };
  2584. template <>
  2585. struct OperandTraits<ReturnInst> : public VariadicOperandTraits<ReturnInst> {
  2586. };
  2587. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ReturnInst, Value)
  2588. //===----------------------------------------------------------------------===//
  2589. // BranchInst Class
  2590. //===----------------------------------------------------------------------===//
  2591. //===---------------------------------------------------------------------------
  2592. /// Conditional or Unconditional Branch instruction.
  2593. ///
  2594. class BranchInst : public Instruction {
  2595. /// Ops list - Branches are strange. The operands are ordered:
  2596. /// [Cond, FalseDest,] TrueDest. This makes some accessors faster because
  2597. /// they don't have to check for cond/uncond branchness. These are mostly
2598. /// accessed relative to op_end().
  2599. BranchInst(const BranchInst &BI);
  2600. // BranchInst constructors (where {B, T, F} are blocks, and C is a condition):
  2601. // BranchInst(BB *B) - 'br B'
  2602. // BranchInst(BB* T, BB *F, Value *C) - 'br C, T, F'
  2603. // BranchInst(BB* B, Inst *I) - 'br B' insert before I
  2604. // BranchInst(BB* T, BB *F, Value *C, Inst *I) - 'br C, T, F', insert before I
  2605. // BranchInst(BB* B, BB *I) - 'br B' insert at end
  2606. // BranchInst(BB* T, BB *F, Value *C, BB *I) - 'br C, T, F', insert at end
  2607. explicit BranchInst(BasicBlock *IfTrue, Instruction *InsertBefore = nullptr);
  2608. BranchInst(BasicBlock *IfTrue, BasicBlock *IfFalse, Value *Cond,
  2609. Instruction *InsertBefore = nullptr);
  2610. BranchInst(BasicBlock *IfTrue, BasicBlock *InsertAtEnd);
  2611. BranchInst(BasicBlock *IfTrue, BasicBlock *IfFalse, Value *Cond,
  2612. BasicBlock *InsertAtEnd);
  2613. void AssertOK();
  2614. protected:
  2615. // Note: Instruction needs to be a friend here to call cloneImpl.
  2616. friend class Instruction;
  2617. BranchInst *cloneImpl() const;
  2618. public:
  2619. /// Iterator type that casts an operand to a basic block.
  2620. ///
  2621. /// This only makes sense because the successors are stored as adjacent
  2622. /// operands for branch instructions.
  2623. struct succ_op_iterator
  2624. : iterator_adaptor_base<succ_op_iterator, value_op_iterator,
  2625. std::random_access_iterator_tag, BasicBlock *,
  2626. ptrdiff_t, BasicBlock *, BasicBlock *> {
  2627. explicit succ_op_iterator(value_op_iterator I) : iterator_adaptor_base(I) {}
  2628. BasicBlock *operator*() const { return cast<BasicBlock>(*I); }
  2629. BasicBlock *operator->() const { return operator*(); }
  2630. };
  2631. /// The const version of `succ_op_iterator`.
  2632. struct const_succ_op_iterator
  2633. : iterator_adaptor_base<const_succ_op_iterator, const_value_op_iterator,
  2634. std::random_access_iterator_tag,
  2635. const BasicBlock *, ptrdiff_t, const BasicBlock *,
  2636. const BasicBlock *> {
  2637. explicit const_succ_op_iterator(const_value_op_iterator I)
  2638. : iterator_adaptor_base(I) {}
  2639. const BasicBlock *operator*() const { return cast<BasicBlock>(*I); }
  2640. const BasicBlock *operator->() const { return operator*(); }
  2641. };
  2642. static BranchInst *Create(BasicBlock *IfTrue,
  2643. Instruction *InsertBefore = nullptr) {
  2644. return new(1) BranchInst(IfTrue, InsertBefore);
  2645. }
  2646. static BranchInst *Create(BasicBlock *IfTrue, BasicBlock *IfFalse,
  2647. Value *Cond, Instruction *InsertBefore = nullptr) {
  2648. return new(3) BranchInst(IfTrue, IfFalse, Cond, InsertBefore);
  2649. }
  2650. static BranchInst *Create(BasicBlock *IfTrue, BasicBlock *InsertAtEnd) {
  2651. return new(1) BranchInst(IfTrue, InsertAtEnd);
  2652. }
  2653. static BranchInst *Create(BasicBlock *IfTrue, BasicBlock *IfFalse,
  2654. Value *Cond, BasicBlock *InsertAtEnd) {
  2655. return new(3) BranchInst(IfTrue, IfFalse, Cond, InsertAtEnd);
  2656. }
  2657. /// Transparently provide more efficient getOperand methods.
  2658. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  2659. bool isUnconditional() const { return getNumOperands() == 1; }
  2660. bool isConditional() const { return getNumOperands() == 3; }
  2661. Value *getCondition() const {
  2662. assert(isConditional() && "Cannot get condition of an uncond branch!");
  2663. return Op<-3>();
  2664. }
  2665. void setCondition(Value *V) {
  2666. assert(isConditional() && "Cannot set condition of unconditional branch!");
  2667. Op<-3>() = V;
  2668. }
  2669. unsigned getNumSuccessors() const { return 1+isConditional(); }
  2670. BasicBlock *getSuccessor(unsigned i) const {
  2671. assert(i < getNumSuccessors() && "Successor # out of range for Branch!");
  2672. return cast_or_null<BasicBlock>((&Op<-1>() - i)->get());
  2673. }
  2674. void setSuccessor(unsigned idx, BasicBlock *NewSucc) {
  2675. assert(idx < getNumSuccessors() && "Successor # out of range for Branch!");
  2676. *(&Op<-1>() - idx) = NewSucc;
  2677. }
  2678. /// Swap the successors of this branch instruction.
  2679. ///
  2680. /// Swaps the successors of the branch instruction. This also swaps any
  2681. /// branch weight metadata associated with the instruction so that it
  2682. /// continues to map correctly to each operand.
  2683. void swapSuccessors();
  2684. iterator_range<succ_op_iterator> successors() {
  2685. return make_range(
  2686. succ_op_iterator(std::next(value_op_begin(), isConditional() ? 1 : 0)),
  2687. succ_op_iterator(value_op_end()));
  2688. }
  2689. iterator_range<const_succ_op_iterator> successors() const {
  2690. return make_range(const_succ_op_iterator(
  2691. std::next(value_op_begin(), isConditional() ? 1 : 0)),
  2692. const_succ_op_iterator(value_op_end()));
  2693. }
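// Usage sketch (hypothetical condition Cond, blocks TrueBB/FalseBB, insertion
// point IP): build a conditional branch and query it with the accessors above.
//
//   // BranchInst *BI = BranchInst::Create(TrueBB, FalseBB, Cond, IP);
//   // BI->isConditional();            // true
//   // BI->getSuccessor(0) == TrueBB;  // successor 0 is the taken edge
//   // BI->getSuccessor(1) == FalseBB;
//   // BI->swapSuccessors();           // also swaps any branch weights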
  2694. // Methods for support type inquiry through isa, cast, and dyn_cast:
  2695. static bool classof(const Instruction *I) {
  2696. return (I->getOpcode() == Instruction::Br);
  2697. }
  2698. static bool classof(const Value *V) {
  2699. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2700. }
  2701. };
  2702. template <>
  2703. struct OperandTraits<BranchInst> : public VariadicOperandTraits<BranchInst, 1> {
  2704. };
  2705. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(BranchInst, Value)
  2706. //===----------------------------------------------------------------------===//
  2707. // SwitchInst Class
  2708. //===----------------------------------------------------------------------===//
  2709. //===---------------------------------------------------------------------------
  2710. /// Multiway switch
  2711. ///
  2712. class SwitchInst : public Instruction {
  2713. unsigned ReservedSpace;
  2714. // Operand[0] = Value to switch on
  2715. // Operand[1] = Default basic block destination
  2716. // Operand[2n ] = Value to match
  2717. // Operand[2n+1] = BasicBlock to go to on match
  2718. SwitchInst(const SwitchInst &SI);
  2719. /// Create a new switch instruction, specifying a value to switch on and a
  2720. /// default destination. The number of additional cases can be specified here
  2721. /// to make memory allocation more efficient. This constructor can also
  2722. /// auto-insert before another instruction.
  2723. SwitchInst(Value *Value, BasicBlock *Default, unsigned NumCases,
  2724. Instruction *InsertBefore);
  2725. /// Create a new switch instruction, specifying a value to switch on and a
  2726. /// default destination. The number of additional cases can be specified here
  2727. /// to make memory allocation more efficient. This constructor also
  2728. /// auto-inserts at the end of the specified BasicBlock.
  2729. SwitchInst(Value *Value, BasicBlock *Default, unsigned NumCases,
  2730. BasicBlock *InsertAtEnd);
  2731. // allocate space for exactly zero operands
  2732. void *operator new(size_t S) { return User::operator new(S); }
  2733. void init(Value *Value, BasicBlock *Default, unsigned NumReserved);
  2734. void growOperands();
  2735. protected:
  2736. // Note: Instruction needs to be a friend here to call cloneImpl.
  2737. friend class Instruction;
  2738. SwitchInst *cloneImpl() const;
  2739. public:
  2740. void operator delete(void *Ptr) { User::operator delete(Ptr); }
2741. // A sentinel case index equal to (unsigned)-2, used to denote the default case.
  2742. static const unsigned DefaultPseudoIndex = static_cast<unsigned>(~0L-1);
  2743. template <typename CaseHandleT> class CaseIteratorImpl;
  2744. /// A handle to a particular switch case. It exposes a convenient interface
  2745. /// to both the case value and the successor block.
  2746. ///
  2747. /// We define this as a template and instantiate it to form both a const and
  2748. /// non-const handle.
  2749. template <typename SwitchInstT, typename ConstantIntT, typename BasicBlockT>
  2750. class CaseHandleImpl {
  2751. // Directly befriend both const and non-const iterators.
  2752. friend class SwitchInst::CaseIteratorImpl<
  2753. CaseHandleImpl<SwitchInstT, ConstantIntT, BasicBlockT>>;
  2754. protected:
  2755. // Expose the switch type we're parameterized with to the iterator.
  2756. using SwitchInstType = SwitchInstT;
  2757. SwitchInstT *SI;
  2758. ptrdiff_t Index;
  2759. CaseHandleImpl() = default;
  2760. CaseHandleImpl(SwitchInstT *SI, ptrdiff_t Index) : SI(SI), Index(Index) {}
  2761. public:
  2762. /// Resolves case value for current case.
  2763. ConstantIntT *getCaseValue() const {
  2764. assert((unsigned)Index < SI->getNumCases() &&
2765. "Index out of range for the number of cases.");
  2766. return reinterpret_cast<ConstantIntT *>(SI->getOperand(2 + Index * 2));
  2767. }
  2768. /// Resolves successor for current case.
  2769. BasicBlockT *getCaseSuccessor() const {
  2770. assert(((unsigned)Index < SI->getNumCases() ||
  2771. (unsigned)Index == DefaultPseudoIndex) &&
2772. "Index out of range for the number of cases.");
  2773. return SI->getSuccessor(getSuccessorIndex());
  2774. }
  2775. /// Returns number of current case.
  2776. unsigned getCaseIndex() const { return Index; }
  2777. /// Returns successor index for current case successor.
  2778. unsigned getSuccessorIndex() const {
  2779. assert(((unsigned)Index == DefaultPseudoIndex ||
  2780. (unsigned)Index < SI->getNumCases()) &&
2781. "Index out of range for the number of cases.");
  2782. return (unsigned)Index != DefaultPseudoIndex ? Index + 1 : 0;
  2783. }
  2784. bool operator==(const CaseHandleImpl &RHS) const {
  2785. assert(SI == RHS.SI && "Incompatible operators.");
  2786. return Index == RHS.Index;
  2787. }
  2788. };
  2789. using ConstCaseHandle =
  2790. CaseHandleImpl<const SwitchInst, const ConstantInt, const BasicBlock>;
  2791. class CaseHandle
  2792. : public CaseHandleImpl<SwitchInst, ConstantInt, BasicBlock> {
  2793. friend class SwitchInst::CaseIteratorImpl<CaseHandle>;
  2794. public:
  2795. CaseHandle(SwitchInst *SI, ptrdiff_t Index) : CaseHandleImpl(SI, Index) {}
  2796. /// Sets the new value for current case.
  2797. void setValue(ConstantInt *V) const {
  2798. assert((unsigned)Index < SI->getNumCases() &&
2799. "Index out of range for the number of cases.");
  2800. SI->setOperand(2 + Index*2, reinterpret_cast<Value*>(V));
  2801. }
  2802. /// Sets the new successor for current case.
  2803. void setSuccessor(BasicBlock *S) const {
  2804. SI->setSuccessor(getSuccessorIndex(), S);
  2805. }
  2806. };
  2807. template <typename CaseHandleT>
  2808. class CaseIteratorImpl
  2809. : public iterator_facade_base<CaseIteratorImpl<CaseHandleT>,
  2810. std::random_access_iterator_tag,
  2811. const CaseHandleT> {
  2812. using SwitchInstT = typename CaseHandleT::SwitchInstType;
  2813. CaseHandleT Case;
  2814. public:
  2815. /// Default constructed iterator is in an invalid state until assigned to
  2816. /// a case for a particular switch.
  2817. CaseIteratorImpl() = default;
  2818. /// Initializes case iterator for given SwitchInst and for given
  2819. /// case number.
  2820. CaseIteratorImpl(SwitchInstT *SI, unsigned CaseNum) : Case(SI, CaseNum) {}
  2821. /// Initializes case iterator for given SwitchInst and for given
  2822. /// successor index.
  2823. static CaseIteratorImpl fromSuccessorIndex(SwitchInstT *SI,
  2824. unsigned SuccessorIndex) {
  2825. assert(SuccessorIndex < SI->getNumSuccessors() &&
  2826. "Successor index # out of range!");
  2827. return SuccessorIndex != 0 ? CaseIteratorImpl(SI, SuccessorIndex - 1)
  2828. : CaseIteratorImpl(SI, DefaultPseudoIndex);
  2829. }
  2830. /// Support converting to the const variant. This will be a no-op for const
  2831. /// variant.
  2832. operator CaseIteratorImpl<ConstCaseHandle>() const {
  2833. return CaseIteratorImpl<ConstCaseHandle>(Case.SI, Case.Index);
  2834. }
  2835. CaseIteratorImpl &operator+=(ptrdiff_t N) {
  2836. // Check index correctness after addition.
  2837. // Note: Index == getNumCases() means end().
  2838. assert(Case.Index + N >= 0 &&
  2839. (unsigned)(Case.Index + N) <= Case.SI->getNumCases() &&
2840. "Case.Index out of range for the number of cases.");
  2841. Case.Index += N;
  2842. return *this;
  2843. }
  2844. CaseIteratorImpl &operator-=(ptrdiff_t N) {
  2845. // Check index correctness after subtraction.
  2846. // Note: Case.Index == getNumCases() means end().
  2847. assert(Case.Index - N >= 0 &&
  2848. (unsigned)(Case.Index - N) <= Case.SI->getNumCases() &&
2849. "Case.Index out of range for the number of cases.");
  2850. Case.Index -= N;
  2851. return *this;
  2852. }
  2853. ptrdiff_t operator-(const CaseIteratorImpl &RHS) const {
  2854. assert(Case.SI == RHS.Case.SI && "Incompatible operators.");
  2855. return Case.Index - RHS.Case.Index;
  2856. }
  2857. bool operator==(const CaseIteratorImpl &RHS) const {
  2858. return Case == RHS.Case;
  2859. }
  2860. bool operator<(const CaseIteratorImpl &RHS) const {
  2861. assert(Case.SI == RHS.Case.SI && "Incompatible operators.");
  2862. return Case.Index < RHS.Case.Index;
  2863. }
  2864. const CaseHandleT &operator*() const { return Case; }
  2865. };
  2866. using CaseIt = CaseIteratorImpl<CaseHandle>;
  2867. using ConstCaseIt = CaseIteratorImpl<ConstCaseHandle>;
  2868. static SwitchInst *Create(Value *Value, BasicBlock *Default,
  2869. unsigned NumCases,
  2870. Instruction *InsertBefore = nullptr) {
  2871. return new SwitchInst(Value, Default, NumCases, InsertBefore);
  2872. }
  2873. static SwitchInst *Create(Value *Value, BasicBlock *Default,
  2874. unsigned NumCases, BasicBlock *InsertAtEnd) {
  2875. return new SwitchInst(Value, Default, NumCases, InsertAtEnd);
  2876. }
  2877. /// Provide fast operand accessors
  2878. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  2879. // Accessor Methods for Switch stmt
  2880. Value *getCondition() const { return getOperand(0); }
  2881. void setCondition(Value *V) { setOperand(0, V); }
  2882. BasicBlock *getDefaultDest() const {
  2883. return cast<BasicBlock>(getOperand(1));
  2884. }
  2885. void setDefaultDest(BasicBlock *DefaultCase) {
  2886. setOperand(1, reinterpret_cast<Value*>(DefaultCase));
  2887. }
  2888. /// Return the number of 'cases' in this switch instruction, excluding the
  2889. /// default case.
  2890. unsigned getNumCases() const {
  2891. return getNumOperands()/2 - 1;
  2892. }
  2893. /// Returns a read/write iterator that points to the first case in the
  2894. /// SwitchInst.
  2895. CaseIt case_begin() {
  2896. return CaseIt(this, 0);
  2897. }
  2898. /// Returns a read-only iterator that points to the first case in the
  2899. /// SwitchInst.
  2900. ConstCaseIt case_begin() const {
  2901. return ConstCaseIt(this, 0);
  2902. }
  2903. /// Returns a read/write iterator that points one past the last in the
  2904. /// SwitchInst.
  2905. CaseIt case_end() {
  2906. return CaseIt(this, getNumCases());
  2907. }
  2908. /// Returns a read-only iterator that points one past the last in the
  2909. /// SwitchInst.
  2910. ConstCaseIt case_end() const {
  2911. return ConstCaseIt(this, getNumCases());
  2912. }
  2913. /// Iteration adapter for range-for loops.
  2914. iterator_range<CaseIt> cases() {
  2915. return make_range(case_begin(), case_end());
  2916. }
  2917. /// Constant iteration adapter for range-for loops.
  2918. iterator_range<ConstCaseIt> cases() const {
  2919. return make_range(case_begin(), case_end());
  2920. }
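// Usage sketch (hypothetical switch SI): the range adapters above allow a
// simple scan of every non-default case, e.g. counting cases that branch to
// the default destination anyway.
//
//   // unsigned CasesToDefault = 0;
//   // for (const auto &Case : SI->cases())
//   //   if (Case.getCaseSuccessor() == SI->getDefaultDest())
//   //     ++CasesToDefault;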
  2921. /// Returns an iterator that points to the default case.
2922. /// Note: this iterator can only resolve the successor; attempting to
2923. /// resolve the case value triggers an assertion.
2924. /// Also note that incrementing or decrementing this iterator triggers an
2925. /// assertion and leaves the iterator invalid.
  2926. CaseIt case_default() {
  2927. return CaseIt(this, DefaultPseudoIndex);
  2928. }
  2929. ConstCaseIt case_default() const {
  2930. return ConstCaseIt(this, DefaultPseudoIndex);
  2931. }
  2932. /// Search all of the case values for the specified constant. If it is
2933. /// explicitly handled, return its case iterator; otherwise return the
2934. /// default case iterator to indicate that it is handled by the default
2935. /// handler.
  2936. CaseIt findCaseValue(const ConstantInt *C) {
  2937. return CaseIt(
  2938. this,
  2939. const_cast<const SwitchInst *>(this)->findCaseValue(C)->getCaseIndex());
  2940. }
  2941. ConstCaseIt findCaseValue(const ConstantInt *C) const {
  2942. ConstCaseIt I = llvm::find_if(cases(), [C](const ConstCaseHandle &Case) {
  2943. return Case.getCaseValue() == C;
  2944. });
  2945. if (I != case_end())
  2946. return I;
  2947. return case_default();
  2948. }
  2949. /// Finds the unique case value for a given successor. Returns null if the
  2950. /// successor is not found, not unique, or is the default case.
  2951. ConstantInt *findCaseDest(BasicBlock *BB) {
  2952. if (BB == getDefaultDest())
  2953. return nullptr;
  2954. ConstantInt *CI = nullptr;
  2955. for (auto Case : cases()) {
  2956. if (Case.getCaseSuccessor() != BB)
  2957. continue;
  2958. if (CI)
  2959. return nullptr; // Multiple cases lead to BB.
  2960. CI = Case.getCaseValue();
  2961. }
  2962. return CI;
  2963. }
  2964. /// Add an entry to the switch instruction.
  2965. /// Note:
  2966. /// This action invalidates case_end(). Old case_end() iterator will
  2967. /// point to the added case.
  2968. void addCase(ConstantInt *OnVal, BasicBlock *Dest);
  2969. /// This method removes the specified case and its successor from the switch
  2970. /// instruction. Note that this operation may reorder the remaining cases at
  2971. /// index idx and above.
  2972. /// Note:
  2973. /// This action invalidates iterators for all cases following the one removed,
  2974. /// including the case_end() iterator. It returns an iterator for the next
  2975. /// case.
  2976. CaseIt removeCase(CaseIt I);
  2977. unsigned getNumSuccessors() const { return getNumOperands()/2; }
  2978. BasicBlock *getSuccessor(unsigned idx) const {
  2979. assert(idx < getNumSuccessors() &&"Successor idx out of range for switch!");
  2980. return cast<BasicBlock>(getOperand(idx*2+1));
  2981. }
  2982. void setSuccessor(unsigned idx, BasicBlock *NewSucc) {
  2983. assert(idx < getNumSuccessors() && "Successor # out of range for switch!");
  2984. setOperand(idx * 2 + 1, NewSucc);
  2985. }
  2986. // Methods for support type inquiry through isa, cast, and dyn_cast:
  2987. static bool classof(const Instruction *I) {
  2988. return I->getOpcode() == Instruction::Switch;
  2989. }
  2990. static bool classof(const Value *V) {
  2991. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2992. }
  2993. };
  2994. /// A wrapper class to simplify modification of SwitchInst cases along with
  2995. /// their prof branch_weights metadata.
  2996. class SwitchInstProfUpdateWrapper {
  2997. SwitchInst &SI;
  2998. Optional<SmallVector<uint32_t, 8> > Weights = None;
  2999. bool Changed = false;
  3000. protected:
  3001. static MDNode *getProfBranchWeightsMD(const SwitchInst &SI);
  3002. MDNode *buildProfBranchWeightsMD();
  3003. void init();
  3004. public:
  3005. using CaseWeightOpt = Optional<uint32_t>;
  3006. SwitchInst *operator->() { return &SI; }
  3007. SwitchInst &operator*() { return SI; }
  3008. operator SwitchInst *() { return &SI; }
  3009. SwitchInstProfUpdateWrapper(SwitchInst &SI) : SI(SI) { init(); }
  3010. ~SwitchInstProfUpdateWrapper() {
  3011. if (Changed)
  3012. SI.setMetadata(LLVMContext::MD_prof, buildProfBranchWeightsMD());
  3013. }
  3014. /// Delegate the call to the underlying SwitchInst::removeCase() and remove
3015. /// corresponding branch weight.
  3016. SwitchInst::CaseIt removeCase(SwitchInst::CaseIt I);
  3017. /// Delegate the call to the underlying SwitchInst::addCase() and set the
  3018. /// specified branch weight for the added case.
  3019. void addCase(ConstantInt *OnVal, BasicBlock *Dest, CaseWeightOpt W);
  3020. /// Delegate the call to the underlying SwitchInst::eraseFromParent() and mark
3021. /// this object so it does not touch the underlying SwitchInst in its destructor.
  3022. SymbolTableList<Instruction>::iterator eraseFromParent();
  3023. void setSuccessorWeight(unsigned idx, CaseWeightOpt W);
  3024. CaseWeightOpt getSuccessorWeight(unsigned idx);
  3025. static CaseWeightOpt getSuccessorWeight(const SwitchInst &SI, unsigned idx);
  3026. };
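// Usage sketch (hypothetical switch SI, value OnVal, block Dest): routing case
// edits through the wrapper keeps any !prof branch_weights metadata in sync;
// the metadata is rewritten when the wrapper is destroyed.
//
//   // {
//   //   SwitchInstProfUpdateWrapper SIW(*SI);
//   //   SIW.addCase(OnVal, Dest, /*W=*/None);  // weight unknown for new case
//   // } // destructor updates MD_prof if anything changed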
  3027. template <>
  3028. struct OperandTraits<SwitchInst> : public HungoffOperandTraits<2> {
  3029. };
  3030. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(SwitchInst, Value)
  3031. //===----------------------------------------------------------------------===//
  3032. // IndirectBrInst Class
  3033. //===----------------------------------------------------------------------===//
  3034. //===---------------------------------------------------------------------------
  3035. /// Indirect Branch Instruction.
  3036. ///
  3037. class IndirectBrInst : public Instruction {
  3038. unsigned ReservedSpace;
  3039. // Operand[0] = Address to jump to
  3040. // Operand[n+1] = n-th destination
  3041. IndirectBrInst(const IndirectBrInst &IBI);
  3042. /// Create a new indirectbr instruction, specifying an
  3043. /// Address to jump to. The number of expected destinations can be specified
  3044. /// here to make memory allocation more efficient. This constructor can also
  3045. /// autoinsert before another instruction.
  3046. IndirectBrInst(Value *Address, unsigned NumDests, Instruction *InsertBefore);
  3047. /// Create a new indirectbr instruction, specifying an
  3048. /// Address to jump to. The number of expected destinations can be specified
  3049. /// here to make memory allocation more efficient. This constructor also
  3050. /// autoinserts at the end of the specified BasicBlock.
  3051. IndirectBrInst(Value *Address, unsigned NumDests, BasicBlock *InsertAtEnd);
  3052. // allocate space for exactly zero operands
  3053. void *operator new(size_t S) { return User::operator new(S); }
  3054. void init(Value *Address, unsigned NumDests);
  3055. void growOperands();
  3056. protected:
  3057. // Note: Instruction needs to be a friend here to call cloneImpl.
  3058. friend class Instruction;
  3059. IndirectBrInst *cloneImpl() const;
  3060. public:
  3061. void operator delete(void *Ptr) { User::operator delete(Ptr); }
  3062. /// Iterator type that casts an operand to a basic block.
  3063. ///
  3064. /// This only makes sense because the successors are stored as adjacent
  3065. /// operands for indirectbr instructions.
  3066. struct succ_op_iterator
  3067. : iterator_adaptor_base<succ_op_iterator, value_op_iterator,
  3068. std::random_access_iterator_tag, BasicBlock *,
  3069. ptrdiff_t, BasicBlock *, BasicBlock *> {
  3070. explicit succ_op_iterator(value_op_iterator I) : iterator_adaptor_base(I) {}
  3071. BasicBlock *operator*() const { return cast<BasicBlock>(*I); }
  3072. BasicBlock *operator->() const { return operator*(); }
  3073. };
  3074. /// The const version of `succ_op_iterator`.
  3075. struct const_succ_op_iterator
  3076. : iterator_adaptor_base<const_succ_op_iterator, const_value_op_iterator,
  3077. std::random_access_iterator_tag,
  3078. const BasicBlock *, ptrdiff_t, const BasicBlock *,
  3079. const BasicBlock *> {
  3080. explicit const_succ_op_iterator(const_value_op_iterator I)
  3081. : iterator_adaptor_base(I) {}
  3082. const BasicBlock *operator*() const { return cast<BasicBlock>(*I); }
  3083. const BasicBlock *operator->() const { return operator*(); }
  3084. };
  3085. static IndirectBrInst *Create(Value *Address, unsigned NumDests,
  3086. Instruction *InsertBefore = nullptr) {
  3087. return new IndirectBrInst(Address, NumDests, InsertBefore);
  3088. }
  3089. static IndirectBrInst *Create(Value *Address, unsigned NumDests,
  3090. BasicBlock *InsertAtEnd) {
  3091. return new IndirectBrInst(Address, NumDests, InsertAtEnd);
  3092. }
  3093. /// Provide fast operand accessors.
  3094. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  3095. // Accessor Methods for IndirectBrInst instruction.
  3096. Value *getAddress() { return getOperand(0); }
  3097. const Value *getAddress() const { return getOperand(0); }
  3098. void setAddress(Value *V) { setOperand(0, V); }
3099. /// Return the number of possible destinations in this
  3100. /// indirectbr instruction.
  3101. unsigned getNumDestinations() const { return getNumOperands()-1; }
  3102. /// Return the specified destination.
  3103. BasicBlock *getDestination(unsigned i) { return getSuccessor(i); }
  3104. const BasicBlock *getDestination(unsigned i) const { return getSuccessor(i); }
  3105. /// Add a destination.
  3106. ///
  3107. void addDestination(BasicBlock *Dest);
  3108. /// This method removes the specified successor from the
  3109. /// indirectbr instruction.
  3110. void removeDestination(unsigned i);
  3111. unsigned getNumSuccessors() const { return getNumOperands()-1; }
  3112. BasicBlock *getSuccessor(unsigned i) const {
  3113. return cast<BasicBlock>(getOperand(i+1));
  3114. }
  3115. void setSuccessor(unsigned i, BasicBlock *NewSucc) {
  3116. setOperand(i + 1, NewSucc);
  3117. }
  3118. iterator_range<succ_op_iterator> successors() {
  3119. return make_range(succ_op_iterator(std::next(value_op_begin())),
  3120. succ_op_iterator(value_op_end()));
  3121. }
  3122. iterator_range<const_succ_op_iterator> successors() const {
  3123. return make_range(const_succ_op_iterator(std::next(value_op_begin())),
  3124. const_succ_op_iterator(value_op_end()));
  3125. }
  3126. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3127. static bool classof(const Instruction *I) {
  3128. return I->getOpcode() == Instruction::IndirectBr;
  3129. }
  3130. static bool classof(const Value *V) {
  3131. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3132. }
  3133. };
  3134. template <>
  3135. struct OperandTraits<IndirectBrInst> : public HungoffOperandTraits<1> {
  3136. };
  3137. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(IndirectBrInst, Value)
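// Example (illustrative sketch, using only the API above): building an
// indirectbr. `Source` and `Target` are assumed to be existing BasicBlocks of
// the same function.
//
//   Value *Addr = BlockAddress::get(Target);
//   IndirectBrInst *IBI =
//       IndirectBrInst::Create(Addr, /*NumDests=*/1, /*InsertAtEnd=*/Source);
//   IBI->addDestination(Target);
//   for (BasicBlock *Succ : IBI->successors())
//     (void)Succ; // each successor is stored as an operand after the address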
  3138. //===----------------------------------------------------------------------===//
  3139. // InvokeInst Class
  3140. //===----------------------------------------------------------------------===//
  3141. /// Invoke instruction. The SubclassData field is used to hold the
  3142. /// calling convention of the call.
  3143. ///
  3144. class InvokeInst : public CallBase {
  3145. /// The number of operands for this call beyond the called function,
  3146. /// arguments, and operand bundles.
  3147. static constexpr int NumExtraOperands = 2;
  3148. /// The index from the end of the operand array to the normal destination.
  3149. static constexpr int NormalDestOpEndIdx = -3;
  3150. /// The index from the end of the operand array to the unwind destination.
  3151. static constexpr int UnwindDestOpEndIdx = -2;
  3152. InvokeInst(const InvokeInst &BI);
3153. /// Construct an InvokeInst given a range of arguments.
3154. ///
3155. /// \p NumOperands is the total number of operands to allocate for this call.
  3156. inline InvokeInst(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3157. BasicBlock *IfException, ArrayRef<Value *> Args,
  3158. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3159. const Twine &NameStr, Instruction *InsertBefore);
  3160. inline InvokeInst(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3161. BasicBlock *IfException, ArrayRef<Value *> Args,
  3162. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3163. const Twine &NameStr, BasicBlock *InsertAtEnd);
  3164. void init(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3165. BasicBlock *IfException, ArrayRef<Value *> Args,
  3166. ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr);
  3167. /// Compute the number of operands to allocate.
  3168. static int ComputeNumOperands(int NumArgs, int NumBundleInputs = 0) {
  3169. // We need one operand for the called function, plus our extra operands and
  3170. // the input operand counts provided.
  3171. return 1 + NumExtraOperands + NumArgs + NumBundleInputs;
  3172. }
  3173. protected:
  3174. // Note: Instruction needs to be a friend here to call cloneImpl.
  3175. friend class Instruction;
  3176. InvokeInst *cloneImpl() const;
  3177. public:
  3178. static InvokeInst *Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3179. BasicBlock *IfException, ArrayRef<Value *> Args,
  3180. const Twine &NameStr,
  3181. Instruction *InsertBefore = nullptr) {
  3182. int NumOperands = ComputeNumOperands(Args.size());
  3183. return new (NumOperands)
  3184. InvokeInst(Ty, Func, IfNormal, IfException, Args, None, NumOperands,
  3185. NameStr, InsertBefore);
  3186. }
  3187. static InvokeInst *Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3188. BasicBlock *IfException, ArrayRef<Value *> Args,
  3189. ArrayRef<OperandBundleDef> Bundles = None,
  3190. const Twine &NameStr = "",
  3191. Instruction *InsertBefore = nullptr) {
  3192. int NumOperands =
  3193. ComputeNumOperands(Args.size(), CountBundleInputs(Bundles));
  3194. unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);
  3195. return new (NumOperands, DescriptorBytes)
  3196. InvokeInst(Ty, Func, IfNormal, IfException, Args, Bundles, NumOperands,
  3197. NameStr, InsertBefore);
  3198. }
  3199. static InvokeInst *Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3200. BasicBlock *IfException, ArrayRef<Value *> Args,
  3201. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3202. int NumOperands = ComputeNumOperands(Args.size());
  3203. return new (NumOperands)
  3204. InvokeInst(Ty, Func, IfNormal, IfException, Args, None, NumOperands,
  3205. NameStr, InsertAtEnd);
  3206. }
  3207. static InvokeInst *Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3208. BasicBlock *IfException, ArrayRef<Value *> Args,
  3209. ArrayRef<OperandBundleDef> Bundles,
  3210. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3211. int NumOperands =
  3212. ComputeNumOperands(Args.size(), CountBundleInputs(Bundles));
  3213. unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);
  3214. return new (NumOperands, DescriptorBytes)
  3215. InvokeInst(Ty, Func, IfNormal, IfException, Args, Bundles, NumOperands,
  3216. NameStr, InsertAtEnd);
  3217. }
  3218. static InvokeInst *Create(FunctionCallee Func, BasicBlock *IfNormal,
  3219. BasicBlock *IfException, ArrayRef<Value *> Args,
  3220. const Twine &NameStr,
  3221. Instruction *InsertBefore = nullptr) {
  3222. return Create(Func.getFunctionType(), Func.getCallee(), IfNormal,
  3223. IfException, Args, None, NameStr, InsertBefore);
  3224. }
  3225. static InvokeInst *Create(FunctionCallee Func, BasicBlock *IfNormal,
  3226. BasicBlock *IfException, ArrayRef<Value *> Args,
  3227. ArrayRef<OperandBundleDef> Bundles = None,
  3228. const Twine &NameStr = "",
  3229. Instruction *InsertBefore = nullptr) {
  3230. return Create(Func.getFunctionType(), Func.getCallee(), IfNormal,
  3231. IfException, Args, Bundles, NameStr, InsertBefore);
  3232. }
  3233. static InvokeInst *Create(FunctionCallee Func, BasicBlock *IfNormal,
  3234. BasicBlock *IfException, ArrayRef<Value *> Args,
  3235. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3236. return Create(Func.getFunctionType(), Func.getCallee(), IfNormal,
  3237. IfException, Args, NameStr, InsertAtEnd);
  3238. }
  3239. static InvokeInst *Create(FunctionCallee Func, BasicBlock *IfNormal,
  3240. BasicBlock *IfException, ArrayRef<Value *> Args,
  3241. ArrayRef<OperandBundleDef> Bundles,
  3242. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3243. return Create(Func.getFunctionType(), Func.getCallee(), IfNormal,
  3244. IfException, Args, Bundles, NameStr, InsertAtEnd);
  3245. }
  3246. /// Create a clone of \p II with a different set of operand bundles and
  3247. /// insert it before \p InsertPt.
  3248. ///
  3249. /// The returned invoke instruction is identical to \p II in every way except
  3250. /// that the operand bundles for the new instruction are set to the operand
  3251. /// bundles in \p Bundles.
  3252. static InvokeInst *Create(InvokeInst *II, ArrayRef<OperandBundleDef> Bundles,
  3253. Instruction *InsertPt = nullptr);
  3254. // get*Dest - Return the destination basic blocks...
  3255. BasicBlock *getNormalDest() const {
  3256. return cast<BasicBlock>(Op<NormalDestOpEndIdx>());
  3257. }
  3258. BasicBlock *getUnwindDest() const {
  3259. return cast<BasicBlock>(Op<UnwindDestOpEndIdx>());
  3260. }
  3261. void setNormalDest(BasicBlock *B) {
  3262. Op<NormalDestOpEndIdx>() = reinterpret_cast<Value *>(B);
  3263. }
  3264. void setUnwindDest(BasicBlock *B) {
  3265. Op<UnwindDestOpEndIdx>() = reinterpret_cast<Value *>(B);
  3266. }
  3267. /// Get the landingpad instruction from the landing pad
  3268. /// block (the unwind destination).
  3269. LandingPadInst *getLandingPadInst() const;
  3270. BasicBlock *getSuccessor(unsigned i) const {
  3271. assert(i < 2 && "Successor # out of range for invoke!");
  3272. return i == 0 ? getNormalDest() : getUnwindDest();
  3273. }
  3274. void setSuccessor(unsigned i, BasicBlock *NewSucc) {
  3275. assert(i < 2 && "Successor # out of range for invoke!");
  3276. if (i == 0)
  3277. setNormalDest(NewSucc);
  3278. else
  3279. setUnwindDest(NewSucc);
  3280. }
  3281. unsigned getNumSuccessors() const { return 2; }
  3282. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3283. static bool classof(const Instruction *I) {
  3284. return (I->getOpcode() == Instruction::Invoke);
  3285. }
  3286. static bool classof(const Value *V) {
  3287. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3288. }
  3289. private:
  3290. // Shadow Instruction::setInstructionSubclassData with a private forwarding
  3291. // method so that subclasses cannot accidentally use it.
  3292. template <typename Bitfield>
  3293. void setSubclassData(typename Bitfield::Type Value) {
  3294. Instruction::setSubclassData<Bitfield>(Value);
  3295. }
  3296. };
  3297. InvokeInst::InvokeInst(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3298. BasicBlock *IfException, ArrayRef<Value *> Args,
  3299. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3300. const Twine &NameStr, Instruction *InsertBefore)
  3301. : CallBase(Ty->getReturnType(), Instruction::Invoke,
  3302. OperandTraits<CallBase>::op_end(this) - NumOperands, NumOperands,
  3303. InsertBefore) {
  3304. init(Ty, Func, IfNormal, IfException, Args, Bundles, NameStr);
  3305. }
  3306. InvokeInst::InvokeInst(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
  3307. BasicBlock *IfException, ArrayRef<Value *> Args,
  3308. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3309. const Twine &NameStr, BasicBlock *InsertAtEnd)
  3310. : CallBase(Ty->getReturnType(), Instruction::Invoke,
  3311. OperandTraits<CallBase>::op_end(this) - NumOperands, NumOperands,
  3312. InsertAtEnd) {
  3313. init(Ty, Func, IfNormal, IfException, Args, Bundles, NameStr);
  3314. }
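// Example (illustrative sketch): emitting an invoke of a possibly-throwing
// callee. `Callee` (a FunctionCallee), `NormalBB`, `UnwindBB`, and `CurBB` are
// assumed to exist; `UnwindBB` must begin with a landingpad.
//
//   InvokeInst *II = InvokeInst::Create(Callee, NormalBB, UnwindBB,
//                                       /*Args=*/None, "", CurBB);
//   II->setCallingConv(CallingConv::C); // stored in SubclassData, per above
//   LandingPadInst *LP = II->getLandingPadInst();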
  3315. //===----------------------------------------------------------------------===//
  3316. // CallBrInst Class
  3317. //===----------------------------------------------------------------------===//
  3318. /// CallBr instruction, tracking function calls that may not return control but
  3319. /// instead transfer it to a third location. The SubclassData field is used to
  3320. /// hold the calling convention of the call.
  3321. ///
  3322. class CallBrInst : public CallBase {
  3323. unsigned NumIndirectDests;
  3324. CallBrInst(const CallBrInst &BI);
3325. /// Construct a CallBrInst given a range of arguments.
3326. ///
3327. /// \p NumOperands is the total number of operands to allocate for this call.
  3328. inline CallBrInst(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
  3329. ArrayRef<BasicBlock *> IndirectDests,
  3330. ArrayRef<Value *> Args,
  3331. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3332. const Twine &NameStr, Instruction *InsertBefore);
  3333. inline CallBrInst(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
  3334. ArrayRef<BasicBlock *> IndirectDests,
  3335. ArrayRef<Value *> Args,
  3336. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3337. const Twine &NameStr, BasicBlock *InsertAtEnd);
  3338. void init(FunctionType *FTy, Value *Func, BasicBlock *DefaultDest,
  3339. ArrayRef<BasicBlock *> IndirectDests, ArrayRef<Value *> Args,
  3340. ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr);
3341. /// If the indirect destinations change, scan and update the argument list.
  3342. void updateArgBlockAddresses(unsigned i, BasicBlock *B);
  3343. /// Compute the number of operands to allocate.
  3344. static int ComputeNumOperands(int NumArgs, int NumIndirectDests,
  3345. int NumBundleInputs = 0) {
  3346. // We need one operand for the called function, plus our extra operands and
  3347. // the input operand counts provided.
  3348. return 2 + NumIndirectDests + NumArgs + NumBundleInputs;
  3349. }
  3350. protected:
  3351. // Note: Instruction needs to be a friend here to call cloneImpl.
  3352. friend class Instruction;
  3353. CallBrInst *cloneImpl() const;
  3354. public:
  3355. static CallBrInst *Create(FunctionType *Ty, Value *Func,
  3356. BasicBlock *DefaultDest,
  3357. ArrayRef<BasicBlock *> IndirectDests,
  3358. ArrayRef<Value *> Args, const Twine &NameStr,
  3359. Instruction *InsertBefore = nullptr) {
  3360. int NumOperands = ComputeNumOperands(Args.size(), IndirectDests.size());
  3361. return new (NumOperands)
  3362. CallBrInst(Ty, Func, DefaultDest, IndirectDests, Args, None,
  3363. NumOperands, NameStr, InsertBefore);
  3364. }
  3365. static CallBrInst *Create(FunctionType *Ty, Value *Func,
  3366. BasicBlock *DefaultDest,
  3367. ArrayRef<BasicBlock *> IndirectDests,
  3368. ArrayRef<Value *> Args,
  3369. ArrayRef<OperandBundleDef> Bundles = None,
  3370. const Twine &NameStr = "",
  3371. Instruction *InsertBefore = nullptr) {
  3372. int NumOperands = ComputeNumOperands(Args.size(), IndirectDests.size(),
  3373. CountBundleInputs(Bundles));
  3374. unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);
  3375. return new (NumOperands, DescriptorBytes)
  3376. CallBrInst(Ty, Func, DefaultDest, IndirectDests, Args, Bundles,
  3377. NumOperands, NameStr, InsertBefore);
  3378. }
  3379. static CallBrInst *Create(FunctionType *Ty, Value *Func,
  3380. BasicBlock *DefaultDest,
  3381. ArrayRef<BasicBlock *> IndirectDests,
  3382. ArrayRef<Value *> Args, const Twine &NameStr,
  3383. BasicBlock *InsertAtEnd) {
  3384. int NumOperands = ComputeNumOperands(Args.size(), IndirectDests.size());
  3385. return new (NumOperands)
  3386. CallBrInst(Ty, Func, DefaultDest, IndirectDests, Args, None,
  3387. NumOperands, NameStr, InsertAtEnd);
  3388. }
  3389. static CallBrInst *Create(FunctionType *Ty, Value *Func,
  3390. BasicBlock *DefaultDest,
  3391. ArrayRef<BasicBlock *> IndirectDests,
  3392. ArrayRef<Value *> Args,
  3393. ArrayRef<OperandBundleDef> Bundles,
  3394. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3395. int NumOperands = ComputeNumOperands(Args.size(), IndirectDests.size(),
  3396. CountBundleInputs(Bundles));
  3397. unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);
  3398. return new (NumOperands, DescriptorBytes)
  3399. CallBrInst(Ty, Func, DefaultDest, IndirectDests, Args, Bundles,
  3400. NumOperands, NameStr, InsertAtEnd);
  3401. }
  3402. static CallBrInst *Create(FunctionCallee Func, BasicBlock *DefaultDest,
  3403. ArrayRef<BasicBlock *> IndirectDests,
  3404. ArrayRef<Value *> Args, const Twine &NameStr,
  3405. Instruction *InsertBefore = nullptr) {
  3406. return Create(Func.getFunctionType(), Func.getCallee(), DefaultDest,
  3407. IndirectDests, Args, NameStr, InsertBefore);
  3408. }
  3409. static CallBrInst *Create(FunctionCallee Func, BasicBlock *DefaultDest,
  3410. ArrayRef<BasicBlock *> IndirectDests,
  3411. ArrayRef<Value *> Args,
  3412. ArrayRef<OperandBundleDef> Bundles = None,
  3413. const Twine &NameStr = "",
  3414. Instruction *InsertBefore = nullptr) {
  3415. return Create(Func.getFunctionType(), Func.getCallee(), DefaultDest,
  3416. IndirectDests, Args, Bundles, NameStr, InsertBefore);
  3417. }
  3418. static CallBrInst *Create(FunctionCallee Func, BasicBlock *DefaultDest,
  3419. ArrayRef<BasicBlock *> IndirectDests,
  3420. ArrayRef<Value *> Args, const Twine &NameStr,
  3421. BasicBlock *InsertAtEnd) {
  3422. return Create(Func.getFunctionType(), Func.getCallee(), DefaultDest,
  3423. IndirectDests, Args, NameStr, InsertAtEnd);
  3424. }
  3425. static CallBrInst *Create(FunctionCallee Func,
  3426. BasicBlock *DefaultDest,
  3427. ArrayRef<BasicBlock *> IndirectDests,
  3428. ArrayRef<Value *> Args,
  3429. ArrayRef<OperandBundleDef> Bundles,
  3430. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3431. return Create(Func.getFunctionType(), Func.getCallee(), DefaultDest,
  3432. IndirectDests, Args, Bundles, NameStr, InsertAtEnd);
  3433. }
  3434. /// Create a clone of \p CBI with a different set of operand bundles and
  3435. /// insert it before \p InsertPt.
  3436. ///
  3437. /// The returned callbr instruction is identical to \p CBI in every way
  3438. /// except that the operand bundles for the new instruction are set to the
  3439. /// operand bundles in \p Bundles.
  3440. static CallBrInst *Create(CallBrInst *CBI,
  3441. ArrayRef<OperandBundleDef> Bundles,
  3442. Instruction *InsertPt = nullptr);
  3443. /// Return the number of callbr indirect dest labels.
  3444. ///
  3445. unsigned getNumIndirectDests() const { return NumIndirectDests; }
3446. /// Return the i-th indirect dest label.
  3447. ///
  3448. Value *getIndirectDestLabel(unsigned i) const {
  3449. assert(i < getNumIndirectDests() && "Out of bounds!");
  3450. return getOperand(i + arg_size() + getNumTotalBundleOperands() + 1);
  3451. }
  3452. Value *getIndirectDestLabelUse(unsigned i) const {
  3453. assert(i < getNumIndirectDests() && "Out of bounds!");
  3454. return getOperandUse(i + arg_size() + getNumTotalBundleOperands() + 1);
  3455. }
  3456. // Return the destination basic blocks...
  3457. BasicBlock *getDefaultDest() const {
  3458. return cast<BasicBlock>(*(&Op<-1>() - getNumIndirectDests() - 1));
  3459. }
  3460. BasicBlock *getIndirectDest(unsigned i) const {
  3461. return cast_or_null<BasicBlock>(*(&Op<-1>() - getNumIndirectDests() + i));
  3462. }
  3463. SmallVector<BasicBlock *, 16> getIndirectDests() const {
  3464. SmallVector<BasicBlock *, 16> IndirectDests;
  3465. for (unsigned i = 0, e = getNumIndirectDests(); i < e; ++i)
  3466. IndirectDests.push_back(getIndirectDest(i));
  3467. return IndirectDests;
  3468. }
  3469. void setDefaultDest(BasicBlock *B) {
  3470. *(&Op<-1>() - getNumIndirectDests() - 1) = reinterpret_cast<Value *>(B);
  3471. }
  3472. void setIndirectDest(unsigned i, BasicBlock *B) {
  3473. updateArgBlockAddresses(i, B);
  3474. *(&Op<-1>() - getNumIndirectDests() + i) = reinterpret_cast<Value *>(B);
  3475. }
  3476. BasicBlock *getSuccessor(unsigned i) const {
3477. assert(i < getNumSuccessors() &&
  3478. "Successor # out of range for callbr!");
  3479. return i == 0 ? getDefaultDest() : getIndirectDest(i - 1);
  3480. }
  3481. void setSuccessor(unsigned i, BasicBlock *NewSucc) {
  3482. assert(i < getNumIndirectDests() + 1 &&
  3483. "Successor # out of range for callbr!");
  3484. return i == 0 ? setDefaultDest(NewSucc) : setIndirectDest(i - 1, NewSucc);
  3485. }
  3486. unsigned getNumSuccessors() const { return getNumIndirectDests() + 1; }
  3487. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3488. static bool classof(const Instruction *I) {
  3489. return (I->getOpcode() == Instruction::CallBr);
  3490. }
  3491. static bool classof(const Value *V) {
  3492. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3493. }
  3494. private:
  3495. // Shadow Instruction::setInstructionSubclassData with a private forwarding
  3496. // method so that subclasses cannot accidentally use it.
  3497. template <typename Bitfield>
  3498. void setSubclassData(typename Bitfield::Type Value) {
  3499. Instruction::setSubclassData<Bitfield>(Value);
  3500. }
  3501. };
  3502. CallBrInst::CallBrInst(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
  3503. ArrayRef<BasicBlock *> IndirectDests,
  3504. ArrayRef<Value *> Args,
  3505. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3506. const Twine &NameStr, Instruction *InsertBefore)
  3507. : CallBase(Ty->getReturnType(), Instruction::CallBr,
  3508. OperandTraits<CallBase>::op_end(this) - NumOperands, NumOperands,
  3509. InsertBefore) {
  3510. init(Ty, Func, DefaultDest, IndirectDests, Args, Bundles, NameStr);
  3511. }
  3512. CallBrInst::CallBrInst(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
  3513. ArrayRef<BasicBlock *> IndirectDests,
  3514. ArrayRef<Value *> Args,
  3515. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3516. const Twine &NameStr, BasicBlock *InsertAtEnd)
  3517. : CallBase(Ty->getReturnType(), Instruction::CallBr,
  3518. OperandTraits<CallBase>::op_end(this) - NumOperands, NumOperands,
  3519. InsertAtEnd) {
  3520. init(Ty, Func, DefaultDest, IndirectDests, Args, Bundles, NameStr);
  3521. }
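// Example (illustrative sketch): callbr is primarily used for 'asm goto'.
// `Asm` is assumed to be an InlineAsm value of function type `AsmFTy`, and
// `FallthroughBB`, `IndirectBB`, and `CurBB` existing blocks.
//
//   CallBrInst *CBI =
//       CallBrInst::Create(AsmFTy, Asm, /*DefaultDest=*/FallthroughBB,
//                          /*IndirectDests=*/{IndirectBB}, /*Args=*/None,
//                          "", CurBB);
//   unsigned NumDests = CBI->getNumIndirectDests(); // 1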
  3522. //===----------------------------------------------------------------------===//
  3523. // ResumeInst Class
  3524. //===----------------------------------------------------------------------===//
  3525. //===---------------------------------------------------------------------------
  3526. /// Resume the propagation of an exception.
  3527. ///
  3528. class ResumeInst : public Instruction {
  3529. ResumeInst(const ResumeInst &RI);
  3530. explicit ResumeInst(Value *Exn, Instruction *InsertBefore=nullptr);
  3531. ResumeInst(Value *Exn, BasicBlock *InsertAtEnd);
  3532. protected:
  3533. // Note: Instruction needs to be a friend here to call cloneImpl.
  3534. friend class Instruction;
  3535. ResumeInst *cloneImpl() const;
  3536. public:
  3537. static ResumeInst *Create(Value *Exn, Instruction *InsertBefore = nullptr) {
  3538. return new(1) ResumeInst(Exn, InsertBefore);
  3539. }
  3540. static ResumeInst *Create(Value *Exn, BasicBlock *InsertAtEnd) {
  3541. return new(1) ResumeInst(Exn, InsertAtEnd);
  3542. }
  3543. /// Provide fast operand accessors
  3544. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  3545. /// Convenience accessor.
  3546. Value *getValue() const { return Op<0>(); }
  3547. unsigned getNumSuccessors() const { return 0; }
  3548. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3549. static bool classof(const Instruction *I) {
  3550. return I->getOpcode() == Instruction::Resume;
  3551. }
  3552. static bool classof(const Value *V) {
  3553. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3554. }
  3555. private:
  3556. BasicBlock *getSuccessor(unsigned idx) const {
  3557. llvm_unreachable("ResumeInst has no successors!");
  3558. }
  3559. void setSuccessor(unsigned idx, BasicBlock *NewSucc) {
  3560. llvm_unreachable("ResumeInst has no successors!");
  3561. }
  3562. };
  3563. template <>
  3564. struct OperandTraits<ResumeInst> :
  3565. public FixedNumOperandTraits<ResumeInst, 1> {
  3566. };
  3567. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ResumeInst, Value)
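// Example (illustrative sketch): re-raising an in-flight exception from a
// cleanup block. `LP` is assumed to be that block's LandingPadInst and
// `CleanupBB` the block being filled.
//
//   ResumeInst::Create(/*Exn=*/LP, /*InsertAtEnd=*/CleanupBB);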
  3568. //===----------------------------------------------------------------------===//
  3569. // CatchSwitchInst Class
  3570. //===----------------------------------------------------------------------===//
  3571. class CatchSwitchInst : public Instruction {
  3572. using UnwindDestField = BoolBitfieldElementT<0>;
  3573. /// The number of operands actually allocated. NumOperands is
  3574. /// the number actually in use.
  3575. unsigned ReservedSpace;
  3576. // Operand[0] = Outer scope
  3577. // Operand[1] = Unwind block destination
  3578. // Operand[n] = BasicBlock to go to on match
  3579. CatchSwitchInst(const CatchSwitchInst &CSI);
3580. /// Create a new catchswitch instruction, specifying an outer scope (parent
3581. /// pad) and an optional unwind destination. The number of handlers can be
3582. /// specified here to make memory allocation more efficient.
3583. /// This constructor can also autoinsert before another instruction.
  3584. CatchSwitchInst(Value *ParentPad, BasicBlock *UnwindDest,
  3585. unsigned NumHandlers, const Twine &NameStr,
  3586. Instruction *InsertBefore);
3587. /// Create a new catchswitch instruction, specifying an outer scope (parent
3588. /// pad) and an optional unwind destination. The number of handlers can be
3589. /// specified here to make memory allocation more efficient.
3590. /// This constructor also autoinserts at the end of the specified BasicBlock.
  3591. CatchSwitchInst(Value *ParentPad, BasicBlock *UnwindDest,
  3592. unsigned NumHandlers, const Twine &NameStr,
  3593. BasicBlock *InsertAtEnd);
  3594. // allocate space for exactly zero operands
  3595. void *operator new(size_t S) { return User::operator new(S); }
  3596. void init(Value *ParentPad, BasicBlock *UnwindDest, unsigned NumReserved);
  3597. void growOperands(unsigned Size);
  3598. protected:
  3599. // Note: Instruction needs to be a friend here to call cloneImpl.
  3600. friend class Instruction;
  3601. CatchSwitchInst *cloneImpl() const;
  3602. public:
  3603. void operator delete(void *Ptr) { return User::operator delete(Ptr); }
  3604. static CatchSwitchInst *Create(Value *ParentPad, BasicBlock *UnwindDest,
  3605. unsigned NumHandlers,
  3606. const Twine &NameStr = "",
  3607. Instruction *InsertBefore = nullptr) {
  3608. return new CatchSwitchInst(ParentPad, UnwindDest, NumHandlers, NameStr,
  3609. InsertBefore);
  3610. }
  3611. static CatchSwitchInst *Create(Value *ParentPad, BasicBlock *UnwindDest,
  3612. unsigned NumHandlers, const Twine &NameStr,
  3613. BasicBlock *InsertAtEnd) {
  3614. return new CatchSwitchInst(ParentPad, UnwindDest, NumHandlers, NameStr,
  3615. InsertAtEnd);
  3616. }
  3617. /// Provide fast operand accessors
  3618. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
3619. // Accessor methods for the parent pad.
  3620. Value *getParentPad() const { return getOperand(0); }
  3621. void setParentPad(Value *ParentPad) { setOperand(0, ParentPad); }
3622. // Accessor methods for the optional unwind destination.
  3623. bool hasUnwindDest() const { return getSubclassData<UnwindDestField>(); }
  3624. bool unwindsToCaller() const { return !hasUnwindDest(); }
  3625. BasicBlock *getUnwindDest() const {
  3626. if (hasUnwindDest())
  3627. return cast<BasicBlock>(getOperand(1));
  3628. return nullptr;
  3629. }
  3630. void setUnwindDest(BasicBlock *UnwindDest) {
  3631. assert(UnwindDest);
  3632. assert(hasUnwindDest());
  3633. setOperand(1, UnwindDest);
  3634. }
3635. /// Return the number of 'handlers' in this catchswitch
3636. /// instruction, excluding the unwind destination.
  3637. unsigned getNumHandlers() const {
  3638. if (hasUnwindDest())
  3639. return getNumOperands() - 2;
  3640. return getNumOperands() - 1;
  3641. }
  3642. private:
  3643. static BasicBlock *handler_helper(Value *V) { return cast<BasicBlock>(V); }
  3644. static const BasicBlock *handler_helper(const Value *V) {
  3645. return cast<BasicBlock>(V);
  3646. }
  3647. public:
  3648. using DerefFnTy = BasicBlock *(*)(Value *);
  3649. using handler_iterator = mapped_iterator<op_iterator, DerefFnTy>;
  3650. using handler_range = iterator_range<handler_iterator>;
  3651. using ConstDerefFnTy = const BasicBlock *(*)(const Value *);
  3652. using const_handler_iterator =
  3653. mapped_iterator<const_op_iterator, ConstDerefFnTy>;
  3654. using const_handler_range = iterator_range<const_handler_iterator>;
  3655. /// Returns an iterator that points to the first handler in CatchSwitchInst.
  3656. handler_iterator handler_begin() {
  3657. op_iterator It = op_begin() + 1;
  3658. if (hasUnwindDest())
  3659. ++It;
  3660. return handler_iterator(It, DerefFnTy(handler_helper));
  3661. }
  3662. /// Returns an iterator that points to the first handler in the
  3663. /// CatchSwitchInst.
  3664. const_handler_iterator handler_begin() const {
  3665. const_op_iterator It = op_begin() + 1;
  3666. if (hasUnwindDest())
  3667. ++It;
  3668. return const_handler_iterator(It, ConstDerefFnTy(handler_helper));
  3669. }
3670. /// Returns an iterator that points one past the last
3671. /// handler in the CatchSwitchInst.
  3672. handler_iterator handler_end() {
  3673. return handler_iterator(op_end(), DerefFnTy(handler_helper));
  3674. }
  3675. /// Returns an iterator that points one past the last handler in the
  3676. /// CatchSwitchInst.
  3677. const_handler_iterator handler_end() const {
  3678. return const_handler_iterator(op_end(), ConstDerefFnTy(handler_helper));
  3679. }
  3680. /// iteration adapter for range-for loops.
  3681. handler_range handlers() {
  3682. return make_range(handler_begin(), handler_end());
  3683. }
  3684. /// iteration adapter for range-for loops.
  3685. const_handler_range handlers() const {
  3686. return make_range(handler_begin(), handler_end());
  3687. }
3688. /// Add an entry to the catchswitch instruction.
3689. /// Note:
3690. /// This action invalidates handler_end(). The old handler_end() iterator
3691. /// will point to the added handler.
  3692. void addHandler(BasicBlock *Dest);
  3693. void removeHandler(handler_iterator HI);
  3694. unsigned getNumSuccessors() const { return getNumOperands() - 1; }
  3695. BasicBlock *getSuccessor(unsigned Idx) const {
  3696. assert(Idx < getNumSuccessors() &&
  3697. "Successor # out of range for catchswitch!");
  3698. return cast<BasicBlock>(getOperand(Idx + 1));
  3699. }
  3700. void setSuccessor(unsigned Idx, BasicBlock *NewSucc) {
  3701. assert(Idx < getNumSuccessors() &&
  3702. "Successor # out of range for catchswitch!");
  3703. setOperand(Idx + 1, NewSucc);
  3704. }
  3705. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3706. static bool classof(const Instruction *I) {
  3707. return I->getOpcode() == Instruction::CatchSwitch;
  3708. }
  3709. static bool classof(const Value *V) {
  3710. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3711. }
  3712. };
  3713. template <>
  3714. struct OperandTraits<CatchSwitchInst> : public HungoffOperandTraits<2> {};
  3715. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(CatchSwitchInst, Value)
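// Example (illustrative sketch): a function-level catchswitch that unwinds to
// the caller. `Ctx` is assumed to be the LLVMContext, `DispatchBB` and
// `HandlerBB` existing blocks.
//
//   auto *CS = CatchSwitchInst::Create(ConstantTokenNone::get(Ctx),
//                                      /*UnwindDest=*/nullptr,
//                                      /*NumHandlers=*/1, "cs", DispatchBB);
//   CS->addHandler(HandlerBB);
//   for (const BasicBlock *Handler : CS->handlers())
//     (void)Handler; // handlers exclude the (absent) unwind destination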
  3716. //===----------------------------------------------------------------------===//
  3717. // CleanupPadInst Class
  3718. //===----------------------------------------------------------------------===//
  3719. class CleanupPadInst : public FuncletPadInst {
  3720. private:
  3721. explicit CleanupPadInst(Value *ParentPad, ArrayRef<Value *> Args,
  3722. unsigned Values, const Twine &NameStr,
  3723. Instruction *InsertBefore)
  3724. : FuncletPadInst(Instruction::CleanupPad, ParentPad, Args, Values,
  3725. NameStr, InsertBefore) {}
  3726. explicit CleanupPadInst(Value *ParentPad, ArrayRef<Value *> Args,
  3727. unsigned Values, const Twine &NameStr,
  3728. BasicBlock *InsertAtEnd)
  3729. : FuncletPadInst(Instruction::CleanupPad, ParentPad, Args, Values,
  3730. NameStr, InsertAtEnd) {}
  3731. public:
  3732. static CleanupPadInst *Create(Value *ParentPad, ArrayRef<Value *> Args = None,
  3733. const Twine &NameStr = "",
  3734. Instruction *InsertBefore = nullptr) {
  3735. unsigned Values = 1 + Args.size();
  3736. return new (Values)
  3737. CleanupPadInst(ParentPad, Args, Values, NameStr, InsertBefore);
  3738. }
  3739. static CleanupPadInst *Create(Value *ParentPad, ArrayRef<Value *> Args,
  3740. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3741. unsigned Values = 1 + Args.size();
  3742. return new (Values)
  3743. CleanupPadInst(ParentPad, Args, Values, NameStr, InsertAtEnd);
  3744. }
  3745. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  3746. static bool classof(const Instruction *I) {
  3747. return I->getOpcode() == Instruction::CleanupPad;
  3748. }
  3749. static bool classof(const Value *V) {
  3750. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3751. }
  3752. };
  3753. //===----------------------------------------------------------------------===//
  3754. // CatchPadInst Class
  3755. //===----------------------------------------------------------------------===//
  3756. class CatchPadInst : public FuncletPadInst {
  3757. private:
  3758. explicit CatchPadInst(Value *CatchSwitch, ArrayRef<Value *> Args,
  3759. unsigned Values, const Twine &NameStr,
  3760. Instruction *InsertBefore)
  3761. : FuncletPadInst(Instruction::CatchPad, CatchSwitch, Args, Values,
  3762. NameStr, InsertBefore) {}
  3763. explicit CatchPadInst(Value *CatchSwitch, ArrayRef<Value *> Args,
  3764. unsigned Values, const Twine &NameStr,
  3765. BasicBlock *InsertAtEnd)
  3766. : FuncletPadInst(Instruction::CatchPad, CatchSwitch, Args, Values,
  3767. NameStr, InsertAtEnd) {}
  3768. public:
  3769. static CatchPadInst *Create(Value *CatchSwitch, ArrayRef<Value *> Args,
  3770. const Twine &NameStr = "",
  3771. Instruction *InsertBefore = nullptr) {
  3772. unsigned Values = 1 + Args.size();
  3773. return new (Values)
  3774. CatchPadInst(CatchSwitch, Args, Values, NameStr, InsertBefore);
  3775. }
  3776. static CatchPadInst *Create(Value *CatchSwitch, ArrayRef<Value *> Args,
  3777. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3778. unsigned Values = 1 + Args.size();
  3779. return new (Values)
  3780. CatchPadInst(CatchSwitch, Args, Values, NameStr, InsertAtEnd);
  3781. }
  3782. /// Convenience accessors
  3783. CatchSwitchInst *getCatchSwitch() const {
  3784. return cast<CatchSwitchInst>(Op<-1>());
  3785. }
  3786. void setCatchSwitch(Value *CatchSwitch) {
  3787. assert(CatchSwitch);
  3788. Op<-1>() = CatchSwitch;
  3789. }
  3790. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  3791. static bool classof(const Instruction *I) {
  3792. return I->getOpcode() == Instruction::CatchPad;
  3793. }
  3794. static bool classof(const Value *V) {
  3795. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3796. }
  3797. };
  3798. //===----------------------------------------------------------------------===//
  3799. // CatchReturnInst Class
  3800. //===----------------------------------------------------------------------===//
  3801. class CatchReturnInst : public Instruction {
  3802. CatchReturnInst(const CatchReturnInst &RI);
  3803. CatchReturnInst(Value *CatchPad, BasicBlock *BB, Instruction *InsertBefore);
  3804. CatchReturnInst(Value *CatchPad, BasicBlock *BB, BasicBlock *InsertAtEnd);
  3805. void init(Value *CatchPad, BasicBlock *BB);
  3806. protected:
  3807. // Note: Instruction needs to be a friend here to call cloneImpl.
  3808. friend class Instruction;
  3809. CatchReturnInst *cloneImpl() const;
  3810. public:
  3811. static CatchReturnInst *Create(Value *CatchPad, BasicBlock *BB,
  3812. Instruction *InsertBefore = nullptr) {
  3813. assert(CatchPad);
  3814. assert(BB);
  3815. return new (2) CatchReturnInst(CatchPad, BB, InsertBefore);
  3816. }
  3817. static CatchReturnInst *Create(Value *CatchPad, BasicBlock *BB,
  3818. BasicBlock *InsertAtEnd) {
  3819. assert(CatchPad);
  3820. assert(BB);
  3821. return new (2) CatchReturnInst(CatchPad, BB, InsertAtEnd);
  3822. }
  3823. /// Provide fast operand accessors
  3824. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  3825. /// Convenience accessors.
  3826. CatchPadInst *getCatchPad() const { return cast<CatchPadInst>(Op<0>()); }
  3827. void setCatchPad(CatchPadInst *CatchPad) {
  3828. assert(CatchPad);
  3829. Op<0>() = CatchPad;
  3830. }
  3831. BasicBlock *getSuccessor() const { return cast<BasicBlock>(Op<1>()); }
  3832. void setSuccessor(BasicBlock *NewSucc) {
  3833. assert(NewSucc);
  3834. Op<1>() = NewSucc;
  3835. }
  3836. unsigned getNumSuccessors() const { return 1; }
  3837. /// Get the parentPad of this catchret's catchpad's catchswitch.
  3838. /// The successor block is implicitly a member of this funclet.
  3839. Value *getCatchSwitchParentPad() const {
  3840. return getCatchPad()->getCatchSwitch()->getParentPad();
  3841. }
  3842. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3843. static bool classof(const Instruction *I) {
  3844. return (I->getOpcode() == Instruction::CatchRet);
  3845. }
  3846. static bool classof(const Value *V) {
  3847. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3848. }
  3849. private:
  3850. BasicBlock *getSuccessor(unsigned Idx) const {
  3851. assert(Idx < getNumSuccessors() && "Successor # out of range for catchret!");
  3852. return getSuccessor();
  3853. }
  3854. void setSuccessor(unsigned Idx, BasicBlock *B) {
  3855. assert(Idx < getNumSuccessors() && "Successor # out of range for catchret!");
  3856. setSuccessor(B);
  3857. }
  3858. };
  3859. template <>
  3860. struct OperandTraits<CatchReturnInst>
  3861. : public FixedNumOperandTraits<CatchReturnInst, 2> {};
  3862. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(CatchReturnInst, Value)
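// Example (illustrative sketch): a catch handler funclet. `CS` is assumed to
// be a CatchSwitchInst, `HandlerBB` its handler block, and `ContinueBB` the
// block that resumes normal execution; catchpad arguments are
// personality-specific and elided here.
//
//   CatchPadInst *CP =
//       CatchPadInst::Create(CS, /*Args=*/None, "catch", HandlerBB);
//   CatchReturnInst::Create(CP, /*BB=*/ContinueBB, HandlerBB);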
  3863. //===----------------------------------------------------------------------===//
  3864. // CleanupReturnInst Class
  3865. //===----------------------------------------------------------------------===//
  3866. class CleanupReturnInst : public Instruction {
  3867. using UnwindDestField = BoolBitfieldElementT<0>;
  3868. private:
  3869. CleanupReturnInst(const CleanupReturnInst &RI);
  3870. CleanupReturnInst(Value *CleanupPad, BasicBlock *UnwindBB, unsigned Values,
  3871. Instruction *InsertBefore = nullptr);
  3872. CleanupReturnInst(Value *CleanupPad, BasicBlock *UnwindBB, unsigned Values,
  3873. BasicBlock *InsertAtEnd);
  3874. void init(Value *CleanupPad, BasicBlock *UnwindBB);
  3875. protected:
  3876. // Note: Instruction needs to be a friend here to call cloneImpl.
  3877. friend class Instruction;
  3878. CleanupReturnInst *cloneImpl() const;
  3879. public:
  3880. static CleanupReturnInst *Create(Value *CleanupPad,
  3881. BasicBlock *UnwindBB = nullptr,
  3882. Instruction *InsertBefore = nullptr) {
  3883. assert(CleanupPad);
  3884. unsigned Values = 1;
  3885. if (UnwindBB)
  3886. ++Values;
  3887. return new (Values)
  3888. CleanupReturnInst(CleanupPad, UnwindBB, Values, InsertBefore);
  3889. }
  3890. static CleanupReturnInst *Create(Value *CleanupPad, BasicBlock *UnwindBB,
  3891. BasicBlock *InsertAtEnd) {
  3892. assert(CleanupPad);
  3893. unsigned Values = 1;
  3894. if (UnwindBB)
  3895. ++Values;
  3896. return new (Values)
  3897. CleanupReturnInst(CleanupPad, UnwindBB, Values, InsertAtEnd);
  3898. }
  3899. /// Provide fast operand accessors
  3900. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  3901. bool hasUnwindDest() const { return getSubclassData<UnwindDestField>(); }
  3902. bool unwindsToCaller() const { return !hasUnwindDest(); }
  3903. /// Convenience accessor.
  3904. CleanupPadInst *getCleanupPad() const {
  3905. return cast<CleanupPadInst>(Op<0>());
  3906. }
  3907. void setCleanupPad(CleanupPadInst *CleanupPad) {
  3908. assert(CleanupPad);
  3909. Op<0>() = CleanupPad;
  3910. }
  3911. unsigned getNumSuccessors() const { return hasUnwindDest() ? 1 : 0; }
  3912. BasicBlock *getUnwindDest() const {
  3913. return hasUnwindDest() ? cast<BasicBlock>(Op<1>()) : nullptr;
  3914. }
  3915. void setUnwindDest(BasicBlock *NewDest) {
  3916. assert(NewDest);
  3917. assert(hasUnwindDest());
  3918. Op<1>() = NewDest;
  3919. }
  3920. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3921. static bool classof(const Instruction *I) {
  3922. return (I->getOpcode() == Instruction::CleanupRet);
  3923. }
  3924. static bool classof(const Value *V) {
  3925. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3926. }
  3927. private:
  3928. BasicBlock *getSuccessor(unsigned Idx) const {
  3929. assert(Idx == 0);
  3930. return getUnwindDest();
  3931. }
  3932. void setSuccessor(unsigned Idx, BasicBlock *B) {
  3933. assert(Idx == 0);
  3934. setUnwindDest(B);
  3935. }
  3936. // Shadow Instruction::setInstructionSubclassData with a private forwarding
  3937. // method so that subclasses cannot accidentally use it.
  3938. template <typename Bitfield>
  3939. void setSubclassData(typename Bitfield::Type Value) {
  3940. Instruction::setSubclassData<Bitfield>(Value);
  3941. }
  3942. };
  3943. template <>
  3944. struct OperandTraits<CleanupReturnInst>
  3945. : public VariadicOperandTraits<CleanupReturnInst, /*MINARITY=*/1> {};
  3946. DEFINE_TRANSPARENT_OPERAND_ACCESSORS(CleanupReturnInst, Value)
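// Example (illustrative sketch): a cleanup funclet that unwinds to the caller.
// `Ctx` is assumed to be the LLVMContext and `CleanupBB` an existing block;
// the cleanup code itself would be emitted between the two calls.
//
//   CleanupPadInst *CPI = CleanupPadInst::Create(
//       /*ParentPad=*/ConstantTokenNone::get(Ctx), None, "cleanup", CleanupBB);
//   CleanupReturnInst::Create(CPI, /*UnwindBB=*/nullptr, CleanupBB);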
  3947. //===----------------------------------------------------------------------===//
  3948. // UnreachableInst Class
  3949. //===----------------------------------------------------------------------===//
  3950. //===---------------------------------------------------------------------------
3951. /// Executing this instruction has undefined behavior. In particular, the
  3952. /// presence of this instruction indicates some higher level knowledge that the
  3953. /// end of the block cannot be reached.
  3954. ///
  3955. class UnreachableInst : public Instruction {
  3956. protected:
  3957. // Note: Instruction needs to be a friend here to call cloneImpl.
  3958. friend class Instruction;
  3959. UnreachableInst *cloneImpl() const;
  3960. public:
  3961. explicit UnreachableInst(LLVMContext &C, Instruction *InsertBefore = nullptr);
  3962. explicit UnreachableInst(LLVMContext &C, BasicBlock *InsertAtEnd);
  3963. // allocate space for exactly zero operands
  3964. void *operator new(size_t S) { return User::operator new(S, 0); }
  3965. void operator delete(void *Ptr) { User::operator delete(Ptr); }
  3966. unsigned getNumSuccessors() const { return 0; }
  3967. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3968. static bool classof(const Instruction *I) {
  3969. return I->getOpcode() == Instruction::Unreachable;
  3970. }
  3971. static bool classof(const Value *V) {
  3972. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3973. }
  3974. private:
  3975. BasicBlock *getSuccessor(unsigned idx) const {
  3976. llvm_unreachable("UnreachableInst has no successors!");
  3977. }
  3978. void setSuccessor(unsigned idx, BasicBlock *B) {
  3979. llvm_unreachable("UnreachableInst has no successors!");
  3980. }
  3981. };
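// Example (illustrative sketch): terminating a block whose last call can never
// return. `BB` is assumed to be the block that already ends in the noreturn
// call.
//
//   new UnreachableInst(BB->getContext(), /*InsertAtEnd=*/BB);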
  3982. //===----------------------------------------------------------------------===//
  3983. // TruncInst Class
  3984. //===----------------------------------------------------------------------===//
  3985. /// This class represents a truncation of integer types.
  3986. class TruncInst : public CastInst {
  3987. protected:
  3988. // Note: Instruction needs to be a friend here to call cloneImpl.
  3989. friend class Instruction;
  3990. /// Clone an identical TruncInst
  3991. TruncInst *cloneImpl() const;
  3992. public:
  3993. /// Constructor with insert-before-instruction semantics
  3994. TruncInst(
  3995. Value *S, ///< The value to be truncated
  3996. Type *Ty, ///< The (smaller) type to truncate to
  3997. const Twine &NameStr = "", ///< A name for the new instruction
  3998. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  3999. );
  4000. /// Constructor with insert-at-end-of-block semantics
  4001. TruncInst(
  4002. Value *S, ///< The value to be truncated
  4003. Type *Ty, ///< The (smaller) type to truncate to
  4004. const Twine &NameStr, ///< A name for the new instruction
  4005. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4006. );
  4007. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4008. static bool classof(const Instruction *I) {
  4009. return I->getOpcode() == Trunc;
  4010. }
  4011. static bool classof(const Value *V) {
  4012. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4013. }
  4014. };
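// Example (illustrative sketch): narrowing an i64 value. `Wide` is assumed to
// be an i64 Value, `IP` the instruction to insert before, and `Ctx` the
// LLVMContext.
//
//   Value *Narrow = new TruncInst(Wide, Type::getInt32Ty(Ctx), "narrow", IP);
//   // Equivalent opcode-generic form:
//   Value *Narrow2 =
//       CastInst::Create(Instruction::Trunc, Wide, Type::getInt32Ty(Ctx),
//                        "narrow2", IP);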
  4015. //===----------------------------------------------------------------------===//
  4016. // ZExtInst Class
  4017. //===----------------------------------------------------------------------===//
  4018. /// This class represents zero extension of integer types.
  4019. class ZExtInst : public CastInst {
  4020. protected:
  4021. // Note: Instruction needs to be a friend here to call cloneImpl.
  4022. friend class Instruction;
  4023. /// Clone an identical ZExtInst
  4024. ZExtInst *cloneImpl() const;
  4025. public:
  4026. /// Constructor with insert-before-instruction semantics
  4027. ZExtInst(
  4028. Value *S, ///< The value to be zero extended
  4029. Type *Ty, ///< The type to zero extend to
  4030. const Twine &NameStr = "", ///< A name for the new instruction
  4031. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4032. );
4033. /// Constructor with insert-at-end-of-block semantics
  4034. ZExtInst(
  4035. Value *S, ///< The value to be zero extended
  4036. Type *Ty, ///< The type to zero extend to
  4037. const Twine &NameStr, ///< A name for the new instruction
  4038. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4039. );
  4040. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4041. static bool classof(const Instruction *I) {
  4042. return I->getOpcode() == ZExt;
  4043. }
  4044. static bool classof(const Value *V) {
  4045. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4046. }
  4047. };
  4048. //===----------------------------------------------------------------------===//
  4049. // SExtInst Class
  4050. //===----------------------------------------------------------------------===//
  4051. /// This class represents a sign extension of integer types.
  4052. class SExtInst : public CastInst {
  4053. protected:
  4054. // Note: Instruction needs to be a friend here to call cloneImpl.
  4055. friend class Instruction;
  4056. /// Clone an identical SExtInst
  4057. SExtInst *cloneImpl() const;
  4058. public:
  4059. /// Constructor with insert-before-instruction semantics
  4060. SExtInst(
  4061. Value *S, ///< The value to be sign extended
  4062. Type *Ty, ///< The type to sign extend to
  4063. const Twine &NameStr = "", ///< A name for the new instruction
  4064. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4065. );
  4066. /// Constructor with insert-at-end-of-block semantics
  4067. SExtInst(
  4068. Value *S, ///< The value to be sign extended
  4069. Type *Ty, ///< The type to sign extend to
  4070. const Twine &NameStr, ///< A name for the new instruction
  4071. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4072. );
  4073. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4074. static bool classof(const Instruction *I) {
  4075. return I->getOpcode() == SExt;
  4076. }
  4077. static bool classof(const Value *V) {
  4078. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4079. }
  4080. };
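// Example (illustrative sketch): widening with explicit signedness. `Narrow`
// is assumed to be an i32 Value, `I64Ty` an i64 type, and `IP` an insertion
// point instruction.
//
//   Value *U = new ZExtInst(Narrow, I64Ty, "zext", IP);
//   Value *S = new SExtInst(Narrow, I64Ty, "sext", IP);
//   // Or let CastInst pick trunc/zext/sext/bitcast from the types:
//   Value *W = CastInst::CreateIntegerCast(Narrow, I64Ty, /*isSigned=*/true,
//                                          "widen", IP);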
  4081. //===----------------------------------------------------------------------===//
  4082. // FPTruncInst Class
  4083. //===----------------------------------------------------------------------===//
  4084. /// This class represents a truncation of floating point types.
  4085. class FPTruncInst : public CastInst {
  4086. protected:
  4087. // Note: Instruction needs to be a friend here to call cloneImpl.
  4088. friend class Instruction;
  4089. /// Clone an identical FPTruncInst
  4090. FPTruncInst *cloneImpl() const;
  4091. public:
  4092. /// Constructor with insert-before-instruction semantics
  4093. FPTruncInst(
  4094. Value *S, ///< The value to be truncated
  4095. Type *Ty, ///< The type to truncate to
  4096. const Twine &NameStr = "", ///< A name for the new instruction
  4097. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4098. );
4099. /// Constructor with insert-at-end-of-block semantics
  4100. FPTruncInst(
  4101. Value *S, ///< The value to be truncated
  4102. Type *Ty, ///< The type to truncate to
  4103. const Twine &NameStr, ///< A name for the new instruction
  4104. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4105. );
  4106. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4107. static bool classof(const Instruction *I) {
  4108. return I->getOpcode() == FPTrunc;
  4109. }
  4110. static bool classof(const Value *V) {
  4111. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4112. }
  4113. };
  4114. //===----------------------------------------------------------------------===//
  4115. // FPExtInst Class
  4116. //===----------------------------------------------------------------------===//
  4117. /// This class represents an extension of floating point types.
  4118. class FPExtInst : public CastInst {
  4119. protected:
  4120. // Note: Instruction needs to be a friend here to call cloneImpl.
  4121. friend class Instruction;
  4122. /// Clone an identical FPExtInst
  4123. FPExtInst *cloneImpl() const;
  4124. public:
  4125. /// Constructor with insert-before-instruction semantics
  4126. FPExtInst(
  4127. Value *S, ///< The value to be extended
  4128. Type *Ty, ///< The type to extend to
  4129. const Twine &NameStr = "", ///< A name for the new instruction
  4130. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4131. );
  4132. /// Constructor with insert-at-end-of-block semantics
  4133. FPExtInst(
  4134. Value *S, ///< The value to be extended
  4135. Type *Ty, ///< The type to extend to
  4136. const Twine &NameStr, ///< A name for the new instruction
  4137. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4138. );
  4139. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4140. static bool classof(const Instruction *I) {
  4141. return I->getOpcode() == FPExt;
  4142. }
  4143. static bool classof(const Value *V) {
  4144. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4145. }
  4146. };
  4147. //===----------------------------------------------------------------------===//
  4148. // UIToFPInst Class
  4149. //===----------------------------------------------------------------------===//
4150. /// This class represents a cast from unsigned integer to floating point.
  4151. class UIToFPInst : public CastInst {
  4152. protected:
  4153. // Note: Instruction needs to be a friend here to call cloneImpl.
  4154. friend class Instruction;
  4155. /// Clone an identical UIToFPInst
  4156. UIToFPInst *cloneImpl() const;
  4157. public:
  4158. /// Constructor with insert-before-instruction semantics
  4159. UIToFPInst(
  4160. Value *S, ///< The value to be converted
  4161. Type *Ty, ///< The type to convert to
  4162. const Twine &NameStr = "", ///< A name for the new instruction
  4163. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4164. );
  4165. /// Constructor with insert-at-end-of-block semantics
  4166. UIToFPInst(
  4167. Value *S, ///< The value to be converted
  4168. Type *Ty, ///< The type to convert to
  4169. const Twine &NameStr, ///< A name for the new instruction
  4170. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4171. );
  4172. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4173. static bool classof(const Instruction *I) {
  4174. return I->getOpcode() == UIToFP;
  4175. }
  4176. static bool classof(const Value *V) {
  4177. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4178. }
  4179. };
  4180. //===----------------------------------------------------------------------===//
  4181. // SIToFPInst Class
  4182. //===----------------------------------------------------------------------===//
  4183. /// This class represents a cast from signed integer to floating point.
  4184. class SIToFPInst : public CastInst {
  4185. protected:
  4186. // Note: Instruction needs to be a friend here to call cloneImpl.
  4187. friend class Instruction;
  4188. /// Clone an identical SIToFPInst
  4189. SIToFPInst *cloneImpl() const;
  4190. public:
  4191. /// Constructor with insert-before-instruction semantics
  4192. SIToFPInst(
  4193. Value *S, ///< The value to be converted
  4194. Type *Ty, ///< The type to convert to
  4195. const Twine &NameStr = "", ///< A name for the new instruction
  4196. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4197. );
  4198. /// Constructor with insert-at-end-of-block semantics
  4199. SIToFPInst(
  4200. Value *S, ///< The value to be converted
  4201. Type *Ty, ///< The type to convert to
  4202. const Twine &NameStr, ///< A name for the new instruction
  4203. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4204. );
  4205. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4206. static bool classof(const Instruction *I) {
  4207. return I->getOpcode() == SIToFP;
  4208. }
  4209. static bool classof(const Value *V) {
  4210. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4211. }
  4212. };
  4213. //===----------------------------------------------------------------------===//
  4214. // FPToUIInst Class
  4215. //===----------------------------------------------------------------------===//
4216. /// This class represents a cast from floating point to unsigned integer.
  4217. class FPToUIInst : public CastInst {
  4218. protected:
  4219. // Note: Instruction needs to be a friend here to call cloneImpl.
  4220. friend class Instruction;
  4221. /// Clone an identical FPToUIInst
  4222. FPToUIInst *cloneImpl() const;
  4223. public:
  4224. /// Constructor with insert-before-instruction semantics
  4225. FPToUIInst(
  4226. Value *S, ///< The value to be converted
  4227. Type *Ty, ///< The type to convert to
  4228. const Twine &NameStr = "", ///< A name for the new instruction
  4229. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4230. );
  4231. /// Constructor with insert-at-end-of-block semantics
  4232. FPToUIInst(
  4233. Value *S, ///< The value to be converted
  4234. Type *Ty, ///< The type to convert to
  4235. const Twine &NameStr, ///< A name for the new instruction
4236. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4237. );
  4238. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4239. static bool classof(const Instruction *I) {
  4240. return I->getOpcode() == FPToUI;
  4241. }
  4242. static bool classof(const Value *V) {
  4243. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4244. }
  4245. };
  4246. //===----------------------------------------------------------------------===//
  4247. // FPToSIInst Class
  4248. //===----------------------------------------------------------------------===//
  4249. /// This class represents a cast from floating point to signed integer.
  4250. class FPToSIInst : public CastInst {
  4251. protected:
  4252. // Note: Instruction needs to be a friend here to call cloneImpl.
  4253. friend class Instruction;
  4254. /// Clone an identical FPToSIInst
  4255. FPToSIInst *cloneImpl() const;
  4256. public:
  4257. /// Constructor with insert-before-instruction semantics
  4258. FPToSIInst(
  4259. Value *S, ///< The value to be converted
  4260. Type *Ty, ///< The type to convert to
  4261. const Twine &NameStr = "", ///< A name for the new instruction
  4262. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4263. );
  4264. /// Constructor with insert-at-end-of-block semantics
  4265. FPToSIInst(
  4266. Value *S, ///< The value to be converted
  4267. Type *Ty, ///< The type to convert to
  4268. const Twine &NameStr, ///< A name for the new instruction
  4269. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4270. );
  4271. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4272. static bool classof(const Instruction *I) {
  4273. return I->getOpcode() == FPToSI;
  4274. }
  4275. static bool classof(const Value *V) {
  4276. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4277. }
  4278. };
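// Example (illustrative sketch): an integer/floating-point round trip.
// `IntVal` is assumed to be an i32 Value, `Ctx` the LLVMContext, and `IP` an
// insertion point instruction; the round trip is lossy for values that do not
// fit the target types.
//
//   Value *AsFP = new SIToFPInst(IntVal, Type::getDoubleTy(Ctx), "fp", IP);
//   Value *Back = new FPToSIInst(AsFP, IntVal->getType(), "int", IP);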

//===----------------------------------------------------------------------===//
// IntToPtrInst Class
//===----------------------------------------------------------------------===//

/// This class represents a cast from an integer to a pointer.
class IntToPtrInst : public CastInst {
public:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Constructor with insert-before-instruction semantics
  IntToPtrInst(
    Value *S,                            ///< The value to be converted
    Type *Ty,                            ///< The type to convert to
    const Twine &NameStr = "",           ///< A name for the new instruction
    Instruction *InsertBefore = nullptr  ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  IntToPtrInst(
    Value *S,                  ///< The value to be converted
    Type *Ty,                  ///< The type to convert to
    const Twine &NameStr,      ///< A name for the new instruction
    BasicBlock *InsertAtEnd    ///< The block to insert the instruction into
  );

  /// Clone an identical IntToPtrInst.
  IntToPtrInst *cloneImpl() const;

  /// Returns the address space of this instruction's pointer type.
  unsigned getAddressSpace() const {
    return getType()->getPointerAddressSpace();
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == IntToPtr;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
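
// Illustrative sketch, not part of this header: casting an integer to a
// pointer and querying the resulting address space. `IntVal`, `PtrTy`, and
// `InsertPt` are hypothetical; `PtrTy` must be a pointer type for the cast
// to be valid.
inline unsigned exampleIntToPtr(Value *IntVal, Type *PtrTy,
                                Instruction *InsertPt) {
  auto *Cast = new IntToPtrInst(IntVal, PtrTy, "int.to.ptr", InsertPt);
  return Cast->getAddressSpace();
}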

//===----------------------------------------------------------------------===//
// PtrToIntInst Class
//===----------------------------------------------------------------------===//

/// This class represents a cast from a pointer to an integer.
class PtrToIntInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical PtrToIntInst.
  PtrToIntInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  PtrToIntInst(
    Value *S,                            ///< The value to be converted
    Type *Ty,                            ///< The type to convert to
    const Twine &NameStr = "",           ///< A name for the new instruction
    Instruction *InsertBefore = nullptr  ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  PtrToIntInst(
    Value *S,                  ///< The value to be converted
    Type *Ty,                  ///< The type to convert to
    const Twine &NameStr,      ///< A name for the new instruction
    BasicBlock *InsertAtEnd    ///< The block to insert the instruction into
  );

  /// Gets the pointer operand.
  Value *getPointerOperand() { return getOperand(0); }
  /// Gets the pointer operand.
  const Value *getPointerOperand() const { return getOperand(0); }

  /// Gets the operand index of the pointer operand.
  static unsigned getPointerOperandIndex() { return 0U; }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperand()->getType()->getPointerAddressSpace();
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == PtrToInt;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
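
// Illustrative sketch, not part of this header: casting a pointer to i64 and
// reading the address space of the source pointer through the accessor above.
// `Ptr` and `InsertPt` are hypothetical; i64 is an arbitrary width.
inline unsigned examplePtrToInt(Value *Ptr, Instruction *InsertPt) {
  Type *I64Ty = Type::getInt64Ty(InsertPt->getContext());
  auto *Cast = new PtrToIntInst(Ptr, I64Ty, "ptr.to.int", InsertPt);
  // The address space comes from the source pointer operand, not the result.
  return Cast->getPointerAddressSpace();
}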

//===----------------------------------------------------------------------===//
// BitCastInst Class
//===----------------------------------------------------------------------===//

/// This class represents a no-op cast from one type to another.
class BitCastInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical BitCastInst.
  BitCastInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  BitCastInst(
    Value *S,                            ///< The value to be cast
    Type *Ty,                            ///< The type to cast to
    const Twine &NameStr = "",           ///< A name for the new instruction
    Instruction *InsertBefore = nullptr  ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  BitCastInst(
    Value *S,                  ///< The value to be cast
    Type *Ty,                  ///< The type to cast to
    const Twine &NameStr,      ///< A name for the new instruction
    BasicBlock *InsertAtEnd    ///< The block to insert the instruction into
  );

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == BitCast;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
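
// Illustrative sketch, not part of this header: a bitcast reinterprets the
// bits of `V` as `DestTy` without changing them, so the two types must have
// the same size. All names here are hypothetical.
inline Value *exampleBitCast(Value *V, Type *DestTy, Instruction *InsertPt) {
  if (V->getType() == DestTy)
    return V; // No cast is needed when the types already match.
  return new BitCastInst(V, DestTy, "bc", InsertPt);
}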

//===----------------------------------------------------------------------===//
// AddrSpaceCastInst Class
//===----------------------------------------------------------------------===//

/// This class represents a conversion between pointers from one address space
/// to another.
class AddrSpaceCastInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical AddrSpaceCastInst.
  AddrSpaceCastInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  AddrSpaceCastInst(
    Value *S,                            ///< The value to be cast
    Type *Ty,                            ///< The type to cast to
    const Twine &NameStr = "",           ///< A name for the new instruction
    Instruction *InsertBefore = nullptr  ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  AddrSpaceCastInst(
    Value *S,                  ///< The value to be cast
    Type *Ty,                  ///< The type to cast to
    const Twine &NameStr,      ///< A name for the new instruction
    BasicBlock *InsertAtEnd    ///< The block to insert the instruction into
  );

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == AddrSpaceCast;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

  /// Gets the pointer operand.
  Value *getPointerOperand() {
    return getOperand(0);
  }

  /// Gets the pointer operand.
  const Value *getPointerOperand() const {
    return getOperand(0);
  }

  /// Gets the operand index of the pointer operand.
  static unsigned getPointerOperandIndex() {
    return 0U;
  }

  /// Returns the address space of the pointer operand.
  unsigned getSrcAddressSpace() const {
    return getPointerOperand()->getType()->getPointerAddressSpace();
  }

  /// Returns the address space of the result.
  unsigned getDestAddressSpace() const {
    return getType()->getPointerAddressSpace();
  }
};
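
// Illustrative sketch, not part of this header: an addrspacecast converts a
// pointer between address spaces, which the accessors above expose. `Ptr`,
// `DestPtrTy`, and `InsertPt` are hypothetical; `DestPtrTy` must be a pointer
// type.
inline bool exampleAddrSpaceCastChangesAS(Value *Ptr, Type *DestPtrTy,
                                          Instruction *InsertPt) {
  auto *Cast = new AddrSpaceCastInst(Ptr, DestPtrTy, "as.cast", InsertPt);
  return Cast->getSrcAddressSpace() != Cast->getDestAddressSpace();
}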

/// A helper function that returns the pointer operand of a load or store
/// instruction. Returns nullptr if \p V is not a load or a store.
inline const Value *getLoadStorePointerOperand(const Value *V) {
  if (auto *Load = dyn_cast<LoadInst>(V))
    return Load->getPointerOperand();
  if (auto *Store = dyn_cast<StoreInst>(V))
    return Store->getPointerOperand();
  return nullptr;
}

inline Value *getLoadStorePointerOperand(Value *V) {
  return const_cast<Value *>(
      getLoadStorePointerOperand(static_cast<const Value *>(V)));
}
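
// Illustrative sketch, not part of this header: using the helper above to
// treat loads and stores uniformly. The function name is hypothetical.
inline const Value *exampleUnderlyingPointer(const Value *V) {
  if (const Value *Ptr = getLoadStorePointerOperand(V))
    return Ptr; // V is a load or store; return the address it accesses.
  return V;     // Anything else is passed through unchanged.
}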

/// A helper function that returns the pointer operand of a load, store,
/// or GEP instruction. Returns nullptr if \p V is not a load, store, or GEP.
inline const Value *getPointerOperand(const Value *V) {
  if (auto *Ptr = getLoadStorePointerOperand(V))
    return Ptr;
  if (auto *Gep = dyn_cast<GetElementPtrInst>(V))
    return Gep->getPointerOperand();
  return nullptr;
}

inline Value *getPointerOperand(Value *V) {
  return const_cast<Value *>(getPointerOperand(static_cast<const Value *>(V)));
}
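
// Illustrative sketch, not part of this header: getPointerOperand also covers
// GEPs, so it is a convenient single entry point when comparing the addresses
// touched by two instructions. The function name is hypothetical.
inline bool exampleSamePointerOperand(const Value *A, const Value *B) {
  const Value *PA = getPointerOperand(A);
  const Value *PB = getPointerOperand(B);
  // Guard against both being nullptr (neither is a load, store, or GEP).
  return PA && PA == PB;
}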

/// A helper function that returns the alignment of a load or store
/// instruction.
inline Align getLoadStoreAlignment(Value *I) {
  assert((isa<LoadInst>(I) || isa<StoreInst>(I)) &&
         "Expected Load or Store instruction");
  if (auto *LI = dyn_cast<LoadInst>(I))
    return LI->getAlign();
  return cast<StoreInst>(I)->getAlign();
}

/// A helper function that returns the address space of the pointer operand of
/// a load or store instruction.
inline unsigned getLoadStoreAddressSpace(Value *I) {
  assert((isa<LoadInst>(I) || isa<StoreInst>(I)) &&
         "Expected Load or Store instruction");
  if (auto *LI = dyn_cast<LoadInst>(I))
    return LI->getPointerAddressSpace();
  return cast<StoreInst>(I)->getPointerAddressSpace();
}

/// A helper function that returns the type of a load or store instruction.
inline Type *getLoadStoreType(Value *I) {
  assert((isa<LoadInst>(I) || isa<StoreInst>(I)) &&
         "Expected Load or Store instruction");
  if (auto *LI = dyn_cast<LoadInst>(I))
    return LI->getType();
  return cast<StoreInst>(I)->getValueOperand()->getType();
}
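
// Illustrative sketch, not part of this header: the three helpers above make
// it possible to summarize a memory access without branching on load vs.
// store. The struct and function are hypothetical.
struct ExampleMemAccessInfo {
  Type *AccessTy;
  Align Alignment;
  unsigned AddrSpace;
};

inline ExampleMemAccessInfo exampleDescribeAccess(Value *I) {
  // I must be a LoadInst or a StoreInst; the helpers assert otherwise.
  return {getLoadStoreType(I), getLoadStoreAlignment(I),
          getLoadStoreAddressSpace(I)};
}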

//===----------------------------------------------------------------------===//
// FreezeInst Class
//===----------------------------------------------------------------------===//

/// This class represents a freeze function that returns a random concrete
/// value if an operand is either a poison value or an undef value, and
/// otherwise returns its operand unchanged.
class FreezeInst : public UnaryInstruction {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical FreezeInst
  FreezeInst *cloneImpl() const;

public:
  explicit FreezeInst(Value *S,
                      const Twine &NameStr = "",
                      Instruction *InsertBefore = nullptr);
  FreezeInst(Value *S, const Twine &NameStr, BasicBlock *InsertAtEnd);

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static inline bool classof(const Instruction *I) {
    return I->getOpcode() == Freeze;
  }
  static inline bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
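
// Illustrative sketch, not part of this header: freezing a value pins a
// poison or undef operand to some fixed concrete value, making it safe to
// branch on. `Cond` and `InsertPt` are hypothetical.
inline Value *exampleFreezeCondition(Value *Cond, Instruction *InsertPt) {
  return new FreezeInst(Cond, "frozen", InsertPt);
}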

} // end namespace llvm

#endif // LLVM_IR_INSTRUCTIONS_H

#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif