// (extraction artifact removed: "Instructions.h 205 KB" title line and the
// concatenated line-number gutter from the original file listing)
  1. #pragma once
  2. #ifdef __GNUC__
  3. #pragma GCC diagnostic push
  4. #pragma GCC diagnostic ignored "-Wunused-parameter"
  5. #endif
  6. //===- llvm/Instructions.h - Instruction subclass definitions ---*- C++ -*-===//
  7. //
  8. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  9. // See https://llvm.org/LICENSE.txt for license information.
  10. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  11. //
  12. //===----------------------------------------------------------------------===//
  13. //
  14. // This file exposes the class definitions of all of the subclasses of the
  15. // Instruction class. This is meant to be an easy way to get access to all
  16. // instruction subclasses.
  17. //
  18. //===----------------------------------------------------------------------===//
  19. #ifndef LLVM_IR_INSTRUCTIONS_H
  20. #define LLVM_IR_INSTRUCTIONS_H
  21. #include "llvm/ADT/ArrayRef.h"
  22. #include "llvm/ADT/Bitfields.h"
  23. #include "llvm/ADT/MapVector.h"
  24. #include "llvm/ADT/STLExtras.h"
  25. #include "llvm/ADT/SmallVector.h"
  26. #include "llvm/ADT/Twine.h"
  27. #include "llvm/ADT/iterator.h"
  28. #include "llvm/ADT/iterator_range.h"
  29. #include "llvm/IR/CFG.h"
  30. #include "llvm/IR/Constant.h"
  31. #include "llvm/IR/DerivedTypes.h"
  32. #include "llvm/IR/InstrTypes.h"
  33. #include "llvm/IR/Instruction.h"
  34. #include "llvm/IR/OperandTraits.h"
  35. #include "llvm/IR/Use.h"
  36. #include "llvm/IR/User.h"
  37. #include "llvm/Support/AtomicOrdering.h"
  38. #include "llvm/Support/ErrorHandling.h"
  39. #include <cassert>
  40. #include <cstddef>
  41. #include <cstdint>
  42. #include <iterator>
  43. #include <optional>
  44. namespace llvm {
  45. class APFloat;
  46. class APInt;
  47. class BasicBlock;
  48. class ConstantInt;
  49. class DataLayout;
  50. class StringRef;
  51. class Type;
  52. class Value;
  53. //===----------------------------------------------------------------------===//
  54. // AllocaInst Class
  55. //===----------------------------------------------------------------------===//
/// An instruction to allocate memory on the stack.
class AllocaInst : public UnaryInstruction {
  /// The concrete type whose storage this alloca reserves. Stored explicitly
  /// because the instruction's own result type is just a pointer (see
  /// getType() below).
  Type *AllocatedType;

  // Layout of the Instruction subclass-data word for this class, in order:
  // log2 of the alignment, the "used with inalloca" flag, and the
  // "swifterror" flag. The static_assert guarantees the three fields pack
  // back-to-back with no gaps.
  using AlignmentField = AlignmentBitfieldElementT<0>;
  using UsedWithInAllocaField = BoolBitfieldElementT<AlignmentField::NextBit>;
  using SwiftErrorField = BoolBitfieldElementT<UsedWithInAllocaField::NextBit>;
  static_assert(Bitfield::areContiguous<AlignmentField, UsedWithInAllocaField,
                                        SwiftErrorField>(),
                "Bitfields must be contiguous");

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  AllocaInst *cloneImpl() const;

public:
  // Constructors come in pairs: one inserting before an existing instruction,
  // one appending to the end of a basic block; with or without an explicit
  // array size and alignment.
  explicit AllocaInst(Type *Ty, unsigned AddrSpace, Value *ArraySize,
                      const Twine &Name, Instruction *InsertBefore);
  AllocaInst(Type *Ty, unsigned AddrSpace, Value *ArraySize,
             const Twine &Name, BasicBlock *InsertAtEnd);

  AllocaInst(Type *Ty, unsigned AddrSpace, const Twine &Name,
             Instruction *InsertBefore);
  AllocaInst(Type *Ty, unsigned AddrSpace,
             const Twine &Name, BasicBlock *InsertAtEnd);

  AllocaInst(Type *Ty, unsigned AddrSpace, Value *ArraySize, Align Align,
             const Twine &Name = "", Instruction *InsertBefore = nullptr);
  AllocaInst(Type *Ty, unsigned AddrSpace, Value *ArraySize, Align Align,
             const Twine &Name, BasicBlock *InsertAtEnd);

  /// Return true if there is an allocation size parameter to the allocation
  /// instruction that is not 1.
  bool isArrayAllocation() const;

  /// Get the number of elements allocated. For a simple allocation of a single
  /// element, this will return a constant 1 value.
  const Value *getArraySize() const { return getOperand(0); }
  Value *getArraySize() { return getOperand(0); }

  /// Overload to return most specific pointer type.
  PointerType *getType() const {
    return cast<PointerType>(Instruction::getType());
  }

  /// Return the address space for the allocation.
  unsigned getAddressSpace() const {
    return getType()->getAddressSpace();
  }

  /// Get allocation size in bytes. Returns std::nullopt if size can't be
  /// determined, e.g. in case of a VLA.
  std::optional<TypeSize> getAllocationSize(const DataLayout &DL) const;

  /// Get allocation size in bits. Returns std::nullopt if size can't be
  /// determined, e.g. in case of a VLA.
  std::optional<TypeSize> getAllocationSizeInBits(const DataLayout &DL) const;

  /// Return the type that is being allocated by the instruction.
  Type *getAllocatedType() const { return AllocatedType; }

  /// For use only in special circumstances that need to generically
  /// transform a whole instruction (eg: IR linking and vectorization).
  void setAllocatedType(Type *Ty) { AllocatedType = Ty; }

  /// Return the alignment of the memory that is being allocated by the
  /// instruction.
  Align getAlign() const {
    // Only log2(alignment) is stored in the bitfield; reconstruct the
    // actual alignment by shifting.
    return Align(1ULL << getSubclassData<AlignmentField>());
  }

  /// Set the alignment of the allocation (stored as its log2).
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }

  /// Return true if this alloca is in the entry block of the function and is a
  /// constant size. If so, the code generator will fold it into the
  /// prolog/epilog code, so it is basically free.
  bool isStaticAlloca() const;

  /// Return true if this alloca is used as an inalloca argument to a call. Such
  /// allocas are never considered static even if they are in the entry block.
  bool isUsedWithInAlloca() const {
    return getSubclassData<UsedWithInAllocaField>();
  }

  /// Specify whether this alloca is used to represent the arguments to a call.
  void setUsedWithInAlloca(bool V) {
    setSubclassData<UsedWithInAllocaField>(V);
  }

  /// Return true if this alloca is used as a swifterror argument to a call.
  bool isSwiftError() const { return getSubclassData<SwiftErrorField>(); }

  /// Specify whether this alloca is used to represent a swifterror.
  void setSwiftError(bool V) { setSubclassData<SwiftErrorField>(V); }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return (I->getOpcode() == Instruction::Alloca);
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }
};
  148. //===----------------------------------------------------------------------===//
  149. // LoadInst Class
  150. //===----------------------------------------------------------------------===//
  151. /// An instruction for reading from memory. This uses the SubclassData field in
  152. /// Value to store whether or not the load is volatile.
  153. class LoadInst : public UnaryInstruction {
  154. using VolatileField = BoolBitfieldElementT<0>;
  155. using AlignmentField = AlignmentBitfieldElementT<VolatileField::NextBit>;
  156. using OrderingField = AtomicOrderingBitfieldElementT<AlignmentField::NextBit>;
  157. static_assert(
  158. Bitfield::areContiguous<VolatileField, AlignmentField, OrderingField>(),
  159. "Bitfields must be contiguous");
  160. void AssertOK();
  161. protected:
  162. // Note: Instruction needs to be a friend here to call cloneImpl.
  163. friend class Instruction;
  164. LoadInst *cloneImpl() const;
  165. public:
  166. LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr,
  167. Instruction *InsertBefore);
  168. LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, BasicBlock *InsertAtEnd);
  169. LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
  170. Instruction *InsertBefore);
  171. LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
  172. BasicBlock *InsertAtEnd);
  173. LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
  174. Align Align, Instruction *InsertBefore = nullptr);
  175. LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
  176. Align Align, BasicBlock *InsertAtEnd);
  177. LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
  178. Align Align, AtomicOrdering Order,
  179. SyncScope::ID SSID = SyncScope::System,
  180. Instruction *InsertBefore = nullptr);
  181. LoadInst(Type *Ty, Value *Ptr, const Twine &NameStr, bool isVolatile,
  182. Align Align, AtomicOrdering Order, SyncScope::ID SSID,
  183. BasicBlock *InsertAtEnd);
  184. /// Return true if this is a load from a volatile memory location.
  185. bool isVolatile() const { return getSubclassData<VolatileField>(); }
  186. /// Specify whether this is a volatile load or not.
  187. void setVolatile(bool V) { setSubclassData<VolatileField>(V); }
  188. /// Return the alignment of the access that is being performed.
  189. Align getAlign() const {
  190. return Align(1ULL << (getSubclassData<AlignmentField>()));
  191. }
  192. void setAlignment(Align Align) {
  193. setSubclassData<AlignmentField>(Log2(Align));
  194. }
  195. /// Returns the ordering constraint of this load instruction.
  196. AtomicOrdering getOrdering() const {
  197. return getSubclassData<OrderingField>();
  198. }
  199. /// Sets the ordering constraint of this load instruction. May not be Release
  200. /// or AcquireRelease.
  201. void setOrdering(AtomicOrdering Ordering) {
  202. setSubclassData<OrderingField>(Ordering);
  203. }
  204. /// Returns the synchronization scope ID of this load instruction.
  205. SyncScope::ID getSyncScopeID() const {
  206. return SSID;
  207. }
  208. /// Sets the synchronization scope ID of this load instruction.
  209. void setSyncScopeID(SyncScope::ID SSID) {
  210. this->SSID = SSID;
  211. }
  212. /// Sets the ordering constraint and the synchronization scope ID of this load
  213. /// instruction.
  214. void setAtomic(AtomicOrdering Ordering,
  215. SyncScope::ID SSID = SyncScope::System) {
  216. setOrdering(Ordering);
  217. setSyncScopeID(SSID);
  218. }
  219. bool isSimple() const { return !isAtomic() && !isVolatile(); }
  220. bool isUnordered() const {
  221. return (getOrdering() == AtomicOrdering::NotAtomic ||
  222. getOrdering() == AtomicOrdering::Unordered) &&
  223. !isVolatile();
  224. }
  // Operand 0 is the address being loaded from.
  Value *getPointerOperand() { return getOperand(0); }
  const Value *getPointerOperand() const { return getOperand(0); }
  /// Operand index of the pointer operand (always 0 for loads).
  static unsigned getPointerOperandIndex() { return 0U; }
  Type *getPointerOperandType() const { return getPointerOperand()->getType(); }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperandType()->getPointerAddressSpace();
  }
  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Load;
  }
  static bool classof(const Value *V) {
    // A Value is a LoadInst iff it is an Instruction with the Load opcode.
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this load instruction.  Not quite enough
  /// room in SubClassData for everything, so synchronization scope ID gets its
  /// own field.
  SyncScope::ID SSID;
  251. };
  252. //===----------------------------------------------------------------------===//
  253. // StoreInst Class
  254. //===----------------------------------------------------------------------===//
/// An instruction for storing to memory.
class StoreInst : public Instruction {
  // Subclass-data bitfield layout: volatile bit, log2(alignment), then the
  // atomic ordering.
  using VolatileField = BoolBitfieldElementT<0>;
  using AlignmentField = AlignmentBitfieldElementT<VolatileField::NextBit>;
  using OrderingField = AtomicOrderingBitfieldElementT<AlignmentField::NextBit>;
  static_assert(
      Bitfield::areContiguous<VolatileField, AlignmentField, OrderingField>(),
      "Bitfields must be contiguous");

  // Operand sanity checks; defined out of line.
  void AssertOK();

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  StoreInst *cloneImpl() const;

public:
  // Constructors.  Volatility, alignment, ordering and synchronization scope
  // are optional; the simplest forms create a plain non-atomic store.
  StoreInst(Value *Val, Value *Ptr, Instruction *InsertBefore);
  StoreInst(Value *Val, Value *Ptr, BasicBlock *InsertAtEnd);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Instruction *InsertBefore);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, BasicBlock *InsertAtEnd);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Align Align,
            Instruction *InsertBefore = nullptr);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Align Align,
            BasicBlock *InsertAtEnd);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Align Align,
            AtomicOrdering Order, SyncScope::ID SSID = SyncScope::System,
            Instruction *InsertBefore = nullptr);
  StoreInst(Value *Val, Value *Ptr, bool isVolatile, Align Align,
            AtomicOrdering Order, SyncScope::ID SSID, BasicBlock *InsertAtEnd);

  // allocate space for exactly two operands
  void *operator new(size_t S) { return User::operator new(S, 2); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  /// Return true if this is a store to a volatile memory location.
  bool isVolatile() const { return getSubclassData<VolatileField>(); }

  /// Specify whether this is a volatile store or not.
  void setVolatile(bool V) { setSubclassData<VolatileField>(V); }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Return the alignment of the access that is being performed
  /// (stored as log2 in the AlignmentField bitfield).
  Align getAlign() const {
    return Align(1ULL << (getSubclassData<AlignmentField>()));
  }

  /// Set the alignment of the access; encoded as Log2(Align).
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }

  /// Returns the ordering constraint of this store instruction.
  AtomicOrdering getOrdering() const {
    return getSubclassData<OrderingField>();
  }

  /// Sets the ordering constraint of this store instruction.  May not be
  /// Acquire or AcquireRelease.
  void setOrdering(AtomicOrdering Ordering) {
    setSubclassData<OrderingField>(Ordering);
  }

  /// Returns the synchronization scope ID of this store instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }

  /// Sets the synchronization scope ID of this store instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  /// Sets the ordering constraint and the synchronization scope ID of this
  /// store instruction.
  void setAtomic(AtomicOrdering Ordering,
                 SyncScope::ID SSID = SyncScope::System) {
    setOrdering(Ordering);
    setSyncScopeID(SSID);
  }

  /// True when this store is neither atomic nor volatile.
  bool isSimple() const { return !isAtomic() && !isVolatile(); }

  /// True when this store is not volatile and is at most "unordered" atomic.
  bool isUnordered() const {
    return (getOrdering() == AtomicOrdering::NotAtomic ||
            getOrdering() == AtomicOrdering::Unordered) &&
           !isVolatile();
  }

  // Operand 0 is the stored value, operand 1 is the destination address.
  Value *getValueOperand() { return getOperand(0); }
  const Value *getValueOperand() const { return getOperand(0); }

  Value *getPointerOperand() { return getOperand(1); }
  const Value *getPointerOperand() const { return getOperand(1); }
  /// Operand index of the pointer operand (always 1 for stores).
  static unsigned getPointerOperandIndex() { return 1U; }
  Type *getPointerOperandType() const { return getPointerOperand()->getType(); }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperandType()->getPointerAddressSpace();
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Store;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this store instruction.  Not quite enough
  /// room in SubClassData for everything, so synchronization scope ID gets its
  /// own field.
  SyncScope::ID SSID;
};
// StoreInst always has exactly two operands (value and pointer).
template <>
struct OperandTraits<StoreInst> : public FixedNumOperandTraits<StoreInst, 2> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(StoreInst, Value)
  360. //===----------------------------------------------------------------------===//
  361. // FenceInst Class
  362. //===----------------------------------------------------------------------===//
/// An instruction for ordering other memory operations.
class FenceInst : public Instruction {
  // Fences carry only an atomic ordering in the subclass-data bitfield.
  using OrderingField = AtomicOrderingBitfieldElementT<0>;

  void Init(AtomicOrdering Ordering, SyncScope::ID SSID);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  FenceInst *cloneImpl() const;

public:
  // Ordering may only be Acquire, Release, AcquireRelease, or
  // SequentiallyConsistent.
  FenceInst(LLVMContext &C, AtomicOrdering Ordering,
            SyncScope::ID SSID = SyncScope::System,
            Instruction *InsertBefore = nullptr);
  FenceInst(LLVMContext &C, AtomicOrdering Ordering, SyncScope::ID SSID,
            BasicBlock *InsertAtEnd);

  // allocate space for exactly zero operands
  void *operator new(size_t S) { return User::operator new(S, 0); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  /// Returns the ordering constraint of this fence instruction.
  AtomicOrdering getOrdering() const {
    return getSubclassData<OrderingField>();
  }

  /// Sets the ordering constraint of this fence instruction.  May only be
  /// Acquire, Release, AcquireRelease, or SequentiallyConsistent.
  void setOrdering(AtomicOrdering Ordering) {
    setSubclassData<OrderingField>(Ordering);
  }

  /// Returns the synchronization scope ID of this fence instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }

  /// Sets the synchronization scope ID of this fence instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Fence;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this fence instruction.  Not quite enough
  /// room in SubClassData for everything, so synchronization scope ID gets its
  /// own field.
  SyncScope::ID SSID;
};
  418. //===----------------------------------------------------------------------===//
  419. // AtomicCmpXchgInst Class
  420. //===----------------------------------------------------------------------===//
/// An instruction that atomically checks whether a
/// specified value is in a memory location, and, if it is, stores a new value
/// there.  The value returned by this instruction is a pair containing the
/// original value as first element, and an i1 indicating success (true) or
/// failure (false) as second element.
///
class AtomicCmpXchgInst : public Instruction {
  void Init(Value *Ptr, Value *Cmp, Value *NewVal, Align Align,
            AtomicOrdering SuccessOrdering, AtomicOrdering FailureOrdering,
            SyncScope::ID SSID);

  // An atomic ordering occupies 3 bits, sized for AtomicOrdering::LAST.
  template <unsigned Offset>
  using AtomicOrderingBitfieldElement =
      typename Bitfield::Element<AtomicOrdering, Offset, 3,
                                 AtomicOrdering::LAST>;

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  AtomicCmpXchgInst *cloneImpl() const;

public:
  AtomicCmpXchgInst(Value *Ptr, Value *Cmp, Value *NewVal, Align Alignment,
                    AtomicOrdering SuccessOrdering,
                    AtomicOrdering FailureOrdering, SyncScope::ID SSID,
                    Instruction *InsertBefore = nullptr);
  AtomicCmpXchgInst(Value *Ptr, Value *Cmp, Value *NewVal, Align Alignment,
                    AtomicOrdering SuccessOrdering,
                    AtomicOrdering FailureOrdering, SyncScope::ID SSID,
                    BasicBlock *InsertAtEnd);

  // allocate space for exactly three operands
  void *operator new(size_t S) { return User::operator new(S, 3); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  // Subclass-data bitfield layout: volatile, weak, success ordering, failure
  // ordering, then log2(alignment).
  using VolatileField = BoolBitfieldElementT<0>;
  using WeakField = BoolBitfieldElementT<VolatileField::NextBit>;
  using SuccessOrderingField =
      AtomicOrderingBitfieldElementT<WeakField::NextBit>;
  using FailureOrderingField =
      AtomicOrderingBitfieldElementT<SuccessOrderingField::NextBit>;
  using AlignmentField =
      AlignmentBitfieldElementT<FailureOrderingField::NextBit>;
  static_assert(
      Bitfield::areContiguous<VolatileField, WeakField, SuccessOrderingField,
                              FailureOrderingField, AlignmentField>(),
      "Bitfields must be contiguous");

  /// Return the alignment of the memory that is being allocated by the
  /// instruction.
  Align getAlign() const {
    return Align(1ULL << getSubclassData<AlignmentField>());
  }

  /// Set the alignment of the access; encoded as Log2(Align).
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }

  /// Return true if this is a cmpxchg from a volatile memory
  /// location.
  ///
  bool isVolatile() const { return getSubclassData<VolatileField>(); }

  /// Specify whether this is a volatile cmpxchg.
  ///
  void setVolatile(bool V) { setSubclassData<VolatileField>(V); }

  /// Return true if this cmpxchg may spuriously fail.
  bool isWeak() const { return getSubclassData<WeakField>(); }

  void setWeak(bool IsWeak) { setSubclassData<WeakField>(IsWeak); }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// A success ordering must be a real atomic ordering.
  static bool isValidSuccessOrdering(AtomicOrdering Ordering) {
    return Ordering != AtomicOrdering::NotAtomic &&
           Ordering != AtomicOrdering::Unordered;
  }

  /// A failure ordering additionally may not include a release component,
  /// since a failed cmpxchg performs no store.
  static bool isValidFailureOrdering(AtomicOrdering Ordering) {
    return Ordering != AtomicOrdering::NotAtomic &&
           Ordering != AtomicOrdering::Unordered &&
           Ordering != AtomicOrdering::AcquireRelease &&
           Ordering != AtomicOrdering::Release;
  }

  /// Returns the success ordering constraint of this cmpxchg instruction.
  AtomicOrdering getSuccessOrdering() const {
    return getSubclassData<SuccessOrderingField>();
  }

  /// Sets the success ordering constraint of this cmpxchg instruction.
  void setSuccessOrdering(AtomicOrdering Ordering) {
    assert(isValidSuccessOrdering(Ordering) &&
           "invalid CmpXchg success ordering");
    setSubclassData<SuccessOrderingField>(Ordering);
  }

  /// Returns the failure ordering constraint of this cmpxchg instruction.
  AtomicOrdering getFailureOrdering() const {
    return getSubclassData<FailureOrderingField>();
  }

  /// Sets the failure ordering constraint of this cmpxchg instruction.
  void setFailureOrdering(AtomicOrdering Ordering) {
    assert(isValidFailureOrdering(Ordering) &&
           "invalid CmpXchg failure ordering");
    setSubclassData<FailureOrderingField>(Ordering);
  }

  /// Returns a single ordering which is at least as strong as both the
  /// success and failure orderings for this cmpxchg.
  AtomicOrdering getMergedOrdering() const {
    if (getFailureOrdering() == AtomicOrdering::SequentiallyConsistent)
      return AtomicOrdering::SequentiallyConsistent;
    if (getFailureOrdering() == AtomicOrdering::Acquire) {
      if (getSuccessOrdering() == AtomicOrdering::Monotonic)
        return AtomicOrdering::Acquire;
      if (getSuccessOrdering() == AtomicOrdering::Release)
        return AtomicOrdering::AcquireRelease;
    }
    // Otherwise the success ordering already dominates the failure ordering.
    return getSuccessOrdering();
  }

  /// Returns the synchronization scope ID of this cmpxchg instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }

  /// Sets the synchronization scope ID of this cmpxchg instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  // Operands: 0 = address, 1 = expected (compare) value, 2 = new value.
  Value *getPointerOperand() { return getOperand(0); }
  const Value *getPointerOperand() const { return getOperand(0); }
  static unsigned getPointerOperandIndex() { return 0U; }

  Value *getCompareOperand() { return getOperand(1); }
  const Value *getCompareOperand() const { return getOperand(1); }

  Value *getNewValOperand() { return getOperand(2); }
  const Value *getNewValOperand() const { return getOperand(2); }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperand()->getType()->getPointerAddressSpace();
  }

  /// Returns the strongest permitted ordering on failure, given the
  /// desired ordering on success.
  ///
  /// If the comparison in a cmpxchg operation fails, there is no atomic store
  /// so release semantics cannot be provided.  So this function drops explicit
  /// Release requests from the AtomicOrdering.  A SequentiallyConsistent
  /// operation would remain SequentiallyConsistent.
  static AtomicOrdering
  getStrongestFailureOrdering(AtomicOrdering SuccessOrdering) {
    switch (SuccessOrdering) {
    default:
      llvm_unreachable("invalid cmpxchg success ordering");
    case AtomicOrdering::Release:
    case AtomicOrdering::Monotonic:
      return AtomicOrdering::Monotonic;
    case AtomicOrdering::AcquireRelease:
    case AtomicOrdering::Acquire:
      return AtomicOrdering::Acquire;
    case AtomicOrdering::SequentiallyConsistent:
      return AtomicOrdering::SequentiallyConsistent;
    }
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::AtomicCmpXchg;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this cmpxchg instruction.  Not quite
  /// enough room in SubClassData for everything, so synchronization scope ID
  /// gets its own field.
  SyncScope::ID SSID;
};
// AtomicCmpXchgInst always has exactly three operands (ptr, cmp, new value).
template <>
struct OperandTraits<AtomicCmpXchgInst> :
    public FixedNumOperandTraits<AtomicCmpXchgInst, 3> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(AtomicCmpXchgInst, Value)
  591. //===----------------------------------------------------------------------===//
  592. // AtomicRMWInst Class
  593. //===----------------------------------------------------------------------===//
/// an instruction that atomically reads a memory location,
/// combines it with another value, and then stores the result back.  Returns
/// the old value.
///
class AtomicRMWInst : public Instruction {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  AtomicRMWInst *cloneImpl() const;

public:
  /// This enumeration lists the possible modifications atomicrmw can make.  In
  /// the descriptions, 'p' is the pointer to the instruction's memory location,
  /// 'old' is the initial value of *p, and 'v' is the other value passed to the
  /// instruction.  These instructions always return 'old'.
  enum BinOp : unsigned {
    /// *p = v
    Xchg,
    /// *p = old + v
    Add,
    /// *p = old - v
    Sub,
    /// *p = old & v
    And,
    /// *p = ~(old & v)
    Nand,
    /// *p = old | v
    Or,
    /// *p = old ^ v
    Xor,
    /// *p = old >signed v ? old : v
    Max,
    /// *p = old <signed v ? old : v
    Min,
    /// *p = old >unsigned v ? old : v
    UMax,
    /// *p = old <unsigned v ? old : v
    UMin,
    /// *p = old + v
    FAdd,
    /// *p = old - v
    FSub,
    /// *p = maxnum(old, v)
    /// \p maxnum matches the behavior of \p llvm.maxnum.*.
    FMax,
    /// *p = minnum(old, v)
    /// \p minnum matches the behavior of \p llvm.minnum.*.
    FMin,
    /// Increment one up to a maximum value.
    /// *p = (old u>= v) ? 0 : (old + 1)
    UIncWrap,
    /// Decrement one until a minimum value or zero.
    /// *p = ((old == 0) || (old u> v)) ? v : (old - 1)
    UDecWrap,

    FIRST_BINOP = Xchg,
    LAST_BINOP = UDecWrap,
    BAD_BINOP
  };

private:
  // The ordering needs 3 bits and the operation 5 bits; each is sized by its
  // maximum enumerator.
  template <unsigned Offset>
  using AtomicOrderingBitfieldElement =
      typename Bitfield::Element<AtomicOrdering, Offset, 3,
                                 AtomicOrdering::LAST>;
  template <unsigned Offset>
  using BinOpBitfieldElement =
      typename Bitfield::Element<BinOp, Offset, 5, BinOp::LAST_BINOP>;

public:
  AtomicRMWInst(BinOp Operation, Value *Ptr, Value *Val, Align Alignment,
                AtomicOrdering Ordering, SyncScope::ID SSID,
                Instruction *InsertBefore = nullptr);
  AtomicRMWInst(BinOp Operation, Value *Ptr, Value *Val, Align Alignment,
                AtomicOrdering Ordering, SyncScope::ID SSID,
                BasicBlock *InsertAtEnd);

  // allocate space for exactly two operands
  void *operator new(size_t S) { return User::operator new(S, 2); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  // Subclass-data bitfield layout: volatile, ordering, operation, alignment.
  using VolatileField = BoolBitfieldElementT<0>;
  using AtomicOrderingField =
      AtomicOrderingBitfieldElementT<VolatileField::NextBit>;
  using OperationField = BinOpBitfieldElement<AtomicOrderingField::NextBit>;
  using AlignmentField = AlignmentBitfieldElementT<OperationField::NextBit>;
  static_assert(Bitfield::areContiguous<VolatileField, AtomicOrderingField,
                                        OperationField, AlignmentField>(),
                "Bitfields must be contiguous");

  /// Returns the binary operation this atomicrmw performs.
  BinOp getOperation() const { return getSubclassData<OperationField>(); }

  /// Returns a printable name for \p Op; defined out of line.
  static StringRef getOperationName(BinOp Op);

  /// Returns true for the floating-point operations
  /// (FAdd, FSub, FMax, FMin).
  static bool isFPOperation(BinOp Op) {
    switch (Op) {
    case AtomicRMWInst::FAdd:
    case AtomicRMWInst::FSub:
    case AtomicRMWInst::FMax:
    case AtomicRMWInst::FMin:
      return true;
    default:
      return false;
    }
  }

  void setOperation(BinOp Operation) {
    setSubclassData<OperationField>(Operation);
  }

  /// Return the alignment of the memory that is being allocated by the
  /// instruction.
  Align getAlign() const {
    return Align(1ULL << getSubclassData<AlignmentField>());
  }

  /// Set the alignment of the access; encoded as Log2(Align).
  void setAlignment(Align Align) {
    setSubclassData<AlignmentField>(Log2(Align));
  }

  /// Return true if this is a RMW on a volatile memory location.
  ///
  bool isVolatile() const { return getSubclassData<VolatileField>(); }

  /// Specify whether this is a volatile RMW or not.
  ///
  void setVolatile(bool V) { setSubclassData<VolatileField>(V); }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Returns the ordering constraint of this rmw instruction.
  AtomicOrdering getOrdering() const {
    return getSubclassData<AtomicOrderingField>();
  }

  /// Sets the ordering constraint of this rmw instruction.
  void setOrdering(AtomicOrdering Ordering) {
    assert(Ordering != AtomicOrdering::NotAtomic &&
           "atomicrmw instructions can only be atomic.");
    assert(Ordering != AtomicOrdering::Unordered &&
           "atomicrmw instructions cannot be unordered.");
    setSubclassData<AtomicOrderingField>(Ordering);
  }

  /// Returns the synchronization scope ID of this rmw instruction.
  SyncScope::ID getSyncScopeID() const {
    return SSID;
  }

  /// Sets the synchronization scope ID of this rmw instruction.
  void setSyncScopeID(SyncScope::ID SSID) {
    this->SSID = SSID;
  }

  // Operands: 0 = address, 1 = the value combined with *address.
  Value *getPointerOperand() { return getOperand(0); }
  const Value *getPointerOperand() const { return getOperand(0); }
  static unsigned getPointerOperandIndex() { return 0U; }

  Value *getValOperand() { return getOperand(1); }
  const Value *getValOperand() const { return getOperand(1); }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperand()->getType()->getPointerAddressSpace();
  }

  /// True when this instruction's operation is one of the FP variants.
  bool isFloatingPointOperation() const {
    return isFPOperation(getOperation());
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::AtomicRMW;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  void Init(BinOp Operation, Value *Ptr, Value *Val, Align Align,
            AtomicOrdering Ordering, SyncScope::ID SSID);

  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }

  /// The synchronization scope ID of this rmw instruction.  Not quite enough
  /// room in SubClassData for everything, so synchronization scope ID gets its
  /// own field.
  SyncScope::ID SSID;
};
// AtomicRMWInst always has exactly two operands (pointer and value).
template <>
struct OperandTraits<AtomicRMWInst>
    : public FixedNumOperandTraits<AtomicRMWInst,2> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(AtomicRMWInst, Value)
  767. //===----------------------------------------------------------------------===//
  768. // GetElementPtrInst Class
  769. //===----------------------------------------------------------------------===//
  770. // checkGEPType - Simple wrapper function to give a better assertion failure
  771. // message on bad indexes for a gep instruction.
  772. //
  773. inline Type *checkGEPType(Type *Ty) {
  774. assert(Ty && "Invalid GetElementPtrInst indices for type!");
  775. return Ty;
  776. }
/// an instruction for type-safe pointer arithmetic to
/// access elements of arrays and structs
///
class GetElementPtrInst : public Instruction {
  // The element type the indices are computed against, and the element type
  // of the resulting pointer.
  Type *SourceElementType;
  Type *ResultElementType;

  GetElementPtrInst(const GetElementPtrInst &GEPI);

  /// Constructors - Create a getelementptr instruction with a base pointer
  /// and a list of indices.  The first ctor can optionally insert before an
  /// existing instruction, the second appends the new instruction to the
  /// specified BasicBlock.
  inline GetElementPtrInst(Type *PointeeType, Value *Ptr,
                           ArrayRef<Value *> IdxList, unsigned Values,
                           const Twine &NameStr, Instruction *InsertBefore);
  inline GetElementPtrInst(Type *PointeeType, Value *Ptr,
                           ArrayRef<Value *> IdxList, unsigned Values,
                           const Twine &NameStr, BasicBlock *InsertAtEnd);

  void init(Value *Ptr, ArrayRef<Value *> IdxList, const Twine &NameStr);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  GetElementPtrInst *cloneImpl() const;

public:
  /// Create a getelementptr instruction computing PointeeType-based offsets
  /// from Ptr, optionally inserting before an existing instruction.
  static GetElementPtrInst *Create(Type *PointeeType, Value *Ptr,
                                   ArrayRef<Value *> IdxList,
                                   const Twine &NameStr = "",
                                   Instruction *InsertBefore = nullptr) {
    // One operand for the base pointer plus one per index.
    unsigned Values = 1 + unsigned(IdxList.size());
    assert(PointeeType && "Must specify element type");
    assert(cast<PointerType>(Ptr->getType()->getScalarType())
               ->isOpaqueOrPointeeTypeMatches(PointeeType));
    return new (Values) GetElementPtrInst(PointeeType, Ptr, IdxList, Values,
                                          NameStr, InsertBefore);
  }
  /// Create a getelementptr instruction computing PointeeType-based offsets
  /// from Ptr, appended to the end of the given basic block.
  static GetElementPtrInst *Create(Type *PointeeType, Value *Ptr,
                                   ArrayRef<Value *> IdxList,
                                   const Twine &NameStr,
                                   BasicBlock *InsertAtEnd) {
    // One operand for the base pointer plus one per index.
    unsigned Values = 1 + unsigned(IdxList.size());
    assert(PointeeType && "Must specify element type");
    assert(cast<PointerType>(Ptr->getType()->getScalarType())
               ->isOpaqueOrPointeeTypeMatches(PointeeType));
    return new (Values) GetElementPtrInst(PointeeType, Ptr, IdxList, Values,
                                          NameStr, InsertAtEnd);
  }
  822. /// Create an "inbounds" getelementptr. See the documentation for the
  823. /// "inbounds" flag in LangRef.html for details.
  824. static GetElementPtrInst *
  825. CreateInBounds(Type *PointeeType, Value *Ptr, ArrayRef<Value *> IdxList,
  826. const Twine &NameStr = "",
  827. Instruction *InsertBefore = nullptr) {
  828. GetElementPtrInst *GEP =
  829. Create(PointeeType, Ptr, IdxList, NameStr, InsertBefore);
  830. GEP->setIsInBounds(true);
  831. return GEP;
  832. }
  /// Create an "inbounds" getelementptr appended to the end of the given
  /// basic block.  See the documentation for the "inbounds" flag in
  /// LangRef.html for details.
  static GetElementPtrInst *CreateInBounds(Type *PointeeType, Value *Ptr,
                                           ArrayRef<Value *> IdxList,
                                           const Twine &NameStr,
                                           BasicBlock *InsertAtEnd) {
    GetElementPtrInst *GEP =
        Create(PointeeType, Ptr, IdxList, NameStr, InsertAtEnd);
    GEP->setIsInBounds(true);
    return GEP;
  }
  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// The element type the index operands are computed against.
  Type *getSourceElementType() const { return SourceElementType; }

  void setSourceElementType(Type *Ty) { SourceElementType = Ty; }
  void setResultElementType(Type *Ty) { ResultElementType = Ty; }

  /// The element type the result pointer points to.
  Type *getResultElementType() const {
    assert(cast<PointerType>(getType()->getScalarType())
               ->isOpaqueOrPointeeTypeMatches(ResultElementType));
    return ResultElementType;
  }
  /// Returns the address space of this instruction's pointer type.
  unsigned getAddressSpace() const {
    // Note that this is always the same as the pointer operand's address space
    // and that is cheaper to compute, so cheat here.
    return getPointerAddressSpace();
  }
  /// Returns the result type of a getelementptr with the given source
  /// element type and indexes.
  ///
  /// Null is returned if the indices are invalid for the specified
  /// source element type.
  static Type *getIndexedType(Type *Ty, ArrayRef<Value *> IdxList);
  static Type *getIndexedType(Type *Ty, ArrayRef<Constant *> IdxList);
  static Type *getIndexedType(Type *Ty, ArrayRef<uint64_t> IdxList);

  /// Return the type of the element at the given index of an indexable
  /// type.  This is equivalent to "getIndexedType(Agg, {Zero, Idx})".
  ///
  /// Returns null if the type can't be indexed, or the given index is not
  /// legal for the given type.
  static Type *getTypeAtIndex(Type *Ty, Value *Idx);
  static Type *getTypeAtIndex(Type *Ty, uint64_t Idx);
  // Iterators over the index operands.  Operand 0 is the base pointer, so the
  // indices start at operand 1.
  inline op_iterator idx_begin() { return op_begin()+1; }
  inline const_op_iterator idx_begin() const { return op_begin()+1; }
  inline op_iterator idx_end() { return op_end(); }
  inline const_op_iterator idx_end() const { return op_end(); }

  /// Range over the index operands only (excludes the base pointer).
  inline iterator_range<op_iterator> indices() {
    return make_range(idx_begin(), idx_end());
  }
  inline iterator_range<const_op_iterator> indices() const {
    return make_range(idx_begin(), idx_end());
  }
  // Operand 0 is the base pointer being indexed.
  Value *getPointerOperand() {
    return getOperand(0);
  }
  const Value *getPointerOperand() const {
    return getOperand(0);
  }
  static unsigned getPointerOperandIndex() {
    return 0U; // get index for modifying correct operand.
  }

  /// Method to return the pointer operand as a
  /// PointerType.
  Type *getPointerOperandType() const {
    return getPointerOperand()->getType();
  }

  /// Returns the address space of the pointer operand.
  unsigned getPointerAddressSpace() const {
    return getPointerOperandType()->getPointerAddressSpace();
  }
  /// Returns the pointer type returned by the GEP
  /// instruction, which may be a vector of pointers.
  static Type *getGEPReturnType(Type *ElTy, Value *Ptr,
                                ArrayRef<Value *> IdxList) {
    PointerType *OrigPtrTy = cast<PointerType>(Ptr->getType()->getScalarType());
    unsigned AddrSpace = OrigPtrTy->getAddressSpace();
    Type *ResultElemTy = checkGEPType(getIndexedType(ElTy, IdxList));
    // Opaque pointers carry no pointee type; otherwise the result pointer
    // names the indexed element type.  The address space is inherited from
    // the source pointer either way.
    Type *PtrTy = OrigPtrTy->isOpaque()
                      ? PointerType::get(OrigPtrTy->getContext(), AddrSpace)
                      : PointerType::get(ResultElemTy, AddrSpace);
    // Vector GEP: a vector-of-pointers operand yields a vector of result
    // pointers with the same element count.
    if (auto *PtrVTy = dyn_cast<VectorType>(Ptr->getType())) {
      ElementCount EltCount = PtrVTy->getElementCount();
      return VectorType::get(PtrTy, EltCount);
    }
    // Likewise if any index operand is a vector.
    for (Value *Index : IdxList)
      if (auto *IndexVTy = dyn_cast<VectorType>(Index->getType())) {
        ElementCount EltCount = IndexVTy->getElementCount();
        return VectorType::get(PtrTy, EltCount);
      }
    // Scalar GEP
    return PtrTy;
  }
  /// Number of index operands (total operands minus the base pointer).
  unsigned getNumIndices() const { // Note: always non-negative
    return getNumOperands() - 1;
  }
  /// True if this GEP has at least one index operand.
  bool hasIndices() const {
    return getNumOperands() > 1;
  }
  /// Return true if all of the indices of this GEP are
  /// zeros. If so, the result pointer and the first operand have the same
  /// value, just potentially different types.
  bool hasAllZeroIndices() const;

  /// Return true if all of the indices of this GEP are
  /// constant integers. If so, the result pointer and the first operand have
  /// a constant offset between them.
  bool hasAllConstantIndices() const;

  /// Set or clear the inbounds flag on this GEP instruction.
  /// See LangRef.html for the meaning of inbounds on a getelementptr.
  void setIsInBounds(bool b = true);

  /// Determine whether the GEP has the inbounds flag.
  bool isInBounds() const;

  /// Accumulate the constant address offset of this GEP if possible.
  ///
  /// This routine accepts an APInt into which it will accumulate the constant
  /// offset of this GEP if the GEP is in fact constant. If the GEP is not
  /// all-constant, it returns false and the value of the offset APInt is
  /// undefined (it is *not* preserved!). The APInt passed into this routine
  /// must be at least as wide as the IntPtr type for the address space of
  /// the base GEP pointer.
  bool accumulateConstantOffset(const DataLayout &DL, APInt &Offset) const;

  /// NOTE(review): judging from the signature this decomposes the GEP offset
  /// into a constant part (\p ConstantOffset) plus per-variable-index terms
  /// (\p VariableOffsets), all of width \p BitWidth — confirm against the
  /// out-of-line implementation.
  bool collectOffset(const DataLayout &DL, unsigned BitWidth,
                     MapVector<Value *, APInt> &VariableOffsets,
                     APInt &ConstantOffset) const;
  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return (I->getOpcode() == Instruction::GetElementPtr);
  }
  // Value overload: only Instructions can be GEPs, so check that first.
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// GetElementPtrInst has a variadic operand list with a minimum of one
// operand (the base pointer); the indices follow it.
template <>
struct OperandTraits<GetElementPtrInst> :
  public VariadicOperandTraits<GetElementPtrInst, 1> {
};
/// Out-of-line inline constructor (insert-before form). \p Values is the
/// total operand count (base pointer + indices); operands are co-allocated
/// before the User object, so op_end(this) - Values addresses the first
/// operand slot.
GetElementPtrInst::GetElementPtrInst(Type *PointeeType, Value *Ptr,
                                     ArrayRef<Value *> IdxList, unsigned Values,
                                     const Twine &NameStr,
                                     Instruction *InsertBefore)
    : Instruction(getGEPReturnType(PointeeType, Ptr, IdxList), GetElementPtr,
                  OperandTraits<GetElementPtrInst>::op_end(this) - Values,
                  Values, InsertBefore),
      SourceElementType(PointeeType),
      ResultElementType(getIndexedType(PointeeType, IdxList)) {
  // The computed result element type must agree with the (possibly opaque)
  // result pointer type.
  assert(cast<PointerType>(getType()->getScalarType())
             ->isOpaqueOrPointeeTypeMatches(ResultElementType));
  init(Ptr, IdxList, NameStr);
}
/// Out-of-line inline constructor (insert-at-block-end form); see the
/// insert-before overload for the meaning of \p Values.
GetElementPtrInst::GetElementPtrInst(Type *PointeeType, Value *Ptr,
                                     ArrayRef<Value *> IdxList, unsigned Values,
                                     const Twine &NameStr,
                                     BasicBlock *InsertAtEnd)
    : Instruction(getGEPReturnType(PointeeType, Ptr, IdxList), GetElementPtr,
                  OperandTraits<GetElementPtrInst>::op_end(this) - Values,
                  Values, InsertAtEnd),
      SourceElementType(PointeeType),
      ResultElementType(getIndexedType(PointeeType, IdxList)) {
  assert(cast<PointerType>(getType()->getScalarType())
             ->isOpaqueOrPointeeTypeMatches(ResultElementType));
  init(Ptr, IdxList, NameStr);
}

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(GetElementPtrInst, Value)
  994. //===----------------------------------------------------------------------===//
  995. // ICmpInst Class
  996. //===----------------------------------------------------------------------===//
  997. /// This instruction compares its operands according to the predicate given
  998. /// to the constructor. It only operates on integers or pointers. The operands
  999. /// must be identical types.
  1000. /// Represent an integer comparison operator.
class ICmpInst: public CmpInst {
  /// Debug-build sanity checks: the predicate must be an integer predicate
  /// and both operands must share an int/int-vector or ptr/ptr-vector type.
  /// The asserts compile away in NDEBUG builds.
  void AssertOK() {
    assert(isIntPredicate() &&
           "Invalid ICmp predicate value");
    assert(getOperand(0)->getType() == getOperand(1)->getType() &&
           "Both operands to ICmp instruction are not of the same type!");
    // Check that the operands are the right type
    assert((getOperand(0)->getType()->isIntOrIntVectorTy() ||
            getOperand(0)->getType()->isPtrOrPtrVectorTy()) &&
           "Invalid operand types for ICmp instruction");
  }

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical ICmpInst
  ICmpInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics.
  ICmpInst(
    Instruction *InsertBefore,  ///< Where to insert
    Predicate pred,             ///< The predicate to use for the comparison
    Value *LHS,                 ///< The left-hand-side of the expression
    Value *RHS,                 ///< The right-hand-side of the expression
    const Twine &NameStr = ""   ///< Name of the instruction
  ) : CmpInst(makeCmpResultType(LHS->getType()),
              Instruction::ICmp, pred, LHS, RHS, NameStr,
              InsertBefore) {
#ifndef NDEBUG
    AssertOK();
#endif
  }

  /// Constructor with insert-at-end semantics.
  ICmpInst(
    BasicBlock &InsertAtEnd,   ///< Block to insert into.
    Predicate pred,            ///< The predicate to use for the comparison
    Value *LHS,                ///< The left-hand-side of the expression
    Value *RHS,                ///< The right-hand-side of the expression
    const Twine &NameStr = ""  ///< Name of the instruction
  ) : CmpInst(makeCmpResultType(LHS->getType()),
              Instruction::ICmp, pred, LHS, RHS, NameStr,
              &InsertAtEnd) {
#ifndef NDEBUG
    AssertOK();
#endif
  }

  /// Constructor with no-insertion semantics
  ICmpInst(
    Predicate pred,            ///< The predicate to use for the comparison
    Value *LHS,                ///< The left-hand-side of the expression
    Value *RHS,                ///< The right-hand-side of the expression
    const Twine &NameStr = ""  ///< Name of the instruction
  ) : CmpInst(makeCmpResultType(LHS->getType()),
              Instruction::ICmp, pred, LHS, RHS, NameStr) {
#ifndef NDEBUG
    AssertOK();
#endif
  }

  /// For example, EQ->EQ, SLE->SLE, UGT->SGT, etc.
  /// @returns the predicate that would be the result if the operand were
  /// regarded as signed.
  /// Return the signed version of the predicate
  Predicate getSignedPredicate() const {
    return getSignedPredicate(getPredicate());
  }

  /// This is a static version that you can use without an instruction.
  /// Return the signed version of the predicate.
  static Predicate getSignedPredicate(Predicate pred);

  /// For example, EQ->EQ, SLE->ULE, UGT->UGT, etc.
  /// @returns the predicate that would be the result if the operand were
  /// regarded as unsigned.
  /// Return the unsigned version of the predicate
  Predicate getUnsignedPredicate() const {
    return getUnsignedPredicate(getPredicate());
  }

  /// This is a static version that you can use without an instruction.
  /// Return the unsigned version of the predicate.
  static Predicate getUnsignedPredicate(Predicate pred);

  /// Return true if this predicate is either EQ or NE. This also
  /// tests for commutativity.
  static bool isEquality(Predicate P) {
    return P == ICMP_EQ || P == ICMP_NE;
  }

  /// Return true if this predicate is either EQ or NE. This also
  /// tests for commutativity.
  bool isEquality() const {
    return isEquality(getPredicate());
  }

  /// @returns true if the predicate of this ICmpInst is commutative
  /// Determine if this relation is commutative.
  bool isCommutative() const { return isEquality(); }

  /// Return true if the predicate is relational (not EQ or NE).
  ///
  bool isRelational() const {
    return !isEquality();
  }

  /// Return true if the predicate is relational (not EQ or NE).
  ///
  static bool isRelational(Predicate P) {
    return !isEquality(P);
  }

  /// Return true if the predicate is SGT or UGT.
  ///
  static bool isGT(Predicate P) {
    return P == ICMP_SGT || P == ICMP_UGT;
  }

  /// Return true if the predicate is SLT or ULT.
  ///
  static bool isLT(Predicate P) {
    return P == ICMP_SLT || P == ICMP_ULT;
  }

  /// Return true if the predicate is SGE or UGE.
  ///
  static bool isGE(Predicate P) {
    return P == ICMP_SGE || P == ICMP_UGE;
  }

  /// Return true if the predicate is SLE or ULE.
  ///
  static bool isLE(Predicate P) {
    return P == ICMP_SLE || P == ICMP_ULE;
  }

  /// Returns the sequence of all ICmp predicates.
  ///
  static auto predicates() { return ICmpPredicates(); }

  /// Exchange the two operands to this instruction in such a way that it does
  /// not modify the semantics of the instruction. The predicate value may be
  /// changed to retain the same result if the predicate is order dependent
  /// (e.g. ult).
  /// Swap operands and adjust predicate.
  void swapOperands() {
    setPredicate(getSwappedPredicate());
    Op<0>().swap(Op<1>());
  }

  /// Return result of `LHS Pred RHS` comparison.
  static bool compare(const APInt &LHS, const APInt &RHS,
                      ICmpInst::Predicate Pred);

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::ICmp;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  1144. //===----------------------------------------------------------------------===//
  1145. // FCmpInst Class
  1146. //===----------------------------------------------------------------------===//
  1147. /// This instruction compares its operands according to the predicate given
  1148. /// to the constructor. It only operates on floating point values or packed
  1149. /// vectors of floating point values. The operands must be identical types.
  1150. /// Represents a floating point comparison operator.
  1151. class FCmpInst: public CmpInst {
  1152. void AssertOK() {
  1153. assert(isFPPredicate() && "Invalid FCmp predicate value");
  1154. assert(getOperand(0)->getType() == getOperand(1)->getType() &&
  1155. "Both operands to FCmp instruction are not of the same type!");
  1156. // Check that the operands are the right type
  1157. assert(getOperand(0)->getType()->isFPOrFPVectorTy() &&
  1158. "Invalid operand types for FCmp instruction");
  1159. }
  1160. protected:
  1161. // Note: Instruction needs to be a friend here to call cloneImpl.
  1162. friend class Instruction;
  1163. /// Clone an identical FCmpInst
  1164. FCmpInst *cloneImpl() const;
  1165. public:
  1166. /// Constructor with insert-before-instruction semantics.
  1167. FCmpInst(
  1168. Instruction *InsertBefore, ///< Where to insert
  1169. Predicate pred, ///< The predicate to use for the comparison
  1170. Value *LHS, ///< The left-hand-side of the expression
  1171. Value *RHS, ///< The right-hand-side of the expression
  1172. const Twine &NameStr = "" ///< Name of the instruction
  1173. ) : CmpInst(makeCmpResultType(LHS->getType()),
  1174. Instruction::FCmp, pred, LHS, RHS, NameStr,
  1175. InsertBefore) {
  1176. AssertOK();
  1177. }
  1178. /// Constructor with insert-at-end semantics.
  1179. FCmpInst(
  1180. BasicBlock &InsertAtEnd, ///< Block to insert into.
  1181. Predicate pred, ///< The predicate to use for the comparison
  1182. Value *LHS, ///< The left-hand-side of the expression
  1183. Value *RHS, ///< The right-hand-side of the expression
  1184. const Twine &NameStr = "" ///< Name of the instruction
  1185. ) : CmpInst(makeCmpResultType(LHS->getType()),
  1186. Instruction::FCmp, pred, LHS, RHS, NameStr,
  1187. &InsertAtEnd) {
  1188. AssertOK();
  1189. }
  1190. /// Constructor with no-insertion semantics
  1191. FCmpInst(
  1192. Predicate Pred, ///< The predicate to use for the comparison
  1193. Value *LHS, ///< The left-hand-side of the expression
  1194. Value *RHS, ///< The right-hand-side of the expression
  1195. const Twine &NameStr = "", ///< Name of the instruction
  1196. Instruction *FlagsSource = nullptr
  1197. ) : CmpInst(makeCmpResultType(LHS->getType()), Instruction::FCmp, Pred, LHS,
  1198. RHS, NameStr, nullptr, FlagsSource) {
  1199. AssertOK();
  1200. }
  1201. /// @returns true if the predicate of this instruction is EQ or NE.
  1202. /// Determine if this is an equality predicate.
  1203. static bool isEquality(Predicate Pred) {
  1204. return Pred == FCMP_OEQ || Pred == FCMP_ONE || Pred == FCMP_UEQ ||
  1205. Pred == FCMP_UNE;
  1206. }
  1207. /// @returns true if the predicate of this instruction is EQ or NE.
  1208. /// Determine if this is an equality predicate.
  1209. bool isEquality() const { return isEquality(getPredicate()); }
  1210. /// @returns true if the predicate of this instruction is commutative.
  1211. /// Determine if this is a commutative predicate.
  1212. bool isCommutative() const {
  1213. return isEquality() ||
  1214. getPredicate() == FCMP_FALSE ||
  1215. getPredicate() == FCMP_TRUE ||
  1216. getPredicate() == FCMP_ORD ||
  1217. getPredicate() == FCMP_UNO;
  1218. }
  1219. /// @returns true if the predicate is relational (not EQ or NE).
  1220. /// Determine if this a relational predicate.
  1221. bool isRelational() const { return !isEquality(); }
  1222. /// Exchange the two operands to this instruction in such a way that it does
  1223. /// not modify the semantics of the instruction. The predicate value may be
  1224. /// changed to retain the same result if the predicate is order dependent
  1225. /// (e.g. ult).
  1226. /// Swap operands and adjust predicate.
  1227. void swapOperands() {
  1228. setPredicate(getSwappedPredicate());
  1229. Op<0>().swap(Op<1>());
  1230. }
  1231. /// Returns the sequence of all FCmp predicates.
  1232. ///
  1233. static auto predicates() { return FCmpPredicates(); }
  1234. /// Return result of `LHS Pred RHS` comparison.
  1235. static bool compare(const APFloat &LHS, const APFloat &RHS,
  1236. FCmpInst::Predicate Pred);
  1237. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  1238. static bool classof(const Instruction *I) {
  1239. return I->getOpcode() == Instruction::FCmp;
  1240. }
  1241. static bool classof(const Value *V) {
  1242. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  1243. }
  1244. };
  1245. //===----------------------------------------------------------------------===//
  1246. /// This class represents a function call, abstracting a target
  1247. /// machine's calling convention. This class uses low bit of the SubClassData
  1248. /// field to indicate whether or not this is a tail call. The rest of the bits
  1249. /// hold the calling convention of the call.
  1250. ///
class CallInst : public CallBase {
  CallInst(const CallInst &CI);

  /// Construct a CallInst given a range of arguments.
  /// Construct a CallInst from a range of arguments
  inline CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                  ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr,
                  Instruction *InsertBefore);

  // Convenience overload: no operand bundles.
  inline CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                  const Twine &NameStr, Instruction *InsertBefore)
      : CallInst(Ty, Func, Args, std::nullopt, NameStr, InsertBefore) {}

  /// Construct a CallInst given a range of arguments.
  /// Construct a CallInst from a range of arguments
  inline CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                  ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr,
                  BasicBlock *InsertAtEnd);

  // Zero-argument forms.
  explicit CallInst(FunctionType *Ty, Value *F, const Twine &NameStr,
                    Instruction *InsertBefore);

  CallInst(FunctionType *ty, Value *F, const Twine &NameStr,
           BasicBlock *InsertAtEnd);

  // Shared constructor bodies: wire up callee, args and bundles, set name.
  void init(FunctionType *FTy, Value *Func, ArrayRef<Value *> Args,
            ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr);
  void init(FunctionType *FTy, Value *Func, const Twine &NameStr);

  /// Compute the number of operands to allocate.
  static int ComputeNumOperands(int NumArgs, int NumBundleInputs = 0) {
    // We need one operand for the called function, plus the input operand
    // counts provided.
    return 1 + NumArgs + NumBundleInputs;
  }

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  CallInst *cloneImpl() const;

public:
  // --- Create() overloads taking an explicit FunctionType ---

  static CallInst *Create(FunctionType *Ty, Value *F, const Twine &NameStr = "",
                          Instruction *InsertBefore = nullptr) {
    return new (ComputeNumOperands(0)) CallInst(Ty, F, NameStr, InsertBefore);
  }

  static CallInst *Create(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                          const Twine &NameStr,
                          Instruction *InsertBefore = nullptr) {
    return new (ComputeNumOperands(Args.size()))
        CallInst(Ty, Func, Args, std::nullopt, NameStr, InsertBefore);
  }

  static CallInst *Create(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                          ArrayRef<OperandBundleDef> Bundles = std::nullopt,
                          const Twine &NameStr = "",
                          Instruction *InsertBefore = nullptr) {
    const int NumOperands =
        ComputeNumOperands(Args.size(), CountBundleInputs(Bundles));
    // Bundle descriptors live in extra bytes allocated alongside the operands.
    const unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);

    return new (NumOperands, DescriptorBytes)
        CallInst(Ty, Func, Args, Bundles, NameStr, InsertBefore);
  }

  static CallInst *Create(FunctionType *Ty, Value *F, const Twine &NameStr,
                          BasicBlock *InsertAtEnd) {
    return new (ComputeNumOperands(0)) CallInst(Ty, F, NameStr, InsertAtEnd);
  }

  static CallInst *Create(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                          const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return new (ComputeNumOperands(Args.size()))
        CallInst(Ty, Func, Args, std::nullopt, NameStr, InsertAtEnd);
  }

  static CallInst *Create(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                          ArrayRef<OperandBundleDef> Bundles,
                          const Twine &NameStr, BasicBlock *InsertAtEnd) {
    const int NumOperands =
        ComputeNumOperands(Args.size(), CountBundleInputs(Bundles));
    const unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);

    return new (NumOperands, DescriptorBytes)
        CallInst(Ty, Func, Args, Bundles, NameStr, InsertAtEnd);
  }

  // --- Create() overloads taking a FunctionCallee (type + callee pair);
  // these forward to the FunctionType overloads above ---

  static CallInst *Create(FunctionCallee Func, const Twine &NameStr = "",
                          Instruction *InsertBefore = nullptr) {
    return Create(Func.getFunctionType(), Func.getCallee(), NameStr,
                  InsertBefore);
  }

  static CallInst *Create(FunctionCallee Func, ArrayRef<Value *> Args,
                          ArrayRef<OperandBundleDef> Bundles = std::nullopt,
                          const Twine &NameStr = "",
                          Instruction *InsertBefore = nullptr) {
    return Create(Func.getFunctionType(), Func.getCallee(), Args, Bundles,
                  NameStr, InsertBefore);
  }

  static CallInst *Create(FunctionCallee Func, ArrayRef<Value *> Args,
                          const Twine &NameStr,
                          Instruction *InsertBefore = nullptr) {
    return Create(Func.getFunctionType(), Func.getCallee(), Args, NameStr,
                  InsertBefore);
  }

  static CallInst *Create(FunctionCallee Func, const Twine &NameStr,
                          BasicBlock *InsertAtEnd) {
    return Create(Func.getFunctionType(), Func.getCallee(), NameStr,
                  InsertAtEnd);
  }

  static CallInst *Create(FunctionCallee Func, ArrayRef<Value *> Args,
                          const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return Create(Func.getFunctionType(), Func.getCallee(), Args, NameStr,
                  InsertAtEnd);
  }

  static CallInst *Create(FunctionCallee Func, ArrayRef<Value *> Args,
                          ArrayRef<OperandBundleDef> Bundles,
                          const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return Create(Func.getFunctionType(), Func.getCallee(), Args, Bundles,
                  NameStr, InsertAtEnd);
  }

  /// Create a clone of \p CI with a different set of operand bundles and
  /// insert it before \p InsertPt.
  ///
  /// The returned call instruction is identical \p CI in every way except that
  /// the operand bundles for the new instruction are set to the operand bundles
  /// in \p Bundles.
  static CallInst *Create(CallInst *CI, ArrayRef<OperandBundleDef> Bundles,
                          Instruction *InsertPt = nullptr);

  /// Generate the IR for a call to malloc:
  /// 1. Compute the malloc call's argument as the specified type's size,
  ///    possibly multiplied by the array size if the array size is not
  ///    constant 1.
  /// 2. Call malloc with that argument.
  /// 3. Bitcast the result of the malloc call to the specified type.
  static Instruction *CreateMalloc(Instruction *InsertBefore, Type *IntPtrTy,
                                   Type *AllocTy, Value *AllocSize,
                                   Value *ArraySize = nullptr,
                                   Function *MallocF = nullptr,
                                   const Twine &Name = "");
  static Instruction *CreateMalloc(BasicBlock *InsertAtEnd, Type *IntPtrTy,
                                   Type *AllocTy, Value *AllocSize,
                                   Value *ArraySize = nullptr,
                                   Function *MallocF = nullptr,
                                   const Twine &Name = "");
  static Instruction *
  CreateMalloc(Instruction *InsertBefore, Type *IntPtrTy, Type *AllocTy,
               Value *AllocSize, Value *ArraySize = nullptr,
               ArrayRef<OperandBundleDef> Bundles = std::nullopt,
               Function *MallocF = nullptr, const Twine &Name = "");
  static Instruction *
  CreateMalloc(BasicBlock *InsertAtEnd, Type *IntPtrTy, Type *AllocTy,
               Value *AllocSize, Value *ArraySize = nullptr,
               ArrayRef<OperandBundleDef> Bundles = std::nullopt,
               Function *MallocF = nullptr, const Twine &Name = "");

  /// Generate the IR for a call to the builtin free function.
  static Instruction *CreateFree(Value *Source, Instruction *InsertBefore);
  static Instruction *CreateFree(Value *Source, BasicBlock *InsertAtEnd);
  static Instruction *CreateFree(Value *Source,
                                 ArrayRef<OperandBundleDef> Bundles,
                                 Instruction *InsertBefore);
  static Instruction *CreateFree(Value *Source,
                                 ArrayRef<OperandBundleDef> Bundles,
                                 BasicBlock *InsertAtEnd);

  // Note that 'musttail' implies 'tail'.
  enum TailCallKind : unsigned {
    TCK_None = 0,
    TCK_Tail = 1,
    TCK_MustTail = 2,
    TCK_NoTail = 3,
    TCK_LAST = TCK_NoTail
  };

  // The tail-call kind occupies the low 2 bits of the subclass data, directly
  // below CallBase's calling-convention field.
  using TailCallKindField = Bitfield::Element<TailCallKind, 0, 2, TCK_LAST>;
  static_assert(
      Bitfield::areContiguous<TailCallKindField, CallBase::CallingConvField>(),
      "Bitfields must be contiguous");

  TailCallKind getTailCallKind() const {
    return getSubclassData<TailCallKindField>();
  }

  bool isTailCall() const {
    TailCallKind Kind = getTailCallKind();
    return Kind == TCK_Tail || Kind == TCK_MustTail;
  }

  bool isMustTailCall() const { return getTailCallKind() == TCK_MustTail; }

  bool isNoTailCall() const { return getTailCallKind() == TCK_NoTail; }

  void setTailCallKind(TailCallKind TCK) {
    setSubclassData<TailCallKindField>(TCK);
  }

  void setTailCall(bool IsTc = true) {
    setTailCallKind(IsTc ? TCK_Tail : TCK_None);
  }

  /// Return true if the call can return twice
  bool canReturnTwice() const { return hasFnAttr(Attribute::ReturnsTwice); }
  void setCanReturnTwice() { addFnAttr(Attribute::ReturnsTwice); }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Call;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

  /// Updates profile metadata by scaling it by \p S / \p T.
  void updateProfWeight(uint64_t S, uint64_t T);

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }
};
/// Out-of-line inline constructor (insert-at-block-end form). The operand
/// count is callee + arguments + bundle inputs; operands are co-allocated
/// before the User object, so op_end(this) minus the count yields the first
/// operand slot.
CallInst::CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                   ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr,
                   BasicBlock *InsertAtEnd)
    : CallBase(Ty->getReturnType(), Instruction::Call,
               OperandTraits<CallBase>::op_end(this) -
                   (Args.size() + CountBundleInputs(Bundles) + 1),
               unsigned(Args.size() + CountBundleInputs(Bundles) + 1),
               InsertAtEnd) {
  init(Ty, Func, Args, Bundles, NameStr);
}
/// Out-of-line inline constructor (insert-before form); see the
/// insert-at-end overload for the operand layout.
CallInst::CallInst(FunctionType *Ty, Value *Func, ArrayRef<Value *> Args,
                   ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr,
                   Instruction *InsertBefore)
    : CallBase(Ty->getReturnType(), Instruction::Call,
               OperandTraits<CallBase>::op_end(this) -
                   (Args.size() + CountBundleInputs(Bundles) + 1),
               unsigned(Args.size() + CountBundleInputs(Bundles) + 1),
               InsertBefore) {
  init(Ty, Func, Args, Bundles, NameStr);
}
  1466. //===----------------------------------------------------------------------===//
  1467. // SelectInst Class
  1468. //===----------------------------------------------------------------------===//
  1469. /// This class represents the LLVM 'select' instruction.
  1470. ///
class SelectInst : public Instruction {
  // The select's result type is the common type of its two value operands
  // (taken from S1 here; init() validates that S2 matches).
  SelectInst(Value *C, Value *S1, Value *S2, const Twine &NameStr,
             Instruction *InsertBefore)
      : Instruction(S1->getType(), Instruction::Select,
                    &Op<0>(), 3, InsertBefore) {
    init(C, S1, S2);
    setName(NameStr);
  }

  SelectInst(Value *C, Value *S1, Value *S2, const Twine &NameStr,
             BasicBlock *InsertAtEnd)
      : Instruction(S1->getType(), Instruction::Select,
                    &Op<0>(), 3, InsertAtEnd) {
    init(C, S1, S2);
    setName(NameStr);
  }

  /// Store the operands: condition, true value, false value.
  void init(Value *C, Value *S1, Value *S2) {
    assert(!areInvalidOperands(C, S1, S2) && "Invalid operands for select");
    Op<0>() = C;
    Op<1>() = S1;
    Op<2>() = S2;
  }

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  SelectInst *cloneImpl() const;

public:
  /// Create a select; optionally copies metadata from \p MDFrom.
  static SelectInst *Create(Value *C, Value *S1, Value *S2,
                            const Twine &NameStr = "",
                            Instruction *InsertBefore = nullptr,
                            Instruction *MDFrom = nullptr) {
    SelectInst *Sel = new(3) SelectInst(C, S1, S2, NameStr, InsertBefore);
    if (MDFrom)
      Sel->copyMetadata(*MDFrom);
    return Sel;
  }

  static SelectInst *Create(Value *C, Value *S1, Value *S2,
                            const Twine &NameStr,
                            BasicBlock *InsertAtEnd) {
    return new(3) SelectInst(C, S1, S2, NameStr, InsertAtEnd);
  }

  // Operand accessors: 0 = condition, 1 = true value, 2 = false value.
  const Value *getCondition() const { return Op<0>(); }
  const Value *getTrueValue() const { return Op<1>(); }
  const Value *getFalseValue() const { return Op<2>(); }
  Value *getCondition() { return Op<0>(); }
  Value *getTrueValue() { return Op<1>(); }
  Value *getFalseValue() { return Op<2>(); }

  void setCondition(Value *V) { Op<0>() = V; }
  void setTrueValue(Value *V) { Op<1>() = V; }
  void setFalseValue(Value *V) { Op<2>() = V; }

  /// Swap the true and false values of the select instruction.
  /// This doesn't swap prof metadata.
  void swapValues() { Op<1>().swap(Op<2>()); }

  /// Return a string if the specified operands are invalid
  /// for a select operation, otherwise return null.
  static const char *areInvalidOperands(Value *Cond, Value *True, Value *False);

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  OtherOps getOpcode() const {
    return static_cast<OtherOps>(Instruction::getOpcode());
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Select;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// SelectInst always has exactly three operands: condition, true value,
// false value.
template <>
struct OperandTraits<SelectInst> : public FixedNumOperandTraits<SelectInst, 3> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(SelectInst, Value)
  1543. //===----------------------------------------------------------------------===//
  1544. // VAArgInst Class
  1545. //===----------------------------------------------------------------------===//
  1546. /// This class represents the va_arg llvm instruction, which returns
  1547. /// an argument of the specified type given a va_list and increments that list
  1548. ///
class VAArgInst : public UnaryInstruction {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  VAArgInst *cloneImpl() const;

public:
  /// Construct a va_arg of type \p Ty reading from the va_list \p List.
  VAArgInst(Value *List, Type *Ty, const Twine &NameStr = "",
            Instruction *InsertBefore = nullptr)
      : UnaryInstruction(Ty, VAArg, List, InsertBefore) {
    setName(NameStr);
  }

  VAArgInst(Value *List, Type *Ty, const Twine &NameStr,
            BasicBlock *InsertAtEnd)
      : UnaryInstruction(Ty, VAArg, List, InsertAtEnd) {
    setName(NameStr);
  }

  /// The sole operand is the va_list pointer being read.
  Value *getPointerOperand() { return getOperand(0); }
  const Value *getPointerOperand() const { return getOperand(0); }
  static unsigned getPointerOperandIndex() { return 0U; }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == VAArg;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  1576. //===----------------------------------------------------------------------===//
  1577. // ExtractElementInst Class
  1578. //===----------------------------------------------------------------------===//
  1579. /// This instruction extracts a single (scalar)
  1580. /// element from a VectorType value
  1581. ///
class ExtractElementInst : public Instruction {
  ExtractElementInst(Value *Vec, Value *Idx, const Twine &NameStr = "",
                     Instruction *InsertBefore = nullptr);
  ExtractElementInst(Value *Vec, Value *Idx, const Twine &NameStr,
                     BasicBlock *InsertAtEnd);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  ExtractElementInst *cloneImpl() const;

public:
  /// Create an extractelement of element \p Idx from vector \p Vec.
  static ExtractElementInst *Create(Value *Vec, Value *Idx,
                                    const Twine &NameStr = "",
                                    Instruction *InsertBefore = nullptr) {
    return new(2) ExtractElementInst(Vec, Idx, NameStr, InsertBefore);
  }

  static ExtractElementInst *Create(Value *Vec, Value *Idx,
                                    const Twine &NameStr,
                                    BasicBlock *InsertAtEnd) {
    return new(2) ExtractElementInst(Vec, Idx, NameStr, InsertAtEnd);
  }

  /// Return true if an extractelement instruction can be
  /// formed with the specified operands.
  static bool isValidOperands(const Value *Vec, const Value *Idx);

  // Operand 0 is the vector, operand 1 is the index.
  Value *getVectorOperand() { return Op<0>(); }
  Value *getIndexOperand() { return Op<1>(); }
  const Value *getVectorOperand() const { return Op<0>(); }
  const Value *getIndexOperand() const { return Op<1>(); }

  VectorType *getVectorOperandType() const {
    return cast<VectorType>(getVectorOperand()->getType());
  }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::ExtractElement;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// ExtractElementInst always has exactly two operands (vector, index).
template <>
struct OperandTraits<ExtractElementInst> :
  public FixedNumOperandTraits<ExtractElementInst, 2> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ExtractElementInst, Value)
//===----------------------------------------------------------------------===//
// InsertElementInst Class
//===----------------------------------------------------------------------===//

/// This instruction inserts a single (scalar)
/// element into a VectorType value
///
class InsertElementInst : public Instruction {
  // Private constructors - use the Create() factories below, which allocate
  // the three operand slots (vector, new element, index) via placement new.
  InsertElementInst(Value *Vec, Value *NewElt, Value *Idx,
                    const Twine &NameStr = "",
                    Instruction *InsertBefore = nullptr);
  InsertElementInst(Value *Vec, Value *NewElt, Value *Idx, const Twine &NameStr,
                    BasicBlock *InsertAtEnd);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  InsertElementInst *cloneImpl() const;

public:
  /// Create an insertelement instruction, optionally inserting it before
  /// \p InsertBefore. The "new(3)" reserves space for the three operands.
  static InsertElementInst *Create(Value *Vec, Value *NewElt, Value *Idx,
                                   const Twine &NameStr = "",
                                   Instruction *InsertBefore = nullptr) {
    return new(3) InsertElementInst(Vec, NewElt, Idx, NameStr, InsertBefore);
  }

  /// As above, but appends the new instruction to basic block \p InsertAtEnd.
  static InsertElementInst *Create(Value *Vec, Value *NewElt, Value *Idx,
                                   const Twine &NameStr,
                                   BasicBlock *InsertAtEnd) {
    return new(3) InsertElementInst(Vec, NewElt, Idx, NameStr, InsertAtEnd);
  }

  /// Return true if an insertelement instruction can be
  /// formed with the specified operands.
  static bool isValidOperands(const Value *Vec, const Value *NewElt,
                              const Value *Idx);

  /// Overload to return most specific vector type.
  ///
  VectorType *getType() const {
    return cast<VectorType>(Instruction::getType());
  }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::InsertElement;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// InsertElementInst always has exactly three operands
// (vector, new element, index).
template <>
struct OperandTraits<InsertElementInst> :
  public FixedNumOperandTraits<InsertElementInst, 3> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(InsertElementInst, Value)
//===----------------------------------------------------------------------===//
// ShuffleVectorInst Class
//===----------------------------------------------------------------------===//

/// Sentinel mask value: a shuffle mask element of -1 means the corresponding
/// result lane is undefined (no source element is selected).
constexpr int UndefMaskElem = -1;
  1682. /// This instruction constructs a fixed permutation of two
  1683. /// input vectors.
  1684. ///
  1685. /// For each element of the result vector, the shuffle mask selects an element
  1686. /// from one of the input vectors to copy to the result. Non-negative elements
  1687. /// in the mask represent an index into the concatenated pair of input vectors.
  1688. /// UndefMaskElem (-1) specifies that the result element is undefined.
  1689. ///
  1690. /// For scalable vectors, all the elements of the mask must be 0 or -1. This
  1691. /// requirement may be relaxed in the future.
  1692. class ShuffleVectorInst : public Instruction {
  1693. SmallVector<int, 4> ShuffleMask;
  1694. Constant *ShuffleMaskForBitcode;
  1695. protected:
  1696. // Note: Instruction needs to be a friend here to call cloneImpl.
  1697. friend class Instruction;
  1698. ShuffleVectorInst *cloneImpl() const;
  1699. public:
  1700. ShuffleVectorInst(Value *V1, Value *Mask, const Twine &NameStr = "",
  1701. Instruction *InsertBefore = nullptr);
  1702. ShuffleVectorInst(Value *V1, Value *Mask, const Twine &NameStr,
  1703. BasicBlock *InsertAtEnd);
  1704. ShuffleVectorInst(Value *V1, ArrayRef<int> Mask, const Twine &NameStr = "",
  1705. Instruction *InsertBefore = nullptr);
  1706. ShuffleVectorInst(Value *V1, ArrayRef<int> Mask, const Twine &NameStr,
  1707. BasicBlock *InsertAtEnd);
  1708. ShuffleVectorInst(Value *V1, Value *V2, Value *Mask,
  1709. const Twine &NameStr = "",
  1710. Instruction *InsertBefor = nullptr);
  1711. ShuffleVectorInst(Value *V1, Value *V2, Value *Mask,
  1712. const Twine &NameStr, BasicBlock *InsertAtEnd);
  1713. ShuffleVectorInst(Value *V1, Value *V2, ArrayRef<int> Mask,
  1714. const Twine &NameStr = "",
  1715. Instruction *InsertBefor = nullptr);
  1716. ShuffleVectorInst(Value *V1, Value *V2, ArrayRef<int> Mask,
  1717. const Twine &NameStr, BasicBlock *InsertAtEnd);
  1718. void *operator new(size_t S) { return User::operator new(S, 2); }
  1719. void operator delete(void *Ptr) { return User::operator delete(Ptr); }
  1720. /// Swap the operands and adjust the mask to preserve the semantics
  1721. /// of the instruction.
  1722. void commute();
  1723. /// Return true if a shufflevector instruction can be
  1724. /// formed with the specified operands.
  1725. static bool isValidOperands(const Value *V1, const Value *V2,
  1726. const Value *Mask);
  1727. static bool isValidOperands(const Value *V1, const Value *V2,
  1728. ArrayRef<int> Mask);
  1729. /// Overload to return most specific vector type.
  1730. ///
  1731. VectorType *getType() const {
  1732. return cast<VectorType>(Instruction::getType());
  1733. }
  1734. /// Transparently provide more efficient getOperand methods.
  1735. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  1736. /// Return the shuffle mask value of this instruction for the given element
  1737. /// index. Return UndefMaskElem if the element is undef.
  1738. int getMaskValue(unsigned Elt) const { return ShuffleMask[Elt]; }
  1739. /// Convert the input shuffle mask operand to a vector of integers. Undefined
  1740. /// elements of the mask are returned as UndefMaskElem.
  1741. static void getShuffleMask(const Constant *Mask,
  1742. SmallVectorImpl<int> &Result);
  1743. /// Return the mask for this instruction as a vector of integers. Undefined
  1744. /// elements of the mask are returned as UndefMaskElem.
  1745. void getShuffleMask(SmallVectorImpl<int> &Result) const {
  1746. Result.assign(ShuffleMask.begin(), ShuffleMask.end());
  1747. }
  1748. /// Return the mask for this instruction, for use in bitcode.
  1749. ///
  1750. /// TODO: This is temporary until we decide a new bitcode encoding for
  1751. /// shufflevector.
  1752. Constant *getShuffleMaskForBitcode() const { return ShuffleMaskForBitcode; }
  1753. static Constant *convertShuffleMaskForBitcode(ArrayRef<int> Mask,
  1754. Type *ResultTy);
  1755. void setShuffleMask(ArrayRef<int> Mask);
  1756. ArrayRef<int> getShuffleMask() const { return ShuffleMask; }
  1757. /// Return true if this shuffle returns a vector with a different number of
  1758. /// elements than its source vectors.
  1759. /// Examples: shufflevector <4 x n> A, <4 x n> B, <1,2,3>
  1760. /// shufflevector <4 x n> A, <4 x n> B, <1,2,3,4,5>
  1761. bool changesLength() const {
  1762. unsigned NumSourceElts = cast<VectorType>(Op<0>()->getType())
  1763. ->getElementCount()
  1764. .getKnownMinValue();
  1765. unsigned NumMaskElts = ShuffleMask.size();
  1766. return NumSourceElts != NumMaskElts;
  1767. }
  1768. /// Return true if this shuffle returns a vector with a greater number of
  1769. /// elements than its source vectors.
  1770. /// Example: shufflevector <2 x n> A, <2 x n> B, <1,2,3>
  1771. bool increasesLength() const {
  1772. unsigned NumSourceElts = cast<VectorType>(Op<0>()->getType())
  1773. ->getElementCount()
  1774. .getKnownMinValue();
  1775. unsigned NumMaskElts = ShuffleMask.size();
  1776. return NumSourceElts < NumMaskElts;
  1777. }
  1778. /// Return true if this shuffle mask chooses elements from exactly one source
  1779. /// vector.
  1780. /// Example: <7,5,undef,7>
  1781. /// This assumes that vector operands are the same length as the mask.
  1782. static bool isSingleSourceMask(ArrayRef<int> Mask);
  1783. static bool isSingleSourceMask(const Constant *Mask) {
  1784. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1785. SmallVector<int, 16> MaskAsInts;
  1786. getShuffleMask(Mask, MaskAsInts);
  1787. return isSingleSourceMask(MaskAsInts);
  1788. }
  1789. /// Return true if this shuffle chooses elements from exactly one source
  1790. /// vector without changing the length of that vector.
  1791. /// Example: shufflevector <4 x n> A, <4 x n> B, <3,0,undef,3>
  1792. /// TODO: Optionally allow length-changing shuffles.
  1793. bool isSingleSource() const {
  1794. return !changesLength() && isSingleSourceMask(ShuffleMask);
  1795. }
  1796. /// Return true if this shuffle mask chooses elements from exactly one source
  1797. /// vector without lane crossings. A shuffle using this mask is not
  1798. /// necessarily a no-op because it may change the number of elements from its
  1799. /// input vectors or it may provide demanded bits knowledge via undef lanes.
  1800. /// Example: <undef,undef,2,3>
  1801. static bool isIdentityMask(ArrayRef<int> Mask);
  1802. static bool isIdentityMask(const Constant *Mask) {
  1803. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1804. // Not possible to express a shuffle mask for a scalable vector for this
  1805. // case.
  1806. if (isa<ScalableVectorType>(Mask->getType()))
  1807. return false;
  1808. SmallVector<int, 16> MaskAsInts;
  1809. getShuffleMask(Mask, MaskAsInts);
  1810. return isIdentityMask(MaskAsInts);
  1811. }
  1812. /// Return true if this shuffle chooses elements from exactly one source
  1813. /// vector without lane crossings and does not change the number of elements
  1814. /// from its input vectors.
  1815. /// Example: shufflevector <4 x n> A, <4 x n> B, <4,undef,6,undef>
  1816. bool isIdentity() const {
  1817. // Not possible to express a shuffle mask for a scalable vector for this
  1818. // case.
  1819. if (isa<ScalableVectorType>(getType()))
  1820. return false;
  1821. return !changesLength() && isIdentityMask(ShuffleMask);
  1822. }
  1823. /// Return true if this shuffle lengthens exactly one source vector with
  1824. /// undefs in the high elements.
  1825. bool isIdentityWithPadding() const;
  1826. /// Return true if this shuffle extracts the first N elements of exactly one
  1827. /// source vector.
  1828. bool isIdentityWithExtract() const;
  1829. /// Return true if this shuffle concatenates its 2 source vectors. This
  1830. /// returns false if either input is undefined. In that case, the shuffle is
  1831. /// is better classified as an identity with padding operation.
  1832. bool isConcat() const;
  1833. /// Return true if this shuffle mask chooses elements from its source vectors
  1834. /// without lane crossings. A shuffle using this mask would be
  1835. /// equivalent to a vector select with a constant condition operand.
  1836. /// Example: <4,1,6,undef>
  1837. /// This returns false if the mask does not choose from both input vectors.
  1838. /// In that case, the shuffle is better classified as an identity shuffle.
  1839. /// This assumes that vector operands are the same length as the mask
  1840. /// (a length-changing shuffle can never be equivalent to a vector select).
  1841. static bool isSelectMask(ArrayRef<int> Mask);
  1842. static bool isSelectMask(const Constant *Mask) {
  1843. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1844. SmallVector<int, 16> MaskAsInts;
  1845. getShuffleMask(Mask, MaskAsInts);
  1846. return isSelectMask(MaskAsInts);
  1847. }
  1848. /// Return true if this shuffle chooses elements from its source vectors
  1849. /// without lane crossings and all operands have the same number of elements.
  1850. /// In other words, this shuffle is equivalent to a vector select with a
  1851. /// constant condition operand.
  1852. /// Example: shufflevector <4 x n> A, <4 x n> B, <undef,1,6,3>
  1853. /// This returns false if the mask does not choose from both input vectors.
  1854. /// In that case, the shuffle is better classified as an identity shuffle.
  1855. /// TODO: Optionally allow length-changing shuffles.
  1856. bool isSelect() const {
  1857. return !changesLength() && isSelectMask(ShuffleMask);
  1858. }
  1859. /// Return true if this shuffle mask swaps the order of elements from exactly
  1860. /// one source vector.
  1861. /// Example: <7,6,undef,4>
  1862. /// This assumes that vector operands are the same length as the mask.
  1863. static bool isReverseMask(ArrayRef<int> Mask);
  1864. static bool isReverseMask(const Constant *Mask) {
  1865. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1866. SmallVector<int, 16> MaskAsInts;
  1867. getShuffleMask(Mask, MaskAsInts);
  1868. return isReverseMask(MaskAsInts);
  1869. }
  1870. /// Return true if this shuffle swaps the order of elements from exactly
  1871. /// one source vector.
  1872. /// Example: shufflevector <4 x n> A, <4 x n> B, <3,undef,1,undef>
  1873. /// TODO: Optionally allow length-changing shuffles.
  1874. bool isReverse() const {
  1875. return !changesLength() && isReverseMask(ShuffleMask);
  1876. }
  1877. /// Return true if this shuffle mask chooses all elements with the same value
  1878. /// as the first element of exactly one source vector.
  1879. /// Example: <4,undef,undef,4>
  1880. /// This assumes that vector operands are the same length as the mask.
  1881. static bool isZeroEltSplatMask(ArrayRef<int> Mask);
  1882. static bool isZeroEltSplatMask(const Constant *Mask) {
  1883. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1884. SmallVector<int, 16> MaskAsInts;
  1885. getShuffleMask(Mask, MaskAsInts);
  1886. return isZeroEltSplatMask(MaskAsInts);
  1887. }
  1888. /// Return true if all elements of this shuffle are the same value as the
  1889. /// first element of exactly one source vector without changing the length
  1890. /// of that vector.
  1891. /// Example: shufflevector <4 x n> A, <4 x n> B, <undef,0,undef,0>
  1892. /// TODO: Optionally allow length-changing shuffles.
  1893. /// TODO: Optionally allow splats from other elements.
  1894. bool isZeroEltSplat() const {
  1895. return !changesLength() && isZeroEltSplatMask(ShuffleMask);
  1896. }
  1897. /// Return true if this shuffle mask is a transpose mask.
  1898. /// Transpose vector masks transpose a 2xn matrix. They read corresponding
  1899. /// even- or odd-numbered vector elements from two n-dimensional source
  1900. /// vectors and write each result into consecutive elements of an
  1901. /// n-dimensional destination vector. Two shuffles are necessary to complete
  1902. /// the transpose, one for the even elements and another for the odd elements.
  1903. /// This description closely follows how the TRN1 and TRN2 AArch64
  1904. /// instructions operate.
  1905. ///
  1906. /// For example, a simple 2x2 matrix can be transposed with:
  1907. ///
  1908. /// ; Original matrix
  1909. /// m0 = < a, b >
  1910. /// m1 = < c, d >
  1911. ///
  1912. /// ; Transposed matrix
  1913. /// t0 = < a, c > = shufflevector m0, m1, < 0, 2 >
  1914. /// t1 = < b, d > = shufflevector m0, m1, < 1, 3 >
  1915. ///
  1916. /// For matrices having greater than n columns, the resulting nx2 transposed
  1917. /// matrix is stored in two result vectors such that one vector contains
  1918. /// interleaved elements from all the even-numbered rows and the other vector
  1919. /// contains interleaved elements from all the odd-numbered rows. For example,
  1920. /// a 2x4 matrix can be transposed with:
  1921. ///
  1922. /// ; Original matrix
  1923. /// m0 = < a, b, c, d >
  1924. /// m1 = < e, f, g, h >
  1925. ///
  1926. /// ; Transposed matrix
  1927. /// t0 = < a, e, c, g > = shufflevector m0, m1 < 0, 4, 2, 6 >
  1928. /// t1 = < b, f, d, h > = shufflevector m0, m1 < 1, 5, 3, 7 >
  1929. static bool isTransposeMask(ArrayRef<int> Mask);
  1930. static bool isTransposeMask(const Constant *Mask) {
  1931. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1932. SmallVector<int, 16> MaskAsInts;
  1933. getShuffleMask(Mask, MaskAsInts);
  1934. return isTransposeMask(MaskAsInts);
  1935. }
  1936. /// Return true if this shuffle transposes the elements of its inputs without
  1937. /// changing the length of the vectors. This operation may also be known as a
  1938. /// merge or interleave. See the description for isTransposeMask() for the
  1939. /// exact specification.
  1940. /// Example: shufflevector <4 x n> A, <4 x n> B, <0,4,2,6>
  1941. bool isTranspose() const {
  1942. return !changesLength() && isTransposeMask(ShuffleMask);
  1943. }
  1944. /// Return true if this shuffle mask is a splice mask, concatenating the two
  1945. /// inputs together and then extracts an original width vector starting from
  1946. /// the splice index.
  1947. /// Example: shufflevector <4 x n> A, <4 x n> B, <1,2,3,4>
  1948. static bool isSpliceMask(ArrayRef<int> Mask, int &Index);
  1949. static bool isSpliceMask(const Constant *Mask, int &Index) {
  1950. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1951. SmallVector<int, 16> MaskAsInts;
  1952. getShuffleMask(Mask, MaskAsInts);
  1953. return isSpliceMask(MaskAsInts, Index);
  1954. }
  1955. /// Return true if this shuffle splices two inputs without changing the length
  1956. /// of the vectors. This operation concatenates the two inputs together and
  1957. /// then extracts an original width vector starting from the splice index.
  1958. /// Example: shufflevector <4 x n> A, <4 x n> B, <1,2,3,4>
  1959. bool isSplice(int &Index) const {
  1960. return !changesLength() && isSpliceMask(ShuffleMask, Index);
  1961. }
  1962. /// Return true if this shuffle mask is an extract subvector mask.
  1963. /// A valid extract subvector mask returns a smaller vector from a single
  1964. /// source operand. The base extraction index is returned as well.
  1965. static bool isExtractSubvectorMask(ArrayRef<int> Mask, int NumSrcElts,
  1966. int &Index);
  1967. static bool isExtractSubvectorMask(const Constant *Mask, int NumSrcElts,
  1968. int &Index) {
  1969. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1970. // Not possible to express a shuffle mask for a scalable vector for this
  1971. // case.
  1972. if (isa<ScalableVectorType>(Mask->getType()))
  1973. return false;
  1974. SmallVector<int, 16> MaskAsInts;
  1975. getShuffleMask(Mask, MaskAsInts);
  1976. return isExtractSubvectorMask(MaskAsInts, NumSrcElts, Index);
  1977. }
  1978. /// Return true if this shuffle mask is an extract subvector mask.
  1979. bool isExtractSubvectorMask(int &Index) const {
  1980. // Not possible to express a shuffle mask for a scalable vector for this
  1981. // case.
  1982. if (isa<ScalableVectorType>(getType()))
  1983. return false;
  1984. int NumSrcElts =
  1985. cast<FixedVectorType>(Op<0>()->getType())->getNumElements();
  1986. return isExtractSubvectorMask(ShuffleMask, NumSrcElts, Index);
  1987. }
  1988. /// Return true if this shuffle mask is an insert subvector mask.
  1989. /// A valid insert subvector mask inserts the lowest elements of a second
  1990. /// source operand into an in-place first source operand operand.
  1991. /// Both the sub vector width and the insertion index is returned.
  1992. static bool isInsertSubvectorMask(ArrayRef<int> Mask, int NumSrcElts,
  1993. int &NumSubElts, int &Index);
  1994. static bool isInsertSubvectorMask(const Constant *Mask, int NumSrcElts,
  1995. int &NumSubElts, int &Index) {
  1996. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  1997. // Not possible to express a shuffle mask for a scalable vector for this
  1998. // case.
  1999. if (isa<ScalableVectorType>(Mask->getType()))
  2000. return false;
  2001. SmallVector<int, 16> MaskAsInts;
  2002. getShuffleMask(Mask, MaskAsInts);
  2003. return isInsertSubvectorMask(MaskAsInts, NumSrcElts, NumSubElts, Index);
  2004. }
  2005. /// Return true if this shuffle mask is an insert subvector mask.
  2006. bool isInsertSubvectorMask(int &NumSubElts, int &Index) const {
  2007. // Not possible to express a shuffle mask for a scalable vector for this
  2008. // case.
  2009. if (isa<ScalableVectorType>(getType()))
  2010. return false;
  2011. int NumSrcElts =
  2012. cast<FixedVectorType>(Op<0>()->getType())->getNumElements();
  2013. return isInsertSubvectorMask(ShuffleMask, NumSrcElts, NumSubElts, Index);
  2014. }
  2015. /// Return true if this shuffle mask replicates each of the \p VF elements
  2016. /// in a vector \p ReplicationFactor times.
  2017. /// For example, the mask for \p ReplicationFactor=3 and \p VF=4 is:
  2018. /// <0,0,0,1,1,1,2,2,2,3,3,3>
  2019. static bool isReplicationMask(ArrayRef<int> Mask, int &ReplicationFactor,
  2020. int &VF);
  2021. static bool isReplicationMask(const Constant *Mask, int &ReplicationFactor,
  2022. int &VF) {
  2023. assert(Mask->getType()->isVectorTy() && "Shuffle needs vector constant.");
  2024. // Not possible to express a shuffle mask for a scalable vector for this
  2025. // case.
  2026. if (isa<ScalableVectorType>(Mask->getType()))
  2027. return false;
  2028. SmallVector<int, 16> MaskAsInts;
  2029. getShuffleMask(Mask, MaskAsInts);
  2030. return isReplicationMask(MaskAsInts, ReplicationFactor, VF);
  2031. }
  2032. /// Return true if this shuffle mask is a replication mask.
  2033. bool isReplicationMask(int &ReplicationFactor, int &VF) const;
  2034. /// Return true if this shuffle mask represents "clustered" mask of size VF,
  2035. /// i.e. each index between [0..VF) is used exactly once in each submask of
  2036. /// size VF.
  2037. /// For example, the mask for \p VF=4 is:
  2038. /// 0, 1, 2, 3, 3, 2, 0, 1 - "clustered", because each submask of size 4
  2039. /// (0,1,2,3 and 3,2,0,1) uses indices [0..VF) exactly one time.
  2040. /// 0, 1, 2, 3, 3, 3, 1, 0 - not "clustered", because
  2041. /// element 3 is used twice in the second submask
  2042. /// (3,3,1,0) and index 2 is not used at all.
  2043. static bool isOneUseSingleSourceMask(ArrayRef<int> Mask, int VF);
  2044. /// Return true if this shuffle mask is a one-use-single-source("clustered")
  2045. /// mask.
  2046. bool isOneUseSingleSourceMask(int VF) const;
  2047. /// Change values in a shuffle permute mask assuming the two vector operands
  2048. /// of length InVecNumElts have swapped position.
  2049. static void commuteShuffleMask(MutableArrayRef<int> Mask,
  2050. unsigned InVecNumElts) {
  2051. for (int &Idx : Mask) {
  2052. if (Idx == -1)
  2053. continue;
  2054. Idx = Idx < (int)InVecNumElts ? Idx + InVecNumElts : Idx - InVecNumElts;
  2055. assert(Idx >= 0 && Idx < (int)InVecNumElts * 2 &&
  2056. "shufflevector mask index out of range");
  2057. }
  2058. }
  2059. // Methods for support type inquiry through isa, cast, and dyn_cast:
  2060. static bool classof(const Instruction *I) {
  2061. return I->getOpcode() == Instruction::ShuffleVector;
  2062. }
  2063. static bool classof(const Value *V) {
  2064. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  2065. }
  2066. };
// ShuffleVectorInst has exactly two Use slots (the two input vectors); the
// shuffle mask is stored out-of-line, not as an operand.
template <>
struct OperandTraits<ShuffleVectorInst>
    : public FixedNumOperandTraits<ShuffleVectorInst, 2> {};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ShuffleVectorInst, Value)
//===----------------------------------------------------------------------===//
// ExtractValueInst Class
//===----------------------------------------------------------------------===//

/// This instruction extracts a struct member or array
/// element value from an aggregate value.
///
class ExtractValueInst : public UnaryInstruction {
  /// The list of indices, stored out-of-line (indices are not operands).
  SmallVector<unsigned, 4> Indices;

  ExtractValueInst(const ExtractValueInst &EVI);

  /// Constructors - Create a extractvalue instruction with a base aggregate
  /// value and a list of indices. The first ctor can optionally insert before
  /// an existing instruction, the second appends the new instruction to the
  /// specified BasicBlock.
  inline ExtractValueInst(Value *Agg,
                          ArrayRef<unsigned> Idxs,
                          const Twine &NameStr,
                          Instruction *InsertBefore);
  inline ExtractValueInst(Value *Agg,
                          ArrayRef<unsigned> Idxs,
                          const Twine &NameStr, BasicBlock *InsertAtEnd);

  // Shared constructor tail: records the indices and sets the name.
  void init(ArrayRef<unsigned> Idxs, const Twine &NameStr);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  ExtractValueInst *cloneImpl() const;

public:
  /// Create an extractvalue instruction, optionally inserting it before
  /// \p InsertBefore.
  static ExtractValueInst *Create(Value *Agg,
                                  ArrayRef<unsigned> Idxs,
                                  const Twine &NameStr = "",
                                  Instruction *InsertBefore = nullptr) {
    return new
      ExtractValueInst(Agg, Idxs, NameStr, InsertBefore);
  }

  /// As above, but appends the new instruction to basic block \p InsertAtEnd.
  static ExtractValueInst *Create(Value *Agg,
                                  ArrayRef<unsigned> Idxs,
                                  const Twine &NameStr,
                                  BasicBlock *InsertAtEnd) {
    return new ExtractValueInst(Agg, Idxs, NameStr, InsertAtEnd);
  }

  /// Returns the type of the element that would be extracted
  /// with an extractvalue instruction with the specified parameters.
  ///
  /// Null is returned if the indices are invalid for the specified type.
  static Type *getIndexedType(Type *Agg, ArrayRef<unsigned> Idxs);

  // Iteration over the index list (read-only).
  using idx_iterator = const unsigned*;

  inline idx_iterator idx_begin() const { return Indices.begin(); }
  inline idx_iterator idx_end() const { return Indices.end(); }
  inline iterator_range<idx_iterator> indices() const {
    return make_range(idx_begin(), idx_end());
  }

  /// The aggregate being indexed into (the instruction's only operand).
  Value *getAggregateOperand() {
    return getOperand(0);
  }
  const Value *getAggregateOperand() const {
    return getOperand(0);
  }
  static unsigned getAggregateOperandIndex() {
    return 0U; // get index for modifying correct operand
  }

  ArrayRef<unsigned> getIndices() const {
    return Indices;
  }

  unsigned getNumIndices() const {
    return (unsigned)Indices.size();
  }

  // An extractvalue instruction always carries at least one index, so this
  // is unconditionally true.
  bool hasIndices() const {
    return true;
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::ExtractValue;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// Out-of-line inline constructor definitions. The result type is computed
// from the aggregate type and the index list via getIndexedType();
// checkGEPType presumably rejects a null (invalid-indices) result — defined
// elsewhere in this header, verify there.
ExtractValueInst::ExtractValueInst(Value *Agg,
                                   ArrayRef<unsigned> Idxs,
                                   const Twine &NameStr,
                                   Instruction *InsertBefore)
    : UnaryInstruction(checkGEPType(getIndexedType(Agg->getType(), Idxs)),
                       ExtractValue, Agg, InsertBefore) {
  init(Idxs, NameStr);
}

ExtractValueInst::ExtractValueInst(Value *Agg,
                                   ArrayRef<unsigned> Idxs,
                                   const Twine &NameStr,
                                   BasicBlock *InsertAtEnd)
    : UnaryInstruction(checkGEPType(getIndexedType(Agg->getType(), Idxs)),
                       ExtractValue, Agg, InsertAtEnd) {
  init(Idxs, NameStr);
}
//===----------------------------------------------------------------------===//
// InsertValueInst Class
//===----------------------------------------------------------------------===//

/// This instruction inserts a struct field or array element
/// value into an aggregate value.
///
class InsertValueInst : public Instruction {
  /// The list of indices, stored out-of-line (indices are not operands).
  SmallVector<unsigned, 4> Indices;

  InsertValueInst(const InsertValueInst &IVI);

  /// Constructors - Create a insertvalue instruction with a base aggregate
  /// value, a value to insert, and a list of indices. The first ctor can
  /// optionally insert before an existing instruction, the second appends
  /// the new instruction to the specified BasicBlock.
  inline InsertValueInst(Value *Agg, Value *Val,
                         ArrayRef<unsigned> Idxs,
                         const Twine &NameStr,
                         Instruction *InsertBefore);
  inline InsertValueInst(Value *Agg, Value *Val,
                         ArrayRef<unsigned> Idxs,
                         const Twine &NameStr, BasicBlock *InsertAtEnd);

  /// Constructors - These two constructors are convenience methods because one
  /// and two index insertvalue instructions are so common.
  InsertValueInst(Value *Agg, Value *Val, unsigned Idx,
                  const Twine &NameStr = "",
                  Instruction *InsertBefore = nullptr);
  InsertValueInst(Value *Agg, Value *Val, unsigned Idx, const Twine &NameStr,
                  BasicBlock *InsertAtEnd);

  // Shared constructor tail: stores the operands/indices and sets the name.
  void init(Value *Agg, Value *Val, ArrayRef<unsigned> Idxs,
            const Twine &NameStr);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  InsertValueInst *cloneImpl() const;

public:
  // allocate space for exactly two operands
  void *operator new(size_t S) { return User::operator new(S, 2); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  /// Create an insertvalue instruction, optionally inserting it before
  /// \p InsertBefore.
  static InsertValueInst *Create(Value *Agg, Value *Val,
                                 ArrayRef<unsigned> Idxs,
                                 const Twine &NameStr = "",
                                 Instruction *InsertBefore = nullptr) {
    return new InsertValueInst(Agg, Val, Idxs, NameStr, InsertBefore);
  }

  /// As above, but appends the new instruction to basic block \p InsertAtEnd.
  static InsertValueInst *Create(Value *Agg, Value *Val,
                                 ArrayRef<unsigned> Idxs,
                                 const Twine &NameStr,
                                 BasicBlock *InsertAtEnd) {
    return new InsertValueInst(Agg, Val, Idxs, NameStr, InsertAtEnd);
  }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // Iteration over the index list (read-only).
  using idx_iterator = const unsigned*;

  inline idx_iterator idx_begin() const { return Indices.begin(); }
  inline idx_iterator idx_end() const { return Indices.end(); }
  inline iterator_range<idx_iterator> indices() const {
    return make_range(idx_begin(), idx_end());
  }

  /// The aggregate being inserted into (operand 0).
  Value *getAggregateOperand() {
    return getOperand(0);
  }
  const Value *getAggregateOperand() const {
    return getOperand(0);
  }
  static unsigned getAggregateOperandIndex() {
    return 0U; // get index for modifying correct operand
  }

  /// The value being inserted (operand 1).
  Value *getInsertedValueOperand() {
    return getOperand(1);
  }
  const Value *getInsertedValueOperand() const {
    return getOperand(1);
  }
  static unsigned getInsertedValueOperandIndex() {
    return 1U; // get index for modifying correct operand
  }

  ArrayRef<unsigned> getIndices() const {
    return Indices;
  }

  unsigned getNumIndices() const {
    return (unsigned)Indices.size();
  }

  // An insertvalue instruction always carries at least one index, so this
  // is unconditionally true.
  bool hasIndices() const {
    return true;
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::InsertValue;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// InsertValueInst always has exactly two operands (the aggregate and the
// value to insert); the index list lives out-of-line in the Indices member.
template <>
struct OperandTraits<InsertValueInst> :
  public FixedNumOperandTraits<InsertValueInst, 2> {
};
// Out-of-line inline constructor (insert-before-instruction form). The
// result type of an insertvalue is the type of the aggregate operand, and
// the two operand slots are co-allocated via OperandTraits::op_begin.
InsertValueInst::InsertValueInst(Value *Agg,
                                 Value *Val,
                                 ArrayRef<unsigned> Idxs,
                                 const Twine &NameStr,
                                 Instruction *InsertBefore)
  : Instruction(Agg->getType(), InsertValue,
                OperandTraits<InsertValueInst>::op_begin(this),
                2, InsertBefore) {
  init(Agg, Val, Idxs, NameStr);
}
// Out-of-line inline constructor (insert-at-end-of-block form); mirrors the
// insert-before constructor above.
InsertValueInst::InsertValueInst(Value *Agg,
                                 Value *Val,
                                 ArrayRef<unsigned> Idxs,
                                 const Twine &NameStr,
                                 BasicBlock *InsertAtEnd)
  : Instruction(Agg->getType(), InsertValue,
                OperandTraits<InsertValueInst>::op_begin(this),
                2, InsertAtEnd) {
  init(Agg, Val, Idxs, NameStr);
}
// Provides the operand accessor definitions declared in the class body by
// DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value).
DEFINE_TRANSPARENT_OPERAND_ACCESSORS(InsertValueInst, Value)
  2280. //===----------------------------------------------------------------------===//
  2281. // PHINode Class
  2282. //===----------------------------------------------------------------------===//
  2283. // PHINode - The PHINode class is used to represent the magical mystical PHI
  2284. // node, that can not exist in nature, but can be synthesized in a computer
  2285. // scientist's overactive imagination.
  2286. //
class PHINode : public Instruction {
  /// The number of operands actually allocated.  NumOperands is
  /// the number actually in use.
  unsigned ReservedSpace;

  PHINode(const PHINode &PN);

  explicit PHINode(Type *Ty, unsigned NumReservedValues,
                   const Twine &NameStr = "",
                   Instruction *InsertBefore = nullptr)
    : Instruction(Ty, Instruction::PHI, nullptr, 0, InsertBefore),
      ReservedSpace(NumReservedValues) {
    assert(!Ty->isTokenTy() && "PHI nodes cannot have token type!");
    setName(NameStr);
    allocHungoffUses(ReservedSpace);
  }

  PHINode(Type *Ty, unsigned NumReservedValues, const Twine &NameStr,
          BasicBlock *InsertAtEnd)
    : Instruction(Ty, Instruction::PHI, nullptr, 0, InsertAtEnd),
      ReservedSpace(NumReservedValues) {
    assert(!Ty->isTokenTy() && "PHI nodes cannot have token type!");
    setName(NameStr);
    allocHungoffUses(ReservedSpace);
  }

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  PHINode *cloneImpl() const;

  // allocHungoffUses - this is more complicated than the generic
  // User::allocHungoffUses, because we have to allocate Uses for the incoming
  // values and pointers to the incoming blocks, all in one allocation.
  void allocHungoffUses(unsigned N) {
    User::allocHungoffUses(N, /* IsPhi */ true);
  }

public:
  /// Constructors - NumReservedValues is a hint for the number of incoming
  /// edges that this phi node will have (use 0 if you really have no idea).
  static PHINode *Create(Type *Ty, unsigned NumReservedValues,
                         const Twine &NameStr = "",
                         Instruction *InsertBefore = nullptr) {
    return new PHINode(Ty, NumReservedValues, NameStr, InsertBefore);
  }

  static PHINode *Create(Type *Ty, unsigned NumReservedValues,
                         const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return new PHINode(Ty, NumReservedValues, NameStr, InsertAtEnd);
  }

  /// Provide fast operand accessors
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // Block iterator interface. This provides access to the list of incoming
  // basic blocks, which parallels the list of incoming values.
  // Please note that we are not providing non-const iterators for blocks to
  // force all updates go through an interface function.

  using block_iterator = BasicBlock **;
  using const_block_iterator = BasicBlock * const *;

  // The incoming-block pointer array is co-allocated with the hung-off use
  // list: ReservedSpace uses are allocated, and the block array starts
  // immediately after them (hence op_begin() + ReservedSpace).
  const_block_iterator block_begin() const {
    return reinterpret_cast<const_block_iterator>(op_begin() + ReservedSpace);
  }

  const_block_iterator block_end() const {
    return block_begin() + getNumOperands();
  }

  iterator_range<const_block_iterator> blocks() const {
    return make_range(block_begin(), block_end());
  }

  // Every operand of a PHI is an incoming value.
  op_range incoming_values() { return operands(); }

  const_op_range incoming_values() const { return operands(); }

  /// Return the number of incoming edges
  ///
  unsigned getNumIncomingValues() const { return getNumOperands(); }

  /// Return incoming value number x
  ///
  Value *getIncomingValue(unsigned i) const {
    return getOperand(i);
  }
  void setIncomingValue(unsigned i, Value *V) {
    assert(V && "PHI node got a null value!");
    assert(getType() == V->getType() &&
           "All operands to PHI node must be the same type as the PHI node!");
    setOperand(i, V);
  }

  // Incoming value i is stored directly as operand i (identity mapping).
  static unsigned getOperandNumForIncomingValue(unsigned i) {
    return i;
  }

  static unsigned getIncomingValueNumForOperand(unsigned i) {
    return i;
  }

  /// Return incoming basic block number @p i.
  ///
  BasicBlock *getIncomingBlock(unsigned i) const {
    return block_begin()[i];
  }

  /// Return incoming basic block corresponding
  /// to an operand of the PHI.
  ///
  BasicBlock *getIncomingBlock(const Use &U) const {
    assert(this == U.getUser() && "Iterator doesn't point to PHI's Uses?");
    return getIncomingBlock(unsigned(&U - op_begin()));
  }

  /// Return incoming basic block corresponding
  /// to value use iterator.
  ///
  BasicBlock *getIncomingBlock(Value::const_user_iterator I) const {
    return getIncomingBlock(I.getUse());
  }

  // const_cast is safe here: this is the class's own mutation interface;
  // only const iterators are exposed publicly (see note above).
  void setIncomingBlock(unsigned i, BasicBlock *BB) {
    const_cast<block_iterator>(block_begin())[i] = BB;
  }

  /// Copies the basic blocks from \p BBRange to the incoming basic block list
  /// of this PHINode, starting at \p ToIdx.
  void copyIncomingBlocks(iterator_range<const_block_iterator> BBRange,
                          uint32_t ToIdx = 0) {
    copy(BBRange, const_cast<block_iterator>(block_begin()) + ToIdx);
  }

  /// Replace every incoming basic block \p Old to basic block \p New.
  void replaceIncomingBlockWith(const BasicBlock *Old, BasicBlock *New) {
    assert(New && Old && "PHI node got a null basic block!");
    for (unsigned Op = 0, NumOps = getNumOperands(); Op != NumOps; ++Op)
      if (getIncomingBlock(Op) == Old)
        setIncomingBlock(Op, New);
  }

  /// Add an incoming value to the end of the PHI list
  ///
  void addIncoming(Value *V, BasicBlock *BB) {
    if (getNumOperands() == ReservedSpace)
      growOperands();  // Get more space!
    // Initialize some new operands.
    setNumHungOffUseOperands(getNumOperands() + 1);
    setIncomingValue(getNumOperands() - 1, V);
    setIncomingBlock(getNumOperands() - 1, BB);
  }

  /// Remove an incoming value.  This is useful if a
  /// predecessor basic block is deleted.  The value removed is returned.
  ///
  /// If the last incoming value for a PHI node is removed (and DeletePHIIfEmpty
  /// is true), the PHI node is destroyed and any uses of it are replaced with
  /// dummy values.  The only time there should be zero incoming values to a PHI
  /// node is when the block is dead, so this strategy is sound.
  ///
  Value *removeIncomingValue(unsigned Idx, bool DeletePHIIfEmpty = true);

  Value *removeIncomingValue(const BasicBlock *BB, bool DeletePHIIfEmpty=true) {
    int Idx = getBasicBlockIndex(BB);
    assert(Idx >= 0 && "Invalid basic block argument to remove!");
    return removeIncomingValue(Idx, DeletePHIIfEmpty);
  }

  /// Return the first index of the specified basic
  /// block in the value list for this PHI.  Returns -1 if no instance.
  ///
  int getBasicBlockIndex(const BasicBlock *BB) const {
    for (unsigned i = 0, e = getNumOperands(); i != e; ++i)
      if (block_begin()[i] == BB)
        return i;
    return -1;
  }

  // Returns the value for the first occurrence of BB; asserts if BB is not an
  // incoming block of this PHI.
  Value *getIncomingValueForBlock(const BasicBlock *BB) const {
    int Idx = getBasicBlockIndex(BB);
    assert(Idx >= 0 && "Invalid basic block argument!");
    return getIncomingValue(Idx);
  }

  /// Set every incoming value(s) for block \p BB to \p V.
  void setIncomingValueForBlock(const BasicBlock *BB, Value *V) {
    assert(BB && "PHI node got a null basic block!");
    bool Found = false;
    for (unsigned Op = 0, NumOps = getNumOperands(); Op != NumOps; ++Op)
      if (getIncomingBlock(Op) == BB) {
        Found = true;
        setIncomingValue(Op, V);
      }
    (void)Found; // silence unused-variable warning in release builds
    assert(Found && "Invalid basic block argument to set!");
  }

  /// If the specified PHI node always merges together the
  /// same value, return the value, otherwise return null.
  Value *hasConstantValue() const;

  /// Whether the specified PHI node always merges
  /// together the same value, assuming undefs are equal to a unique
  /// non-undef value.
  bool hasConstantOrUndefValue() const;

  /// If the PHI node is complete which means all of its parent's predecessors
  /// have incoming value in this PHI, return true, otherwise return false.
  bool isComplete() const {
    return llvm::all_of(predecessors(getParent()),
                        [this](const BasicBlock *Pred) {
                          return getBasicBlockIndex(Pred) >= 0;
                        });
  }

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::PHI;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  void growOperands();
};
// PHI nodes use hung-off uses (operand storage allocated separately from the
// User object) so the incoming-value list can grow as edges are added.
template <>
struct OperandTraits<PHINode> : public HungoffOperandTraits<2> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(PHINode, Value)
  2483. //===----------------------------------------------------------------------===//
  2484. // LandingPadInst Class
  2485. //===----------------------------------------------------------------------===//
  2486. //===---------------------------------------------------------------------------
  2487. /// The landingpad instruction holds all of the information
  2488. /// necessary to generate correct exception handling. The landingpad instruction
  2489. /// cannot be moved from the top of a landing pad block, which itself is
  2490. /// accessible only from the 'unwind' edge of an invoke. This uses the
  2491. /// SubclassData field in Value to store whether or not the landingpad is a
  2492. /// cleanup.
  2493. ///
class LandingPadInst : public Instruction {
  // The "is cleanup" flag is packed into bit 0 of the instruction's
  // subclass data.
  using CleanupField = BoolBitfieldElementT<0>;

  /// The number of operands actually allocated.  NumOperands is
  /// the number actually in use.
  unsigned ReservedSpace;

  LandingPadInst(const LandingPadInst &LP);

public:
  enum ClauseType { Catch, Filter };

private:
  explicit LandingPadInst(Type *RetTy, unsigned NumReservedValues,
                          const Twine &NameStr, Instruction *InsertBefore);
  explicit LandingPadInst(Type *RetTy, unsigned NumReservedValues,
                          const Twine &NameStr, BasicBlock *InsertAtEnd);

  // Allocate space for exactly zero operands.
  void *operator new(size_t S) { return User::operator new(S); }

  void growOperands(unsigned Size);
  void init(unsigned NumReservedValues, const Twine &NameStr);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  LandingPadInst *cloneImpl() const;

public:
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  /// Constructors - NumReservedClauses is a hint for the number of incoming
  /// clauses that this landingpad will have (use 0 if you really have no idea).
  static LandingPadInst *Create(Type *RetTy, unsigned NumReservedClauses,
                                const Twine &NameStr = "",
                                Instruction *InsertBefore = nullptr);
  static LandingPadInst *Create(Type *RetTy, unsigned NumReservedClauses,
                                const Twine &NameStr, BasicBlock *InsertAtEnd);

  /// Provide fast operand accessors
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Return 'true' if this landingpad instruction is a
  /// cleanup. I.e., it should be run when unwinding even if its landing pad
  /// doesn't catch the exception.
  bool isCleanup() const { return getSubclassData<CleanupField>(); }

  /// Indicate that this landingpad instruction is a cleanup.
  void setCleanup(bool V) { setSubclassData<CleanupField>(V); }

  /// Add a catch or filter clause to the landing pad.
  void addClause(Constant *ClauseVal);

  /// Get the value of the clause at index Idx. Use isCatch/isFilter to
  /// determine what type of clause this is.
  Constant *getClause(unsigned Idx) const {
    return cast<Constant>(getOperandList()[Idx]);
  }

  /// Return 'true' if the clause and index Idx is a catch clause.
  // Clause kind is encoded by operand type: filter clauses are arrays,
  // catch clauses are not.
  bool isCatch(unsigned Idx) const {
    return !isa<ArrayType>(getOperandList()[Idx]->getType());
  }

  /// Return 'true' if the clause and index Idx is a filter clause.
  bool isFilter(unsigned Idx) const {
    return isa<ArrayType>(getOperandList()[Idx]->getType());
  }

  /// Get the number of clauses for this landing pad.
  unsigned getNumClauses() const { return getNumOperands(); }

  /// Grow the size of the operand list to accommodate the new
  /// number of clauses.
  void reserveClauses(unsigned Size) { growOperands(Size); }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::LandingPad;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// Landing pads use hung-off uses so the clause list can grow via
// growOperands/reserveClauses.
template <>
struct OperandTraits<LandingPadInst> : public HungoffOperandTraits<1> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(LandingPadInst, Value)
  2564. //===----------------------------------------------------------------------===//
  2565. // ReturnInst Class
  2566. //===----------------------------------------------------------------------===//
  2567. //===---------------------------------------------------------------------------
  2568. /// Return a value (possibly void), from a function. Execution
  2569. /// does not continue in this function any longer.
  2570. ///
class ReturnInst : public Instruction {
  ReturnInst(const ReturnInst &RI);

private:
  // ReturnInst constructors:
  // ReturnInst()                  - 'ret void' instruction
  // ReturnInst(    null)          - 'ret void' instruction
  // ReturnInst(Value* X)          - 'ret X'    instruction
  // ReturnInst(    null, Inst *I) - 'ret void' instruction, insert before I
  // ReturnInst(Value* X, Inst *I) - 'ret X'    instruction, insert before I
  // ReturnInst(    null, BB *B)   - 'ret void' instruction, insert @ end of B
  // ReturnInst(Value* X, BB *B)   - 'ret X'    instruction, insert @ end of B
  //
  // NOTE: If the Value* passed is of type void then the constructor behaves as
  // if it was passed NULL.
  explicit ReturnInst(LLVMContext &C, Value *retVal = nullptr,
                      Instruction *InsertBefore = nullptr);
  ReturnInst(LLVMContext &C, Value *retVal, BasicBlock *InsertAtEnd);
  explicit ReturnInst(LLVMContext &C, BasicBlock *InsertAtEnd);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  ReturnInst *cloneImpl() const;

public:
  // !!retVal yields 0 or 1: allocate one operand slot only when a return
  // value is actually present ('ret X' vs 'ret void').
  static ReturnInst* Create(LLVMContext &C, Value *retVal = nullptr,
                            Instruction *InsertBefore = nullptr) {
    return new(!!retVal) ReturnInst(C, retVal, InsertBefore);
  }

  static ReturnInst* Create(LLVMContext &C, Value *retVal,
                            BasicBlock *InsertAtEnd) {
    return new(!!retVal) ReturnInst(C, retVal, InsertAtEnd);
  }

  static ReturnInst* Create(LLVMContext &C, BasicBlock *InsertAtEnd) {
    return new(0) ReturnInst(C, InsertAtEnd);
  }

  /// Provide fast operand accessors
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Convenience accessor. Returns null if there is no return value.
  Value *getReturnValue() const {
    return getNumOperands() != 0 ? getOperand(0) : nullptr;
  }

  // A return is a terminator with no successors.
  unsigned getNumSuccessors() const { return 0; }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return (I->getOpcode() == Instruction::Ret);
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Private and unreachable: ReturnInst has no successors, but these hooks
  // must exist for the terminator interface.
  BasicBlock *getSuccessor(unsigned idx) const {
    llvm_unreachable("ReturnInst has no successors!");
  }

  void setSuccessor(unsigned idx, BasicBlock *B) {
    llvm_unreachable("ReturnInst has no successors!");
  }
};
// ReturnInst is variadic: zero operands for 'ret void', one for 'ret X'.
template <>
struct OperandTraits<ReturnInst> : public VariadicOperandTraits<ReturnInst> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ReturnInst, Value)
  2631. //===----------------------------------------------------------------------===//
  2632. // BranchInst Class
  2633. //===----------------------------------------------------------------------===//
  2634. //===---------------------------------------------------------------------------
  2635. /// Conditional or Unconditional Branch instruction.
  2636. ///
class BranchInst : public Instruction {
  /// Ops list - Branches are strange.  The operands are ordered:
  ///  [Cond, FalseDest,] TrueDest.  This makes some accessors faster because
  /// they don't have to check for cond/uncond branchness. These are mostly
  /// accessed relative from op_end().
  BranchInst(const BranchInst &BI);

  // BranchInst constructors (where {B, T, F} are blocks, and C is a condition):
  // BranchInst(BB *B)                           - 'br B'
  // BranchInst(BB* T, BB *F, Value *C)          - 'br C, T, F'
  // BranchInst(BB* B, Inst *I)                  - 'br B'        insert before I
  // BranchInst(BB* T, BB *F, Value *C, Inst *I) - 'br C, T, F', insert before I
  // BranchInst(BB* B, BB *I)                    - 'br B'        insert at end
  // BranchInst(BB* T, BB *F, Value *C, BB *I)   - 'br C, T, F', insert at end
  explicit BranchInst(BasicBlock *IfTrue, Instruction *InsertBefore = nullptr);
  BranchInst(BasicBlock *IfTrue, BasicBlock *IfFalse, Value *Cond,
             Instruction *InsertBefore = nullptr);
  BranchInst(BasicBlock *IfTrue, BasicBlock *InsertAtEnd);
  BranchInst(BasicBlock *IfTrue, BasicBlock *IfFalse, Value *Cond,
             BasicBlock *InsertAtEnd);

  void AssertOK();

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  BranchInst *cloneImpl() const;

public:
  /// Iterator type that casts an operand to a basic block.
  ///
  /// This only makes sense because the successors are stored as adjacent
  /// operands for branch instructions.
  struct succ_op_iterator
      : iterator_adaptor_base<succ_op_iterator, value_op_iterator,
                              std::random_access_iterator_tag, BasicBlock *,
                              ptrdiff_t, BasicBlock *, BasicBlock *> {
    explicit succ_op_iterator(value_op_iterator I) : iterator_adaptor_base(I) {}

    BasicBlock *operator*() const { return cast<BasicBlock>(*I); }
    BasicBlock *operator->() const { return operator*(); }
  };

  /// The const version of `succ_op_iterator`.
  struct const_succ_op_iterator
      : iterator_adaptor_base<const_succ_op_iterator, const_value_op_iterator,
                              std::random_access_iterator_tag,
                              const BasicBlock *, ptrdiff_t, const BasicBlock *,
                              const BasicBlock *> {
    explicit const_succ_op_iterator(const_value_op_iterator I)
        : iterator_adaptor_base(I) {}

    const BasicBlock *operator*() const { return cast<BasicBlock>(*I); }
    const BasicBlock *operator->() const { return operator*(); }
  };

  // Factories allocate exactly 1 operand (unconditional) or 3 operands
  // (conditional) via placement new.
  static BranchInst *Create(BasicBlock *IfTrue,
                            Instruction *InsertBefore = nullptr) {
    return new(1) BranchInst(IfTrue, InsertBefore);
  }

  static BranchInst *Create(BasicBlock *IfTrue, BasicBlock *IfFalse,
                            Value *Cond, Instruction *InsertBefore = nullptr) {
    return new(3) BranchInst(IfTrue, IfFalse, Cond, InsertBefore);
  }

  static BranchInst *Create(BasicBlock *IfTrue, BasicBlock *InsertAtEnd) {
    return new(1) BranchInst(IfTrue, InsertAtEnd);
  }

  static BranchInst *Create(BasicBlock *IfTrue, BasicBlock *IfFalse,
                            Value *Cond, BasicBlock *InsertAtEnd) {
    return new(3) BranchInst(IfTrue, IfFalse, Cond, InsertAtEnd);
  }

  /// Transparently provide more efficient getOperand methods.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // The operand count distinguishes the two forms (see Ops list note above).
  bool isUnconditional() const { return getNumOperands() == 1; }
  bool isConditional()   const { return getNumOperands() == 3; }

  // Op<-3>() is the condition: operands are [Cond, FalseDest, TrueDest]
  // addressed relative to op_end().
  Value *getCondition() const {
    assert(isConditional() && "Cannot get condition of an uncond branch!");
    return Op<-3>();
  }

  void setCondition(Value *V) {
    assert(isConditional() && "Cannot set condition of unconditional branch!");
    Op<-3>() = V;
  }

  unsigned getNumSuccessors() const { return 1+isConditional(); }

  // Successor 0 (TrueDest) is the last operand; successor 1 (FalseDest, if
  // conditional) is the one before it.
  BasicBlock *getSuccessor(unsigned i) const {
    assert(i < getNumSuccessors() && "Successor # out of range for Branch!");
    return cast_or_null<BasicBlock>((&Op<-1>() - i)->get());
  }

  void setSuccessor(unsigned idx, BasicBlock *NewSucc) {
    assert(idx < getNumSuccessors() && "Successor # out of range for Branch!");
    *(&Op<-1>() - idx) = NewSucc;
  }

  /// Swap the successors of this branch instruction.
  ///
  /// Swaps the successors of the branch instruction. This also swaps any
  /// branch weight metadata associated with the instruction so that it
  /// continues to map correctly to each operand.
  void swapSuccessors();

  // Skip the condition operand (if present) so the range covers only the
  // successor block operands.
  iterator_range<succ_op_iterator> successors() {
    return make_range(
        succ_op_iterator(std::next(value_op_begin(), isConditional() ? 1 : 0)),
        succ_op_iterator(value_op_end()));
  }

  iterator_range<const_succ_op_iterator> successors() const {
    return make_range(const_succ_op_iterator(
                          std::next(value_op_begin(), isConditional() ? 1 : 0)),
                      const_succ_op_iterator(value_op_end()));
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return (I->getOpcode() == Instruction::Br);
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// BranchInst is variadic with a minimum of 1 operand: one operand when
// unconditional, three when conditional.
template <>
struct OperandTraits<BranchInst> : public VariadicOperandTraits<BranchInst, 1> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(BranchInst, Value)
  2749. //===----------------------------------------------------------------------===//
  2750. // SwitchInst Class
  2751. //===----------------------------------------------------------------------===//
  2752. //===---------------------------------------------------------------------------
  2753. /// Multiway switch
  2754. ///
  2755. class SwitchInst : public Instruction {
  2756. unsigned ReservedSpace;
  2757. // Operand[0] = Value to switch on
  2758. // Operand[1] = Default basic block destination
  2759. // Operand[2n ] = Value to match
  2760. // Operand[2n+1] = BasicBlock to go to on match
  2761. SwitchInst(const SwitchInst &SI);
  2762. /// Create a new switch instruction, specifying a value to switch on and a
  2763. /// default destination. The number of additional cases can be specified here
  2764. /// to make memory allocation more efficient. This constructor can also
  2765. /// auto-insert before another instruction.
  2766. SwitchInst(Value *Value, BasicBlock *Default, unsigned NumCases,
  2767. Instruction *InsertBefore);
  2768. /// Create a new switch instruction, specifying a value to switch on and a
  2769. /// default destination. The number of additional cases can be specified here
  2770. /// to make memory allocation more efficient. This constructor also
  2771. /// auto-inserts at the end of the specified BasicBlock.
  2772. SwitchInst(Value *Value, BasicBlock *Default, unsigned NumCases,
  2773. BasicBlock *InsertAtEnd);
  2774. // allocate space for exactly zero operands
  2775. void *operator new(size_t S) { return User::operator new(S); }
  2776. void init(Value *Value, BasicBlock *Default, unsigned NumReserved);
  2777. void growOperands();
  2778. protected:
  2779. // Note: Instruction needs to be a friend here to call cloneImpl.
  2780. friend class Instruction;
  2781. SwitchInst *cloneImpl() const;
  2782. public:
  2783. void operator delete(void *Ptr) { User::operator delete(Ptr); }
  2784. // -2
  2785. static const unsigned DefaultPseudoIndex = static_cast<unsigned>(~0L-1);
  2786. template <typename CaseHandleT> class CaseIteratorImpl;
  2787. /// A handle to a particular switch case. It exposes a convenient interface
  2788. /// to both the case value and the successor block.
  2789. ///
  2790. /// We define this as a template and instantiate it to form both a const and
  2791. /// non-const handle.
template <typename SwitchInstT, typename ConstantIntT, typename BasicBlockT>
class CaseHandleImpl {
  // Directly befriend both const and non-const iterators.
  friend class SwitchInst::CaseIteratorImpl<
      CaseHandleImpl<SwitchInstT, ConstantIntT, BasicBlockT>>;

protected:
  // Expose the switch type we're parameterized with to the iterator.
  using SwitchInstType = SwitchInstT;

  // The switch this handle refers into, and the case number within it.
  // Index may also be the DefaultPseudoIndex sentinel for the default case.
  SwitchInstT *SI;
  ptrdiff_t Index;

  CaseHandleImpl() = default;
  CaseHandleImpl(SwitchInstT *SI, ptrdiff_t Index) : SI(SI), Index(Index) {}

public:
  /// Resolves case value for current case.
  // Case values start at operand 2 (after the condition and default dest)
  // and are interleaved with their destination blocks, hence 2 + Index*2.
  ConstantIntT *getCaseValue() const {
    assert((unsigned)Index < SI->getNumCases() &&
           "Index out the number of cases.");
    return reinterpret_cast<ConstantIntT *>(SI->getOperand(2 + Index * 2));
  }

  /// Resolves successor for current case.
  BasicBlockT *getCaseSuccessor() const {
    assert(((unsigned)Index < SI->getNumCases() ||
            (unsigned)Index == DefaultPseudoIndex) &&
           "Index out the number of cases.");
    return SI->getSuccessor(getSuccessorIndex());
  }

  /// Returns number of current case.
  unsigned getCaseIndex() const { return Index; }

  /// Returns successor index for current case successor.
  // Successor 0 is the default destination; case i maps to successor i+1.
  unsigned getSuccessorIndex() const {
    assert(((unsigned)Index == DefaultPseudoIndex ||
            (unsigned)Index < SI->getNumCases()) &&
           "Index out the number of cases.");
    return (unsigned)Index != DefaultPseudoIndex ? Index + 1 : 0;
  }

  // Handles are equal when they denote the same case of the same switch.
  bool operator==(const CaseHandleImpl &RHS) const {
    assert(SI == RHS.SI && "Incompatible operators.");
    return Index == RHS.Index;
  }
};
  2832. using ConstCaseHandle =
  2833. CaseHandleImpl<const SwitchInst, const ConstantInt, const BasicBlock>;
// Mutable case handle: extends the generic (read-only) CaseHandleImpl with
// setters for the case value and successor.
class CaseHandle
    : public CaseHandleImpl<SwitchInst, ConstantInt, BasicBlock> {
  friend class SwitchInst::CaseIteratorImpl<CaseHandle>;

public:
  CaseHandle(SwitchInst *SI, ptrdiff_t Index) : CaseHandleImpl(SI, Index) {}

  /// Sets the new value for current case.
  // Case value i lives at operand 2 + i*2 (see getCaseValue above).
  void setValue(ConstantInt *V) const {
    assert((unsigned)Index < SI->getNumCases() &&
           "Index out the number of cases.");
    SI->setOperand(2 + Index*2, reinterpret_cast<Value*>(V));
  }

  /// Sets the new successor for current case.
  void setSuccessor(BasicBlock *S) const {
    SI->setSuccessor(getSuccessorIndex(), S);
  }
};
// Random-access iterator over the cases of a switch; dereferences to a
// (const or mutable) case handle.
template <typename CaseHandleT>
class CaseIteratorImpl
    : public iterator_facade_base<CaseIteratorImpl<CaseHandleT>,
                                  std::random_access_iterator_tag,
                                  const CaseHandleT> {
  using SwitchInstT = typename CaseHandleT::SwitchInstType;

  CaseHandleT Case;

public:
  /// Default constructed iterator is in an invalid state until assigned to
  /// a case for a particular switch.
  CaseIteratorImpl() = default;

  /// Initializes case iterator for given SwitchInst and for given
  /// case number.
  CaseIteratorImpl(SwitchInstT *SI, unsigned CaseNum) : Case(SI, CaseNum) {}

  /// Initializes case iterator for given SwitchInst and for given
  /// successor index.
  // Successor 0 is the default destination and maps to the
  // DefaultPseudoIndex sentinel; successor i>0 maps to case i-1.
  static CaseIteratorImpl fromSuccessorIndex(SwitchInstT *SI,
                                             unsigned SuccessorIndex) {
    assert(SuccessorIndex < SI->getNumSuccessors() &&
           "Successor index # out of range!");
    return SuccessorIndex != 0 ? CaseIteratorImpl(SI, SuccessorIndex - 1)
                               : CaseIteratorImpl(SI, DefaultPseudoIndex);
  }

  /// Support converting to the const variant. This will be a no-op for const
  /// variant.
  operator CaseIteratorImpl<ConstCaseHandle>() const {
    return CaseIteratorImpl<ConstCaseHandle>(Case.SI, Case.Index);
  }

  CaseIteratorImpl &operator+=(ptrdiff_t N) {
    // Check index correctness after addition.
    // Note: Index == getNumCases() means end().
    assert(Case.Index + N >= 0 &&
           (unsigned)(Case.Index + N) <= Case.SI->getNumCases() &&
           "Case.Index out the number of cases.");
    Case.Index += N;
    return *this;
  }
  CaseIteratorImpl &operator-=(ptrdiff_t N) {
    // Check index correctness after subtraction.
    // Note: Case.Index == getNumCases() means end().
    assert(Case.Index - N >= 0 &&
           (unsigned)(Case.Index - N) <= Case.SI->getNumCases() &&
           "Case.Index out the number of cases.");
    Case.Index -= N;
    return *this;
  }
  ptrdiff_t operator-(const CaseIteratorImpl &RHS) const {
    assert(Case.SI == RHS.Case.SI && "Incompatible operators.");
    return Case.Index - RHS.Case.Index;
  }
  bool operator==(const CaseIteratorImpl &RHS) const {
    return Case == RHS.Case;
  }
  bool operator<(const CaseIteratorImpl &RHS) const {
    assert(Case.SI == RHS.Case.SI && "Incompatible operators.");
    return Case.Index < RHS.Case.Index;
  }
  const CaseHandleT &operator*() const { return Case; }
};
  /// Iterator over mutable case handles.
  using CaseIt = CaseIteratorImpl<CaseHandle>;
  /// Iterator over read-only case handles.
  using ConstCaseIt = CaseIteratorImpl<ConstCaseHandle>;
  2911. static SwitchInst *Create(Value *Value, BasicBlock *Default,
  2912. unsigned NumCases,
  2913. Instruction *InsertBefore = nullptr) {
  2914. return new SwitchInst(Value, Default, NumCases, InsertBefore);
  2915. }
  2916. static SwitchInst *Create(Value *Value, BasicBlock *Default,
  2917. unsigned NumCases, BasicBlock *InsertAtEnd) {
  2918. return new SwitchInst(Value, Default, NumCases, InsertAtEnd);
  2919. }
  /// Provide fast operand accessors (op_begin/op_end/getOperand/setOperand
  /// expanded by the macro).
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  // Accessor Methods for Switch stmt

  /// Return the value being switched on; it occupies operand slot 0.
  Value *getCondition() const { return getOperand(0); }
  2924. void setCondition(Value *V) { setOperand(0, V); }
  /// Return the destination taken when no case value matches; it is stored
  /// in operand slot 1.
  BasicBlock *getDefaultDest() const {
    return cast<BasicBlock>(getOperand(1));
  }
  /// Replace the default destination (operand slot 1).
  // NOTE(review): reinterpret_cast rather than an implicit upcast --
  // presumably to avoid requiring BasicBlock's full definition at this
  // point; matches the convention used elsewhere in this file.
  void setDefaultDest(BasicBlock *DefaultCase) {
    setOperand(1, reinterpret_cast<Value*>(DefaultCase));
  }
  /// Return the number of 'cases' in this switch instruction, excluding the
  /// default case.
  // Operands: condition, default dest, then one (value, successor) pair per
  // case -- hence NumOperands/2 - 1.
  unsigned getNumCases() const {
    return getNumOperands()/2 - 1;
  }
  /// Returns a read/write iterator that points to the first case in the
  /// SwitchInst (case index 0).
  CaseIt case_begin() {
    return CaseIt(this, 0);
  }
  /// Returns a read-only iterator that points to the first case in the
  /// SwitchInst (case index 0).
  ConstCaseIt case_begin() const {
    return ConstCaseIt(this, 0);
  }
  /// Returns a read/write iterator that points one past the last case in the
  /// SwitchInst; an index equal to getNumCases() is the end sentinel.
  CaseIt case_end() {
    return CaseIt(this, getNumCases());
  }
  /// Returns a read-only iterator that points one past the last case in the
  /// SwitchInst; an index equal to getNumCases() is the end sentinel.
  ConstCaseIt case_end() const {
    return ConstCaseIt(this, getNumCases());
  }
  /// Iteration adapter for range-for loops over the non-default cases.
  iterator_range<CaseIt> cases() {
    return make_range(case_begin(), case_end());
  }
  /// Constant iteration adapter for range-for loops over the non-default
  /// cases.
  iterator_range<ConstCaseIt> cases() const {
    return make_range(case_begin(), case_end());
  }
  /// Returns an iterator that points to the default case.
  /// Note: this iterator allows to resolve successor only. Attempt
  /// to resolve case value causes an assertion.
  /// Also note, that increment and decrement also causes an assertion and
  /// makes iterator invalid.
  // The iterator is parked on the DefaultPseudoIndex sentinel rather than a
  // real case index.
  CaseIt case_default() {
    return CaseIt(this, DefaultPseudoIndex);
  }
  /// Const variant of case_default(); same sentinel-index caveats apply.
  ConstCaseIt case_default() const {
    return ConstCaseIt(this, DefaultPseudoIndex);
  }
  /// Search all of the case values for the specified constant. If it is
  /// explicitly handled, return the case iterator of it, otherwise return
  /// default case iterator to indicate that it is handled by the default
  /// handler.
  // Delegates to the const overload (the const_cast only ADDS constness,
  // which is always safe) and rebuilds a mutable iterator from the found
  // case index.
  CaseIt findCaseValue(const ConstantInt *C) {
    return CaseIt(
        this,
        const_cast<const SwitchInst *>(this)->findCaseValue(C)->getCaseIndex());
  }
  2984. ConstCaseIt findCaseValue(const ConstantInt *C) const {
  2985. ConstCaseIt I = llvm::find_if(cases(), [C](const ConstCaseHandle &Case) {
  2986. return Case.getCaseValue() == C;
  2987. });
  2988. if (I != case_end())
  2989. return I;
  2990. return case_default();
  2991. }
  2992. /// Finds the unique case value for a given successor. Returns null if the
  2993. /// successor is not found, not unique, or is the default case.
  2994. ConstantInt *findCaseDest(BasicBlock *BB) {
  2995. if (BB == getDefaultDest())
  2996. return nullptr;
  2997. ConstantInt *CI = nullptr;
  2998. for (auto Case : cases()) {
  2999. if (Case.getCaseSuccessor() != BB)
  3000. continue;
  3001. if (CI)
  3002. return nullptr; // Multiple cases lead to BB.
  3003. CI = Case.getCaseValue();
  3004. }
  3005. return CI;
  3006. }
  /// Add an entry to the switch instruction.
  /// Note:
  /// This action invalidates case_end(). Old case_end() iterator will
  /// point to the added case.
  // Defined out of line; appends a (value, successor) operand pair.
  void addCase(ConstantInt *OnVal, BasicBlock *Dest);
  /// This method removes the specified case and its successor from the switch
  /// instruction. Note that this operation may reorder the remaining cases at
  /// index idx and above.
  /// Note:
  /// This action invalidates iterators for all cases following the one removed,
  /// including the case_end() iterator. It returns an iterator for the next
  /// case.
  CaseIt removeCase(CaseIt I);
  3020. unsigned getNumSuccessors() const { return getNumOperands()/2; }
  /// Return successor \p idx; idx 0 is the default dest (operand slot 1),
  /// idx k > 0 is the destination of case k - 1 (the odd slot of its pair).
  BasicBlock *getSuccessor(unsigned idx) const {
    assert(idx < getNumSuccessors() &&"Successor idx out of range for switch!");
    return cast<BasicBlock>(getOperand(idx*2+1));
  }
  /// Replace successor \p idx; mirrors the slot mapping of getSuccessor().
  void setSuccessor(unsigned idx, BasicBlock *NewSucc) {
    assert(idx < getNumSuccessors() && "Successor # out of range for switch!");
    setOperand(idx * 2 + 1, NewSucc);
  }
  // Methods for support type inquiry through isa, cast, and dyn_cast:
  /// A SwitchInst is any instruction with the Switch opcode.
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Switch;
  }
  /// Value-level overload: true only for instructions, delegated to the
  /// Instruction overload above.
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
  3036. };
/// A wrapper class to simplify modification of SwitchInst cases along with
/// their prof branch_weights metadata.
class SwitchInstProfUpdateWrapper {
  SwitchInst &SI;
  // Cached per-successor branch weights. NOTE(review): presumably
  // disengaged when the instruction has no (or unusable) branch_weights
  // metadata -- confirm against init() in the .cpp.
  std::optional<SmallVector<uint32_t, 8>> Weights;
  // When set, the destructor rewrites the instruction's prof metadata from
  // Weights.
  bool Changed = false;

protected:
  /// Build an MDNode for the current Weights.
  MDNode *buildProfBranchWeightsMD();

  /// Populate Weights from the instruction's existing metadata.
  void init();

public:
  using CaseWeightOpt = std::optional<uint32_t>;

  // Smart-pointer-style access so the wrapper can be used in place of the
  // SwitchInst itself.
  SwitchInst *operator->() { return &SI; }
  SwitchInst &operator*() { return SI; }
  operator SwitchInst *() { return &SI; }

  SwitchInstProfUpdateWrapper(SwitchInst &SI) : SI(SI) { init(); }

  ~SwitchInstProfUpdateWrapper() {
    // Flush any weight edits back into the prof metadata.
    if (Changed)
      SI.setMetadata(LLVMContext::MD_prof, buildProfBranchWeightsMD());
  }

  /// Delegate the call to the underlying SwitchInst::removeCase() and remove
  /// correspondent branch weight.
  SwitchInst::CaseIt removeCase(SwitchInst::CaseIt I);

  /// Delegate the call to the underlying SwitchInst::addCase() and set the
  /// specified branch weight for the added case.
  void addCase(ConstantInt *OnVal, BasicBlock *Dest, CaseWeightOpt W);

  /// Delegate the call to the underlying SwitchInst::eraseFromParent() and mark
  /// this object to not touch the underlying SwitchInst in destructor.
  SymbolTableList<Instruction>::iterator eraseFromParent();

  /// Set or clear the branch weight for successor \p idx.
  void setSuccessorWeight(unsigned idx, CaseWeightOpt W);
  /// Read the branch weight for successor \p idx, if any.
  CaseWeightOpt getSuccessorWeight(unsigned idx);

  /// Static variant usable without constructing a wrapper.
  static CaseWeightOpt getSuccessorWeight(const SwitchInst &SI, unsigned idx);
};
// SwitchInst keeps its operands out-of-line ("hung off" the User object) so
// cases can be added and removed after construction. NOTE(review): the `2`
// presumably encodes the minimum operand count (condition + default dest) --
// confirm against HungoffOperandTraits.
template <>
struct OperandTraits<SwitchInst> : public HungoffOperandTraits<2> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(SwitchInst, Value)
  3073. //===----------------------------------------------------------------------===//
  3074. // IndirectBrInst Class
  3075. //===----------------------------------------------------------------------===//
  3076. //===---------------------------------------------------------------------------
  3077. /// Indirect Branch Instruction.
  3078. ///
class IndirectBrInst : public Instruction {
  // Capacity reserved in the hung-off operand list so destinations can be
  // appended without reallocating every time.
  unsigned ReservedSpace;

  // Operand[0]   = Address to jump to
  // Operand[n+1] = n-th destination
  IndirectBrInst(const IndirectBrInst &IBI);

  /// Create a new indirectbr instruction, specifying an
  /// Address to jump to. The number of expected destinations can be specified
  /// here to make memory allocation more efficient. This constructor can also
  /// autoinsert before another instruction.
  IndirectBrInst(Value *Address, unsigned NumDests, Instruction *InsertBefore);

  /// Create a new indirectbr instruction, specifying an
  /// Address to jump to. The number of expected destinations can be specified
  /// here to make memory allocation more efficient. This constructor also
  /// autoinserts at the end of the specified BasicBlock.
  IndirectBrInst(Value *Address, unsigned NumDests, BasicBlock *InsertAtEnd);

  // allocate space for exactly zero operands
  void *operator new(size_t S) { return User::operator new(S); }

  void init(Value *Address, unsigned NumDests);
  void growOperands();

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  IndirectBrInst *cloneImpl() const;

public:
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  /// Iterator type that casts an operand to a basic block.
  ///
  /// This only makes sense because the successors are stored as adjacent
  /// operands for indirectbr instructions.
  struct succ_op_iterator
      : iterator_adaptor_base<succ_op_iterator, value_op_iterator,
                              std::random_access_iterator_tag, BasicBlock *,
                              ptrdiff_t, BasicBlock *, BasicBlock *> {
    explicit succ_op_iterator(value_op_iterator I) : iterator_adaptor_base(I) {}

    BasicBlock *operator*() const { return cast<BasicBlock>(*I); }
    BasicBlock *operator->() const { return operator*(); }
  };

  /// The const version of `succ_op_iterator`.
  struct const_succ_op_iterator
      : iterator_adaptor_base<const_succ_op_iterator, const_value_op_iterator,
                              std::random_access_iterator_tag,
                              const BasicBlock *, ptrdiff_t, const BasicBlock *,
                              const BasicBlock *> {
    explicit const_succ_op_iterator(const_value_op_iterator I)
        : iterator_adaptor_base(I) {}

    const BasicBlock *operator*() const { return cast<BasicBlock>(*I); }
    const BasicBlock *operator->() const { return operator*(); }
  };

  static IndirectBrInst *Create(Value *Address, unsigned NumDests,
                                Instruction *InsertBefore = nullptr) {
    return new IndirectBrInst(Address, NumDests, InsertBefore);
  }

  static IndirectBrInst *Create(Value *Address, unsigned NumDests,
                                BasicBlock *InsertAtEnd) {
    return new IndirectBrInst(Address, NumDests, InsertAtEnd);
  }

  /// Provide fast operand accessors.
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // Accessor Methods for IndirectBrInst instruction.
  /// The jump target address (operand 0).
  Value *getAddress() { return getOperand(0); }
  const Value *getAddress() const { return getOperand(0); }
  void setAddress(Value *V) { setOperand(0, V); }

  /// return the number of possible destinations in this
  /// indirectbr instruction.
  unsigned getNumDestinations() const { return getNumOperands()-1; }

  /// Return the specified destination.
  BasicBlock *getDestination(unsigned i) { return getSuccessor(i); }
  const BasicBlock *getDestination(unsigned i) const { return getSuccessor(i); }

  /// Add a destination.
  ///
  void addDestination(BasicBlock *Dest);

  /// This method removes the specified successor from the
  /// indirectbr instruction.
  void removeDestination(unsigned i);

  // Successors are the same operands as destinations (everything after the
  // address operand).
  unsigned getNumSuccessors() const { return getNumOperands()-1; }
  BasicBlock *getSuccessor(unsigned i) const {
    return cast<BasicBlock>(getOperand(i+1));
  }
  void setSuccessor(unsigned i, BasicBlock *NewSucc) {
    setOperand(i + 1, NewSucc);
  }

  // Range over the successor operands, skipping operand 0 (the address).
  iterator_range<succ_op_iterator> successors() {
    return make_range(succ_op_iterator(std::next(value_op_begin())),
                      succ_op_iterator(value_op_end()));
  }

  iterator_range<const_succ_op_iterator> successors() const {
    return make_range(const_succ_op_iterator(std::next(value_op_begin())),
                      const_succ_op_iterator(value_op_end()));
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::IndirectBr;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// IndirectBrInst also hangs its operands off the User object so destinations
// can be added/removed dynamically. NOTE(review): the `1` presumably encodes
// the minimum operand count (the address) -- confirm against
// HungoffOperandTraits.
template <>
struct OperandTraits<IndirectBrInst> : public HungoffOperandTraits<1> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(IndirectBrInst, Value)
  3180. //===----------------------------------------------------------------------===//
  3181. // InvokeInst Class
  3182. //===----------------------------------------------------------------------===//
  3183. /// Invoke instruction. The SubclassData field is used to hold the
  3184. /// calling convention of the call.
  3185. ///
class InvokeInst : public CallBase {
  /// The number of operands for this call beyond the called function,
  /// arguments, and operand bundles.
  static constexpr int NumExtraOperands = 2;

  /// The index from the end of the operand array to the normal destination.
  static constexpr int NormalDestOpEndIdx = -3;

  /// The index from the end of the operand array to the unwind destination.
  // The operand array thus ends with [normal dest, unwind dest, callee]
  // (callee at -1 per the CallBase convention).
  static constexpr int UnwindDestOpEndIdx = -2;

  InvokeInst(const InvokeInst &BI);

  /// Construct an InvokeInst given a range of arguments.
  ///
  /// Construct an InvokeInst from a range of arguments
  inline InvokeInst(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
                    BasicBlock *IfException, ArrayRef<Value *> Args,
                    ArrayRef<OperandBundleDef> Bundles, int NumOperands,
                    const Twine &NameStr, Instruction *InsertBefore);

  inline InvokeInst(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
                    BasicBlock *IfException, ArrayRef<Value *> Args,
                    ArrayRef<OperandBundleDef> Bundles, int NumOperands,
                    const Twine &NameStr, BasicBlock *InsertAtEnd);

  void init(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
            BasicBlock *IfException, ArrayRef<Value *> Args,
            ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr);

  /// Compute the number of operands to allocate.
  static int ComputeNumOperands(int NumArgs, int NumBundleInputs = 0) {
    // We need one operand for the called function, plus our extra operands and
    // the input operand counts provided.
    return 1 + NumExtraOperands + NumArgs + NumBundleInputs;
  }

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  InvokeInst *cloneImpl() const;

public:
  /// Create an invoke without operand bundles, optionally inserted before
  /// \p InsertBefore. The operand storage is co-allocated via the sized
  /// placement-new overload.
  static InvokeInst *Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
                            BasicBlock *IfException, ArrayRef<Value *> Args,
                            const Twine &NameStr,
                            Instruction *InsertBefore = nullptr) {
    int NumOperands = ComputeNumOperands(Args.size());
    return new (NumOperands)
        InvokeInst(Ty, Func, IfNormal, IfException, Args, std::nullopt,
                   NumOperands, NameStr, InsertBefore);
  }

  /// Create an invoke with operand bundles; extra descriptor bytes are
  /// allocated alongside the operands to hold the bundle bookkeeping.
  static InvokeInst *Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
                            BasicBlock *IfException, ArrayRef<Value *> Args,
                            ArrayRef<OperandBundleDef> Bundles = std::nullopt,
                            const Twine &NameStr = "",
                            Instruction *InsertBefore = nullptr) {
    int NumOperands =
        ComputeNumOperands(Args.size(), CountBundleInputs(Bundles));
    unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);

    return new (NumOperands, DescriptorBytes)
        InvokeInst(Ty, Func, IfNormal, IfException, Args, Bundles, NumOperands,
                   NameStr, InsertBefore);
  }

  /// As above, but appended to the end of \p InsertAtEnd.
  static InvokeInst *Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
                            BasicBlock *IfException, ArrayRef<Value *> Args,
                            const Twine &NameStr, BasicBlock *InsertAtEnd) {
    int NumOperands = ComputeNumOperands(Args.size());
    return new (NumOperands)
        InvokeInst(Ty, Func, IfNormal, IfException, Args, std::nullopt,
                   NumOperands, NameStr, InsertAtEnd);
  }

  static InvokeInst *Create(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
                            BasicBlock *IfException, ArrayRef<Value *> Args,
                            ArrayRef<OperandBundleDef> Bundles,
                            const Twine &NameStr, BasicBlock *InsertAtEnd) {
    int NumOperands =
        ComputeNumOperands(Args.size(), CountBundleInputs(Bundles));
    unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);

    return new (NumOperands, DescriptorBytes)
        InvokeInst(Ty, Func, IfNormal, IfException, Args, Bundles, NumOperands,
                   NameStr, InsertAtEnd);
  }

  // FunctionCallee convenience overloads: unpack the callee's function type
  // and value, then forward to the corresponding overload above.
  static InvokeInst *Create(FunctionCallee Func, BasicBlock *IfNormal,
                            BasicBlock *IfException, ArrayRef<Value *> Args,
                            const Twine &NameStr,
                            Instruction *InsertBefore = nullptr) {
    return Create(Func.getFunctionType(), Func.getCallee(), IfNormal,
                  IfException, Args, std::nullopt, NameStr, InsertBefore);
  }

  static InvokeInst *Create(FunctionCallee Func, BasicBlock *IfNormal,
                            BasicBlock *IfException, ArrayRef<Value *> Args,
                            ArrayRef<OperandBundleDef> Bundles = std::nullopt,
                            const Twine &NameStr = "",
                            Instruction *InsertBefore = nullptr) {
    return Create(Func.getFunctionType(), Func.getCallee(), IfNormal,
                  IfException, Args, Bundles, NameStr, InsertBefore);
  }

  static InvokeInst *Create(FunctionCallee Func, BasicBlock *IfNormal,
                            BasicBlock *IfException, ArrayRef<Value *> Args,
                            const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return Create(Func.getFunctionType(), Func.getCallee(), IfNormal,
                  IfException, Args, NameStr, InsertAtEnd);
  }

  static InvokeInst *Create(FunctionCallee Func, BasicBlock *IfNormal,
                            BasicBlock *IfException, ArrayRef<Value *> Args,
                            ArrayRef<OperandBundleDef> Bundles,
                            const Twine &NameStr, BasicBlock *InsertAtEnd) {
    return Create(Func.getFunctionType(), Func.getCallee(), IfNormal,
                  IfException, Args, Bundles, NameStr, InsertAtEnd);
  }

  /// Create a clone of \p II with a different set of operand bundles and
  /// insert it before \p InsertPt.
  ///
  /// The returned invoke instruction is identical to \p II in every way except
  /// that the operand bundles for the new instruction are set to the operand
  /// bundles in \p Bundles.
  static InvokeInst *Create(InvokeInst *II, ArrayRef<OperandBundleDef> Bundles,
                            Instruction *InsertPt = nullptr);

  // get*Dest - Return the destination basic blocks...
  BasicBlock *getNormalDest() const {
    return cast<BasicBlock>(Op<NormalDestOpEndIdx>());
  }
  BasicBlock *getUnwindDest() const {
    return cast<BasicBlock>(Op<UnwindDestOpEndIdx>());
  }
  // NOTE(review): reinterpret_cast matches the file-wide convention for
  // storing a BasicBlock in a Value operand slot.
  void setNormalDest(BasicBlock *B) {
    Op<NormalDestOpEndIdx>() = reinterpret_cast<Value *>(B);
  }
  void setUnwindDest(BasicBlock *B) {
    Op<UnwindDestOpEndIdx>() = reinterpret_cast<Value *>(B);
  }

  /// Get the landingpad instruction from the landing pad
  /// block (the unwind destination).
  LandingPadInst *getLandingPadInst() const;

  /// Successor 0 is the normal destination, successor 1 the unwind
  /// destination.
  BasicBlock *getSuccessor(unsigned i) const {
    assert(i < 2 && "Successor # out of range for invoke!");
    return i == 0 ? getNormalDest() : getUnwindDest();
  }

  void setSuccessor(unsigned i, BasicBlock *NewSucc) {
    assert(i < 2 && "Successor # out of range for invoke!");
    if (i == 0)
      setNormalDest(NewSucc);
    else
      setUnwindDest(NewSucc);
  }

  unsigned getNumSuccessors() const { return 2; }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return (I->getOpcode() == Instruction::Invoke);
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Shadow Instruction::setInstructionSubclassData with a private forwarding
  // method so that subclasses cannot accidentally use it.
  template <typename Bitfield>
  void setSubclassData(typename Bitfield::Type Value) {
    Instruction::setSubclassData<Bitfield>(Value);
  }
};
// Out-of-line definition of the inline constructor declared above. The
// operand array start is computed as op_end(this) - NumOperands (operands
// are co-allocated before the instruction); init() then populates it.
InvokeInst::InvokeInst(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
                       BasicBlock *IfException, ArrayRef<Value *> Args,
                       ArrayRef<OperandBundleDef> Bundles, int NumOperands,
                       const Twine &NameStr, Instruction *InsertBefore)
    : CallBase(Ty->getReturnType(), Instruction::Invoke,
               OperandTraits<CallBase>::op_end(this) - NumOperands, NumOperands,
               InsertBefore) {
  init(Ty, Func, IfNormal, IfException, Args, Bundles, NameStr);
}
// Same as above, but appends the new instruction to InsertAtEnd.
InvokeInst::InvokeInst(FunctionType *Ty, Value *Func, BasicBlock *IfNormal,
                       BasicBlock *IfException, ArrayRef<Value *> Args,
                       ArrayRef<OperandBundleDef> Bundles, int NumOperands,
                       const Twine &NameStr, BasicBlock *InsertAtEnd)
    : CallBase(Ty->getReturnType(), Instruction::Invoke,
               OperandTraits<CallBase>::op_end(this) - NumOperands, NumOperands,
               InsertAtEnd) {
  init(Ty, Func, IfNormal, IfException, Args, Bundles, NameStr);
}
  3357. //===----------------------------------------------------------------------===//
  3358. // CallBrInst Class
  3359. //===----------------------------------------------------------------------===//
  3360. /// CallBr instruction, tracking function calls that may not return control but
  3361. /// instead transfer it to a third location. The SubclassData field is used to
  3362. /// hold the calling convention of the call.
  3363. ///
  3364. class CallBrInst : public CallBase {
  3365. unsigned NumIndirectDests;
  3366. CallBrInst(const CallBrInst &BI);
  3367. /// Construct a CallBrInst given a range of arguments.
  3368. ///
  3369. /// Construct a CallBrInst from a range of arguments
  3370. inline CallBrInst(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
  3371. ArrayRef<BasicBlock *> IndirectDests,
  3372. ArrayRef<Value *> Args,
  3373. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3374. const Twine &NameStr, Instruction *InsertBefore);
  3375. inline CallBrInst(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
  3376. ArrayRef<BasicBlock *> IndirectDests,
  3377. ArrayRef<Value *> Args,
  3378. ArrayRef<OperandBundleDef> Bundles, int NumOperands,
  3379. const Twine &NameStr, BasicBlock *InsertAtEnd);
  3380. void init(FunctionType *FTy, Value *Func, BasicBlock *DefaultDest,
  3381. ArrayRef<BasicBlock *> IndirectDests, ArrayRef<Value *> Args,
  3382. ArrayRef<OperandBundleDef> Bundles, const Twine &NameStr);
  3383. /// Compute the number of operands to allocate.
  3384. static int ComputeNumOperands(int NumArgs, int NumIndirectDests,
  3385. int NumBundleInputs = 0) {
  3386. // We need one operand for the called function, plus our extra operands and
  3387. // the input operand counts provided.
  3388. return 2 + NumIndirectDests + NumArgs + NumBundleInputs;
  3389. }
  3390. protected:
  3391. // Note: Instruction needs to be a friend here to call cloneImpl.
  3392. friend class Instruction;
  3393. CallBrInst *cloneImpl() const;
  3394. public:
  3395. static CallBrInst *Create(FunctionType *Ty, Value *Func,
  3396. BasicBlock *DefaultDest,
  3397. ArrayRef<BasicBlock *> IndirectDests,
  3398. ArrayRef<Value *> Args, const Twine &NameStr,
  3399. Instruction *InsertBefore = nullptr) {
  3400. int NumOperands = ComputeNumOperands(Args.size(), IndirectDests.size());
  3401. return new (NumOperands)
  3402. CallBrInst(Ty, Func, DefaultDest, IndirectDests, Args, std::nullopt,
  3403. NumOperands, NameStr, InsertBefore);
  3404. }
  3405. static CallBrInst *
  3406. Create(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
  3407. ArrayRef<BasicBlock *> IndirectDests, ArrayRef<Value *> Args,
  3408. ArrayRef<OperandBundleDef> Bundles = std::nullopt,
  3409. const Twine &NameStr = "", Instruction *InsertBefore = nullptr) {
  3410. int NumOperands = ComputeNumOperands(Args.size(), IndirectDests.size(),
  3411. CountBundleInputs(Bundles));
  3412. unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);
  3413. return new (NumOperands, DescriptorBytes)
  3414. CallBrInst(Ty, Func, DefaultDest, IndirectDests, Args, Bundles,
  3415. NumOperands, NameStr, InsertBefore);
  3416. }
  3417. static CallBrInst *Create(FunctionType *Ty, Value *Func,
  3418. BasicBlock *DefaultDest,
  3419. ArrayRef<BasicBlock *> IndirectDests,
  3420. ArrayRef<Value *> Args, const Twine &NameStr,
  3421. BasicBlock *InsertAtEnd) {
  3422. int NumOperands = ComputeNumOperands(Args.size(), IndirectDests.size());
  3423. return new (NumOperands)
  3424. CallBrInst(Ty, Func, DefaultDest, IndirectDests, Args, std::nullopt,
  3425. NumOperands, NameStr, InsertAtEnd);
  3426. }
  3427. static CallBrInst *Create(FunctionType *Ty, Value *Func,
  3428. BasicBlock *DefaultDest,
  3429. ArrayRef<BasicBlock *> IndirectDests,
  3430. ArrayRef<Value *> Args,
  3431. ArrayRef<OperandBundleDef> Bundles,
  3432. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3433. int NumOperands = ComputeNumOperands(Args.size(), IndirectDests.size(),
  3434. CountBundleInputs(Bundles));
  3435. unsigned DescriptorBytes = Bundles.size() * sizeof(BundleOpInfo);
  3436. return new (NumOperands, DescriptorBytes)
  3437. CallBrInst(Ty, Func, DefaultDest, IndirectDests, Args, Bundles,
  3438. NumOperands, NameStr, InsertAtEnd);
  3439. }
  3440. static CallBrInst *Create(FunctionCallee Func, BasicBlock *DefaultDest,
  3441. ArrayRef<BasicBlock *> IndirectDests,
  3442. ArrayRef<Value *> Args, const Twine &NameStr,
  3443. Instruction *InsertBefore = nullptr) {
  3444. return Create(Func.getFunctionType(), Func.getCallee(), DefaultDest,
  3445. IndirectDests, Args, NameStr, InsertBefore);
  3446. }
  3447. static CallBrInst *Create(FunctionCallee Func, BasicBlock *DefaultDest,
  3448. ArrayRef<BasicBlock *> IndirectDests,
  3449. ArrayRef<Value *> Args,
  3450. ArrayRef<OperandBundleDef> Bundles = std::nullopt,
  3451. const Twine &NameStr = "",
  3452. Instruction *InsertBefore = nullptr) {
  3453. return Create(Func.getFunctionType(), Func.getCallee(), DefaultDest,
  3454. IndirectDests, Args, Bundles, NameStr, InsertBefore);
  3455. }
  3456. static CallBrInst *Create(FunctionCallee Func, BasicBlock *DefaultDest,
  3457. ArrayRef<BasicBlock *> IndirectDests,
  3458. ArrayRef<Value *> Args, const Twine &NameStr,
  3459. BasicBlock *InsertAtEnd) {
  3460. return Create(Func.getFunctionType(), Func.getCallee(), DefaultDest,
  3461. IndirectDests, Args, NameStr, InsertAtEnd);
  3462. }
  3463. static CallBrInst *Create(FunctionCallee Func,
  3464. BasicBlock *DefaultDest,
  3465. ArrayRef<BasicBlock *> IndirectDests,
  3466. ArrayRef<Value *> Args,
  3467. ArrayRef<OperandBundleDef> Bundles,
  3468. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3469. return Create(Func.getFunctionType(), Func.getCallee(), DefaultDest,
  3470. IndirectDests, Args, Bundles, NameStr, InsertAtEnd);
  3471. }
  3472. /// Create a clone of \p CBI with a different set of operand bundles and
  3473. /// insert it before \p InsertPt.
  3474. ///
  3475. /// The returned callbr instruction is identical to \p CBI in every way
  3476. /// except that the operand bundles for the new instruction are set to the
  3477. /// operand bundles in \p Bundles.
  3478. static CallBrInst *Create(CallBrInst *CBI,
  3479. ArrayRef<OperandBundleDef> Bundles,
  3480. Instruction *InsertPt = nullptr);
  3481. /// Return the number of callbr indirect dest labels.
  3482. ///
  3483. unsigned getNumIndirectDests() const { return NumIndirectDests; }
  3484. /// getIndirectDestLabel - Return the i-th indirect dest label.
  3485. ///
  3486. Value *getIndirectDestLabel(unsigned i) const {
  3487. assert(i < getNumIndirectDests() && "Out of bounds!");
  3488. return getOperand(i + arg_size() + getNumTotalBundleOperands() + 1);
  3489. }
  3490. Value *getIndirectDestLabelUse(unsigned i) const {
  3491. assert(i < getNumIndirectDests() && "Out of bounds!");
  3492. return getOperandUse(i + arg_size() + getNumTotalBundleOperands() + 1);
  3493. }
  3494. // Return the destination basic blocks...
  3495. BasicBlock *getDefaultDest() const {
  3496. return cast<BasicBlock>(*(&Op<-1>() - getNumIndirectDests() - 1));
  3497. }
  3498. BasicBlock *getIndirectDest(unsigned i) const {
  3499. return cast_or_null<BasicBlock>(*(&Op<-1>() - getNumIndirectDests() + i));
  3500. }
  3501. SmallVector<BasicBlock *, 16> getIndirectDests() const {
  3502. SmallVector<BasicBlock *, 16> IndirectDests;
  3503. for (unsigned i = 0, e = getNumIndirectDests(); i < e; ++i)
  3504. IndirectDests.push_back(getIndirectDest(i));
  3505. return IndirectDests;
  3506. }
  3507. void setDefaultDest(BasicBlock *B) {
  3508. *(&Op<-1>() - getNumIndirectDests() - 1) = reinterpret_cast<Value *>(B);
  3509. }
  3510. void setIndirectDest(unsigned i, BasicBlock *B) {
  3511. *(&Op<-1>() - getNumIndirectDests() + i) = reinterpret_cast<Value *>(B);
  3512. }
  3513. BasicBlock *getSuccessor(unsigned i) const {
  3514. assert(i < getNumSuccessors() + 1 &&
  3515. "Successor # out of range for callbr!");
  3516. return i == 0 ? getDefaultDest() : getIndirectDest(i - 1);
  3517. }
  3518. void setSuccessor(unsigned i, BasicBlock *NewSucc) {
  3519. assert(i < getNumIndirectDests() + 1 &&
  3520. "Successor # out of range for callbr!");
  3521. return i == 0 ? setDefaultDest(NewSucc) : setIndirectDest(i - 1, NewSucc);
  3522. }
  3523. unsigned getNumSuccessors() const { return getNumIndirectDests() + 1; }
  3524. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3525. static bool classof(const Instruction *I) {
  3526. return (I->getOpcode() == Instruction::CallBr);
  3527. }
  3528. static bool classof(const Value *V) {
  3529. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3530. }
  3531. private:
  3532. // Shadow Instruction::setInstructionSubclassData with a private forwarding
  3533. // method so that subclasses cannot accidentally use it.
/// Typed forwarder to the Instruction-level bitfield setter; keeping it
/// private here prevents further subclasses from touching the subclass data.
template <typename Bitfield>
void setSubclassData(typename Bitfield::Type Value) {
  Instruction::setSubclassData<Bitfield>(Value);
}
  3538. };
/// Out-of-line inline constructor (insert-before form): the \p NumOperands
/// operand slots are co-allocated immediately before `this`
/// (op_end(this) - NumOperands); operand wiring is delegated to init().
CallBrInst::CallBrInst(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
                       ArrayRef<BasicBlock *> IndirectDests,
                       ArrayRef<Value *> Args,
                       ArrayRef<OperandBundleDef> Bundles, int NumOperands,
                       const Twine &NameStr, Instruction *InsertBefore)
    : CallBase(Ty->getReturnType(), Instruction::CallBr,
               OperandTraits<CallBase>::op_end(this) - NumOperands, NumOperands,
               InsertBefore) {
  init(Ty, Func, DefaultDest, IndirectDests, Args, Bundles, NameStr);
}
/// Out-of-line inline constructor (insert-at-end form); identical to the
/// insert-before overload except for the insertion point.
CallBrInst::CallBrInst(FunctionType *Ty, Value *Func, BasicBlock *DefaultDest,
                       ArrayRef<BasicBlock *> IndirectDests,
                       ArrayRef<Value *> Args,
                       ArrayRef<OperandBundleDef> Bundles, int NumOperands,
                       const Twine &NameStr, BasicBlock *InsertAtEnd)
    : CallBase(Ty->getReturnType(), Instruction::CallBr,
               OperandTraits<CallBase>::op_end(this) - NumOperands, NumOperands,
               InsertAtEnd) {
  init(Ty, Func, DefaultDest, IndirectDests, Args, Bundles, NameStr);
}
  3559. //===----------------------------------------------------------------------===//
  3560. // ResumeInst Class
  3561. //===----------------------------------------------------------------------===//
  3562. //===---------------------------------------------------------------------------
/// Resume the propagation of an exception.
///
/// A terminator with a single operand (the exception value) and no
/// successors.
class ResumeInst : public Instruction {
  ResumeInst(const ResumeInst &RI);

  explicit ResumeInst(Value *Exn, Instruction *InsertBefore=nullptr);
  ResumeInst(Value *Exn, BasicBlock *InsertAtEnd);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  ResumeInst *cloneImpl() const;

public:
  /// Create a resume inserted before \p InsertBefore; "new(1)" allocates the
  /// single operand slot.
  static ResumeInst *Create(Value *Exn, Instruction *InsertBefore = nullptr) {
    return new(1) ResumeInst(Exn, InsertBefore);
  }

  /// Create a resume appended to \p InsertAtEnd.
  static ResumeInst *Create(Value *Exn, BasicBlock *InsertAtEnd) {
    return new(1) ResumeInst(Exn, InsertAtEnd);
  }

  /// Provide fast operand accessors
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Convenience accessor.
  Value *getValue() const { return Op<0>(); }

  unsigned getNumSuccessors() const { return 0; }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Resume;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Private and unreachable: resume has no successors to index.
  BasicBlock *getSuccessor(unsigned idx) const {
    llvm_unreachable("ResumeInst has no successors!");
  }
  void setSuccessor(unsigned idx, BasicBlock *NewSucc) {
    llvm_unreachable("ResumeInst has no successors!");
  }
};
// ResumeInst always has exactly one operand: the exception value.
template <>
struct OperandTraits<ResumeInst> :
    public FixedNumOperandTraits<ResumeInst, 1> {
};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(ResumeInst, Value)
  3605. //===----------------------------------------------------------------------===//
  3606. // CatchSwitchInst Class
  3607. //===----------------------------------------------------------------------===//
/// The catchswitch instruction. Operand 0 is the outer scope (parent pad),
/// operand 1 — when present — is the unwind destination, and the remaining
/// operands are the handler blocks.
class CatchSwitchInst : public Instruction {
  // Subclass-data bit 0 records whether an unwind-destination operand exists.
  using UnwindDestField = BoolBitfieldElementT<0>;

  /// The number of operands actually allocated. NumOperands is
  /// the number actually in use.
  unsigned ReservedSpace;

  // Operand[0] = Outer scope
  // Operand[1] = Unwind block destination
  // Operand[n] = BasicBlock to go to on match
  CatchSwitchInst(const CatchSwitchInst &CSI);

  /// Create a new switch instruction, specifying a
  /// default destination. The number of additional handlers can be specified
  /// here to make memory allocation more efficient.
  /// This constructor can also autoinsert before another instruction.
  CatchSwitchInst(Value *ParentPad, BasicBlock *UnwindDest,
                  unsigned NumHandlers, const Twine &NameStr,
                  Instruction *InsertBefore);

  /// Create a new switch instruction, specifying a
  /// default destination. The number of additional handlers can be specified
  /// here to make memory allocation more efficient.
  /// This constructor also autoinserts at the end of the specified BasicBlock.
  CatchSwitchInst(Value *ParentPad, BasicBlock *UnwindDest,
                  unsigned NumHandlers, const Twine &NameStr,
                  BasicBlock *InsertAtEnd);

  // allocate space for exactly zero operands
  void *operator new(size_t S) { return User::operator new(S); }

  void init(Value *ParentPad, BasicBlock *UnwindDest, unsigned NumReserved);
  void growOperands(unsigned Size);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  CatchSwitchInst *cloneImpl() const;

public:
  void operator delete(void *Ptr) { return User::operator delete(Ptr); }

  /// Factory: construct and (optionally) insert before \p InsertBefore.
  static CatchSwitchInst *Create(Value *ParentPad, BasicBlock *UnwindDest,
                                 unsigned NumHandlers,
                                 const Twine &NameStr = "",
                                 Instruction *InsertBefore = nullptr) {
    return new CatchSwitchInst(ParentPad, UnwindDest, NumHandlers, NameStr,
                               InsertBefore);
  }

  /// Factory: construct and append to \p InsertAtEnd.
  static CatchSwitchInst *Create(Value *ParentPad, BasicBlock *UnwindDest,
                                 unsigned NumHandlers, const Twine &NameStr,
                                 BasicBlock *InsertAtEnd) {
    return new CatchSwitchInst(ParentPad, UnwindDest, NumHandlers, NameStr,
                               InsertAtEnd);
  }

  /// Provide fast operand accessors
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  // Accessor Methods for CatchSwitch stmt
  Value *getParentPad() const { return getOperand(0); }
  void setParentPad(Value *ParentPad) { setOperand(0, ParentPad); }

  // Accessor Methods for CatchSwitch stmt
  bool hasUnwindDest() const { return getSubclassData<UnwindDestField>(); }
  bool unwindsToCaller() const { return !hasUnwindDest(); }
  // The unwind dest, when present, is always operand 1.
  BasicBlock *getUnwindDest() const {
    if (hasUnwindDest())
      return cast<BasicBlock>(getOperand(1));
    return nullptr;
  }
  void setUnwindDest(BasicBlock *UnwindDest) {
    assert(UnwindDest);
    assert(hasUnwindDest());
    setOperand(1, UnwindDest);
  }

  /// return the number of 'handlers' in this catchswitch
  /// instruction, except the default handler
  unsigned getNumHandlers() const {
    // Handlers are all operands except the parent pad and, when present,
    // the unwind destination.
    if (hasUnwindDest())
      return getNumOperands() - 2;
    return getNumOperands() - 1;
  }

private:
  // Map raw operand Values to typed BasicBlocks for the handler iterators.
  static BasicBlock *handler_helper(Value *V) { return cast<BasicBlock>(V); }
  static const BasicBlock *handler_helper(const Value *V) {
    return cast<BasicBlock>(V);
  }

public:
  using DerefFnTy = BasicBlock *(*)(Value *);
  using handler_iterator = mapped_iterator<op_iterator, DerefFnTy>;
  using handler_range = iterator_range<handler_iterator>;
  using ConstDerefFnTy = const BasicBlock *(*)(const Value *);
  using const_handler_iterator =
      mapped_iterator<const_op_iterator, ConstDerefFnTy>;
  using const_handler_range = iterator_range<const_handler_iterator>;

  /// Returns an iterator that points to the first handler in CatchSwitchInst.
  handler_iterator handler_begin() {
    // Skip the parent pad (operand 0) and, when present, the unwind dest.
    op_iterator It = op_begin() + 1;
    if (hasUnwindDest())
      ++It;
    return handler_iterator(It, DerefFnTy(handler_helper));
  }

  /// Returns an iterator that points to the first handler in the
  /// CatchSwitchInst.
  const_handler_iterator handler_begin() const {
    const_op_iterator It = op_begin() + 1;
    if (hasUnwindDest())
      ++It;
    return const_handler_iterator(It, ConstDerefFnTy(handler_helper));
  }

  /// Returns a read-only iterator that points one past the last
  /// handler in the CatchSwitchInst.
  handler_iterator handler_end() {
    return handler_iterator(op_end(), DerefFnTy(handler_helper));
  }

  /// Returns an iterator that points one past the last handler in the
  /// CatchSwitchInst.
  const_handler_iterator handler_end() const {
    return const_handler_iterator(op_end(), ConstDerefFnTy(handler_helper));
  }

  /// iteration adapter for range-for loops.
  handler_range handlers() {
    return make_range(handler_begin(), handler_end());
  }

  /// iteration adapter for range-for loops.
  const_handler_range handlers() const {
    return make_range(handler_begin(), handler_end());
  }

  /// Add an entry to the switch instruction...
  /// Note:
  /// This action invalidates handler_end(). Old handler_end() iterator will
  /// point to the added handler.
  void addHandler(BasicBlock *Dest);

  void removeHandler(handler_iterator HI);

  // Successors are every operand except the parent pad: the optional unwind
  // dest plus all handlers.
  unsigned getNumSuccessors() const { return getNumOperands() - 1; }
  BasicBlock *getSuccessor(unsigned Idx) const {
    assert(Idx < getNumSuccessors() &&
           "Successor # out of range for catchswitch!");
    return cast<BasicBlock>(getOperand(Idx + 1));
  }
  void setSuccessor(unsigned Idx, BasicBlock *NewSucc) {
    assert(Idx < getNumSuccessors() &&
           "Successor # out of range for catchswitch!");
    setOperand(Idx + 1, NewSucc);
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::CatchSwitch;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
// Operands are "hung off" (allocated separately from the instruction) so the
// handler list can grow after construction.
template <>
struct OperandTraits<CatchSwitchInst> : public HungoffOperandTraits<2> {};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(CatchSwitchInst, Value)
  3753. //===----------------------------------------------------------------------===//
  3754. // CleanupPadInst Class
  3755. //===----------------------------------------------------------------------===//
  3756. class CleanupPadInst : public FuncletPadInst {
  3757. private:
  3758. explicit CleanupPadInst(Value *ParentPad, ArrayRef<Value *> Args,
  3759. unsigned Values, const Twine &NameStr,
  3760. Instruction *InsertBefore)
  3761. : FuncletPadInst(Instruction::CleanupPad, ParentPad, Args, Values,
  3762. NameStr, InsertBefore) {}
  3763. explicit CleanupPadInst(Value *ParentPad, ArrayRef<Value *> Args,
  3764. unsigned Values, const Twine &NameStr,
  3765. BasicBlock *InsertAtEnd)
  3766. : FuncletPadInst(Instruction::CleanupPad, ParentPad, Args, Values,
  3767. NameStr, InsertAtEnd) {}
  3768. public:
  3769. static CleanupPadInst *Create(Value *ParentPad,
  3770. ArrayRef<Value *> Args = std::nullopt,
  3771. const Twine &NameStr = "",
  3772. Instruction *InsertBefore = nullptr) {
  3773. unsigned Values = 1 + Args.size();
  3774. return new (Values)
  3775. CleanupPadInst(ParentPad, Args, Values, NameStr, InsertBefore);
  3776. }
  3777. static CleanupPadInst *Create(Value *ParentPad, ArrayRef<Value *> Args,
  3778. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3779. unsigned Values = 1 + Args.size();
  3780. return new (Values)
  3781. CleanupPadInst(ParentPad, Args, Values, NameStr, InsertAtEnd);
  3782. }
  3783. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  3784. static bool classof(const Instruction *I) {
  3785. return I->getOpcode() == Instruction::CleanupPad;
  3786. }
  3787. static bool classof(const Value *V) {
  3788. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3789. }
  3790. };
  3791. //===----------------------------------------------------------------------===//
  3792. // CatchPadInst Class
  3793. //===----------------------------------------------------------------------===//
  3794. class CatchPadInst : public FuncletPadInst {
  3795. private:
  3796. explicit CatchPadInst(Value *CatchSwitch, ArrayRef<Value *> Args,
  3797. unsigned Values, const Twine &NameStr,
  3798. Instruction *InsertBefore)
  3799. : FuncletPadInst(Instruction::CatchPad, CatchSwitch, Args, Values,
  3800. NameStr, InsertBefore) {}
  3801. explicit CatchPadInst(Value *CatchSwitch, ArrayRef<Value *> Args,
  3802. unsigned Values, const Twine &NameStr,
  3803. BasicBlock *InsertAtEnd)
  3804. : FuncletPadInst(Instruction::CatchPad, CatchSwitch, Args, Values,
  3805. NameStr, InsertAtEnd) {}
  3806. public:
  3807. static CatchPadInst *Create(Value *CatchSwitch, ArrayRef<Value *> Args,
  3808. const Twine &NameStr = "",
  3809. Instruction *InsertBefore = nullptr) {
  3810. unsigned Values = 1 + Args.size();
  3811. return new (Values)
  3812. CatchPadInst(CatchSwitch, Args, Values, NameStr, InsertBefore);
  3813. }
  3814. static CatchPadInst *Create(Value *CatchSwitch, ArrayRef<Value *> Args,
  3815. const Twine &NameStr, BasicBlock *InsertAtEnd) {
  3816. unsigned Values = 1 + Args.size();
  3817. return new (Values)
  3818. CatchPadInst(CatchSwitch, Args, Values, NameStr, InsertAtEnd);
  3819. }
  3820. /// Convenience accessors
  3821. CatchSwitchInst *getCatchSwitch() const {
  3822. return cast<CatchSwitchInst>(Op<-1>());
  3823. }
  3824. void setCatchSwitch(Value *CatchSwitch) {
  3825. assert(CatchSwitch);
  3826. Op<-1>() = CatchSwitch;
  3827. }
  3828. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  3829. static bool classof(const Instruction *I) {
  3830. return I->getOpcode() == Instruction::CatchPad;
  3831. }
  3832. static bool classof(const Value *V) {
  3833. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3834. }
  3835. };
  3836. //===----------------------------------------------------------------------===//
  3837. // CatchReturnInst Class
  3838. //===----------------------------------------------------------------------===//
/// The catchret instruction: operand 0 is the catchpad being exited,
/// operand 1 the (single) successor block.
class CatchReturnInst : public Instruction {
  CatchReturnInst(const CatchReturnInst &RI);
  CatchReturnInst(Value *CatchPad, BasicBlock *BB, Instruction *InsertBefore);
  CatchReturnInst(Value *CatchPad, BasicBlock *BB, BasicBlock *InsertAtEnd);

  void init(Value *CatchPad, BasicBlock *BB);

protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  CatchReturnInst *cloneImpl() const;

public:
  /// Create a catchret inserted before \p InsertBefore; "new (2)" allocates
  /// the two operand slots (catchpad + successor).
  static CatchReturnInst *Create(Value *CatchPad, BasicBlock *BB,
                                 Instruction *InsertBefore = nullptr) {
    assert(CatchPad);
    assert(BB);
    return new (2) CatchReturnInst(CatchPad, BB, InsertBefore);
  }

  /// Create a catchret appended to \p InsertAtEnd.
  static CatchReturnInst *Create(Value *CatchPad, BasicBlock *BB,
                                 BasicBlock *InsertAtEnd) {
    assert(CatchPad);
    assert(BB);
    return new (2) CatchReturnInst(CatchPad, BB, InsertAtEnd);
  }

  /// Provide fast operand accessors
  DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);

  /// Convenience accessors.
  CatchPadInst *getCatchPad() const { return cast<CatchPadInst>(Op<0>()); }
  void setCatchPad(CatchPadInst *CatchPad) {
    assert(CatchPad);
    Op<0>() = CatchPad;
  }

  BasicBlock *getSuccessor() const { return cast<BasicBlock>(Op<1>()); }
  void setSuccessor(BasicBlock *NewSucc) {
    assert(NewSucc);
    Op<1>() = NewSucc;
  }
  unsigned getNumSuccessors() const { return 1; }

  /// Get the parentPad of this catchret's catchpad's catchswitch.
  /// The successor block is implicitly a member of this funclet.
  Value *getCatchSwitchParentPad() const {
    return getCatchPad()->getCatchSwitch()->getParentPad();
  }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return (I->getOpcode() == Instruction::CatchRet);
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Indexed successor accessors are private; the unindexed public forms
  // above are the intended interface since there is exactly one successor.
  BasicBlock *getSuccessor(unsigned Idx) const {
    assert(Idx < getNumSuccessors() && "Successor # out of range for catchret!");
    return getSuccessor();
  }
  void setSuccessor(unsigned Idx, BasicBlock *B) {
    assert(Idx < getNumSuccessors() && "Successor # out of range for catchret!");
    setSuccessor(B);
  }
};
// catchret always has exactly two operands: the catchpad and the successor.
template <>
struct OperandTraits<CatchReturnInst>
    : public FixedNumOperandTraits<CatchReturnInst, 2> {};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(CatchReturnInst, Value)
  3901. //===----------------------------------------------------------------------===//
  3902. // CleanupReturnInst Class
  3903. //===----------------------------------------------------------------------===//
  3904. class CleanupReturnInst : public Instruction {
  3905. using UnwindDestField = BoolBitfieldElementT<0>;
  3906. private:
  3907. CleanupReturnInst(const CleanupReturnInst &RI);
  3908. CleanupReturnInst(Value *CleanupPad, BasicBlock *UnwindBB, unsigned Values,
  3909. Instruction *InsertBefore = nullptr);
  3910. CleanupReturnInst(Value *CleanupPad, BasicBlock *UnwindBB, unsigned Values,
  3911. BasicBlock *InsertAtEnd);
  3912. void init(Value *CleanupPad, BasicBlock *UnwindBB);
  3913. protected:
  3914. // Note: Instruction needs to be a friend here to call cloneImpl.
  3915. friend class Instruction;
  3916. CleanupReturnInst *cloneImpl() const;
  3917. public:
  3918. static CleanupReturnInst *Create(Value *CleanupPad,
  3919. BasicBlock *UnwindBB = nullptr,
  3920. Instruction *InsertBefore = nullptr) {
  3921. assert(CleanupPad);
  3922. unsigned Values = 1;
  3923. if (UnwindBB)
  3924. ++Values;
  3925. return new (Values)
  3926. CleanupReturnInst(CleanupPad, UnwindBB, Values, InsertBefore);
  3927. }
  3928. static CleanupReturnInst *Create(Value *CleanupPad, BasicBlock *UnwindBB,
  3929. BasicBlock *InsertAtEnd) {
  3930. assert(CleanupPad);
  3931. unsigned Values = 1;
  3932. if (UnwindBB)
  3933. ++Values;
  3934. return new (Values)
  3935. CleanupReturnInst(CleanupPad, UnwindBB, Values, InsertAtEnd);
  3936. }
  3937. /// Provide fast operand accessors
  3938. DECLARE_TRANSPARENT_OPERAND_ACCESSORS(Value);
  3939. bool hasUnwindDest() const { return getSubclassData<UnwindDestField>(); }
  3940. bool unwindsToCaller() const { return !hasUnwindDest(); }
  3941. /// Convenience accessor.
  3942. CleanupPadInst *getCleanupPad() const {
  3943. return cast<CleanupPadInst>(Op<0>());
  3944. }
  3945. void setCleanupPad(CleanupPadInst *CleanupPad) {
  3946. assert(CleanupPad);
  3947. Op<0>() = CleanupPad;
  3948. }
  3949. unsigned getNumSuccessors() const { return hasUnwindDest() ? 1 : 0; }
  3950. BasicBlock *getUnwindDest() const {
  3951. return hasUnwindDest() ? cast<BasicBlock>(Op<1>()) : nullptr;
  3952. }
  3953. void setUnwindDest(BasicBlock *NewDest) {
  3954. assert(NewDest);
  3955. assert(hasUnwindDest());
  3956. Op<1>() = NewDest;
  3957. }
  3958. // Methods for support type inquiry through isa, cast, and dyn_cast:
  3959. static bool classof(const Instruction *I) {
  3960. return (I->getOpcode() == Instruction::CleanupRet);
  3961. }
  3962. static bool classof(const Value *V) {
  3963. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  3964. }
  3965. private:
  3966. BasicBlock *getSuccessor(unsigned Idx) const {
  3967. assert(Idx == 0);
  3968. return getUnwindDest();
  3969. }
  3970. void setSuccessor(unsigned Idx, BasicBlock *B) {
  3971. assert(Idx == 0);
  3972. setUnwindDest(B);
  3973. }
  3974. // Shadow Instruction::setInstructionSubclassData with a private forwarding
  3975. // method so that subclasses cannot accidentally use it.
  3976. template <typename Bitfield>
  3977. void setSubclassData(typename Bitfield::Type Value) {
  3978. Instruction::setSubclassData<Bitfield>(Value);
  3979. }
  3980. };
// cleanupret has at least one operand (the pad); the unwind destination, when
// present, is a second, optional operand — hence variadic traits.
template <>
struct OperandTraits<CleanupReturnInst>
    : public VariadicOperandTraits<CleanupReturnInst, /*MINARITY=*/1> {};

DEFINE_TRANSPARENT_OPERAND_ACCESSORS(CleanupReturnInst, Value)
  3985. //===----------------------------------------------------------------------===//
  3986. // UnreachableInst Class
  3987. //===----------------------------------------------------------------------===//
  3988. //===---------------------------------------------------------------------------
/// This function has undefined behavior. In particular, the
/// presence of this instruction indicates some higher level knowledge that the
/// end of the block cannot be reached.
///
class UnreachableInst : public Instruction {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  UnreachableInst *cloneImpl() const;

public:
  explicit UnreachableInst(LLVMContext &C, Instruction *InsertBefore = nullptr);
  explicit UnreachableInst(LLVMContext &C, BasicBlock *InsertAtEnd);

  // allocate space for exactly zero operands
  void *operator new(size_t S) { return User::operator new(S, 0); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  // A terminator with no successors.
  unsigned getNumSuccessors() const { return 0; }

  // Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Instruction::Unreachable;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }

private:
  // Private and unreachable: there are no successors to index.
  BasicBlock *getSuccessor(unsigned idx) const {
    llvm_unreachable("UnreachableInst has no successors!");
  }
  void setSuccessor(unsigned idx, BasicBlock *B) {
    llvm_unreachable("UnreachableInst has no successors!");
  }
};
  4020. //===----------------------------------------------------------------------===//
  4021. // TruncInst Class
  4022. //===----------------------------------------------------------------------===//
/// This class represents a truncation of integer types.
/// (Matches instructions whose opcode is Instruction::Trunc; both
/// constructors are defined out of line.)
class TruncInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical TruncInst
  TruncInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  TruncInst(
    Value *S,                           ///< The value to be truncated
    Type *Ty,                           ///< The (smaller) type to truncate to
    const Twine &NameStr = "",          ///< A name for the new instruction
    Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  TruncInst(
    Value *S,                     ///< The value to be truncated
    Type *Ty,                     ///< The (smaller) type to truncate to
    const Twine &NameStr,         ///< A name for the new instruction
    BasicBlock *InsertAtEnd       ///< The block to insert the instruction into
  );

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == Trunc;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  4053. //===----------------------------------------------------------------------===//
  4054. // ZExtInst Class
  4055. //===----------------------------------------------------------------------===//
/// This class represents zero extension of integer types.
/// (Matches instructions whose opcode is Instruction::ZExt; both
/// constructors are defined out of line.)
class ZExtInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical ZExtInst
  ZExtInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  ZExtInst(
    Value *S,                           ///< The value to be zero extended
    Type *Ty,                           ///< The type to zero extend to
    const Twine &NameStr = "",          ///< A name for the new instruction
    Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end semantics.
  ZExtInst(
    Value *S,                     ///< The value to be zero extended
    Type *Ty,                     ///< The type to zero extend to
    const Twine &NameStr,         ///< A name for the new instruction
    BasicBlock *InsertAtEnd       ///< The block to insert the instruction into
  );

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == ZExt;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  4086. //===----------------------------------------------------------------------===//
  4087. // SExtInst Class
  4088. //===----------------------------------------------------------------------===//
/// This class represents a sign extension of integer types.
/// (Matches instructions whose opcode is Instruction::SExt; both
/// constructors are defined out of line.)
class SExtInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical SExtInst
  SExtInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  SExtInst(
    Value *S,                           ///< The value to be sign extended
    Type *Ty,                           ///< The type to sign extend to
    const Twine &NameStr = "",          ///< A name for the new instruction
    Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  SExtInst(
    Value *S,                     ///< The value to be sign extended
    Type *Ty,                     ///< The type to sign extend to
    const Twine &NameStr,         ///< A name for the new instruction
    BasicBlock *InsertAtEnd       ///< The block to insert the instruction into
  );

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == SExt;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  4119. //===----------------------------------------------------------------------===//
  4120. // FPTruncInst Class
  4121. //===----------------------------------------------------------------------===//
/// This class represents a truncation of floating point types.
/// (Matches instructions whose opcode is Instruction::FPTrunc; both
/// constructors are defined out of line.)
class FPTruncInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical FPTruncInst
  FPTruncInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  FPTruncInst(
    Value *S,                           ///< The value to be truncated
    Type *Ty,                           ///< The type to truncate to
    const Twine &NameStr = "",          ///< A name for the new instruction
    Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  /// (the original comment wrongly repeated "insert-before-instruction")
  FPTruncInst(
    Value *S,                     ///< The value to be truncated
    Type *Ty,                     ///< The type to truncate to
    const Twine &NameStr,         ///< A name for the new instruction
    BasicBlock *InsertAtEnd       ///< The block to insert the instruction into
  );

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == FPTrunc;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  4152. //===----------------------------------------------------------------------===//
  4153. // FPExtInst Class
  4154. //===----------------------------------------------------------------------===//
/// This class represents an extension of floating point types.
/// (Matches instructions whose opcode is Instruction::FPExt; both
/// constructors are defined out of line.)
class FPExtInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical FPExtInst
  FPExtInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  FPExtInst(
    Value *S,                           ///< The value to be extended
    Type *Ty,                           ///< The type to extend to
    const Twine &NameStr = "",          ///< A name for the new instruction
    Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  FPExtInst(
    Value *S,                     ///< The value to be extended
    Type *Ty,                     ///< The type to extend to
    const Twine &NameStr,         ///< A name for the new instruction
    BasicBlock *InsertAtEnd       ///< The block to insert the instruction into
  );

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == FPExt;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  4185. //===----------------------------------------------------------------------===//
  4186. // UIToFPInst Class
  4187. //===----------------------------------------------------------------------===//
/// This class represents a cast unsigned integer to floating point.
/// (Matches instructions whose opcode is Instruction::UIToFP; both
/// constructors are defined out of line.)
class UIToFPInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical UIToFPInst
  UIToFPInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  UIToFPInst(
    Value *S,                           ///< The value to be converted
    Type *Ty,                           ///< The type to convert to
    const Twine &NameStr = "",          ///< A name for the new instruction
    Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  UIToFPInst(
    Value *S,                     ///< The value to be converted
    Type *Ty,                     ///< The type to convert to
    const Twine &NameStr,         ///< A name for the new instruction
    BasicBlock *InsertAtEnd       ///< The block to insert the instruction into
  );

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == UIToFP;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  4218. //===----------------------------------------------------------------------===//
  4219. // SIToFPInst Class
  4220. //===----------------------------------------------------------------------===//
/// This class represents a cast from signed integer to floating point.
/// (Matches instructions whose opcode is Instruction::SIToFP; both
/// constructors are defined out of line.)
class SIToFPInst : public CastInst {
protected:
  // Note: Instruction needs to be a friend here to call cloneImpl.
  friend class Instruction;

  /// Clone an identical SIToFPInst
  SIToFPInst *cloneImpl() const;

public:
  /// Constructor with insert-before-instruction semantics
  SIToFPInst(
    Value *S,                           ///< The value to be converted
    Type *Ty,                           ///< The type to convert to
    const Twine &NameStr = "",          ///< A name for the new instruction
    Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  );

  /// Constructor with insert-at-end-of-block semantics
  SIToFPInst(
    Value *S,                     ///< The value to be converted
    Type *Ty,                     ///< The type to convert to
    const Twine &NameStr,         ///< A name for the new instruction
    BasicBlock *InsertAtEnd       ///< The block to insert the instruction into
  );

  /// Methods for support type inquiry through isa, cast, and dyn_cast:
  static bool classof(const Instruction *I) {
    return I->getOpcode() == SIToFP;
  }
  static bool classof(const Value *V) {
    return isa<Instruction>(V) && classof(cast<Instruction>(V));
  }
};
  4251. //===----------------------------------------------------------------------===//
  4252. // FPToUIInst Class
  4253. //===----------------------------------------------------------------------===//
  4254. /// This class represents a cast from floating point to unsigned integer
  4255. class FPToUIInst : public CastInst {
  4256. protected:
  4257. // Note: Instruction needs to be a friend here to call cloneImpl.
  4258. friend class Instruction;
  4259. /// Clone an identical FPToUIInst
  4260. FPToUIInst *cloneImpl() const;
  4261. public:
  4262. /// Constructor with insert-before-instruction semantics
  4263. FPToUIInst(
  4264. Value *S, ///< The value to be converted
  4265. Type *Ty, ///< The type to convert to
  4266. const Twine &NameStr = "", ///< A name for the new instruction
  4267. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4268. );
  4269. /// Constructor with insert-at-end-of-block semantics
  4270. FPToUIInst(
  4271. Value *S, ///< The value to be converted
  4272. Type *Ty, ///< The type to convert to
  4273. const Twine &NameStr, ///< A name for the new instruction
  4274. BasicBlock *InsertAtEnd ///< Where to insert the new instruction
  4275. );
  4276. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4277. static bool classof(const Instruction *I) {
  4278. return I->getOpcode() == FPToUI;
  4279. }
  4280. static bool classof(const Value *V) {
  4281. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4282. }
  4283. };
  4284. //===----------------------------------------------------------------------===//
  4285. // FPToSIInst Class
  4286. //===----------------------------------------------------------------------===//
  4287. /// This class represents a cast from floating point to signed integer.
  4288. class FPToSIInst : public CastInst {
  4289. protected:
  4290. // Note: Instruction needs to be a friend here to call cloneImpl.
  4291. friend class Instruction;
  4292. /// Clone an identical FPToSIInst
  4293. FPToSIInst *cloneImpl() const;
  4294. public:
  4295. /// Constructor with insert-before-instruction semantics
  4296. FPToSIInst(
  4297. Value *S, ///< The value to be converted
  4298. Type *Ty, ///< The type to convert to
  4299. const Twine &NameStr = "", ///< A name for the new instruction
  4300. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4301. );
  4302. /// Constructor with insert-at-end-of-block semantics
  4303. FPToSIInst(
  4304. Value *S, ///< The value to be converted
  4305. Type *Ty, ///< The type to convert to
  4306. const Twine &NameStr, ///< A name for the new instruction
  4307. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4308. );
  4309. /// Methods for support type inquiry through isa, cast, and dyn_cast:
  4310. static bool classof(const Instruction *I) {
  4311. return I->getOpcode() == FPToSI;
  4312. }
  4313. static bool classof(const Value *V) {
  4314. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4315. }
  4316. };
  4317. //===----------------------------------------------------------------------===//
  4318. // IntToPtrInst Class
  4319. //===----------------------------------------------------------------------===//
  4320. /// This class represents a cast from an integer to a pointer.
  4321. class IntToPtrInst : public CastInst {
  4322. public:
  4323. // Note: Instruction needs to be a friend here to call cloneImpl.
  4324. friend class Instruction;
  4325. /// Constructor with insert-before-instruction semantics
  4326. IntToPtrInst(
  4327. Value *S, ///< The value to be converted
  4328. Type *Ty, ///< The type to convert to
  4329. const Twine &NameStr = "", ///< A name for the new instruction
  4330. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4331. );
  4332. /// Constructor with insert-at-end-of-block semantics
  4333. IntToPtrInst(
  4334. Value *S, ///< The value to be converted
  4335. Type *Ty, ///< The type to convert to
  4336. const Twine &NameStr, ///< A name for the new instruction
  4337. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4338. );
  4339. /// Clone an identical IntToPtrInst.
  4340. IntToPtrInst *cloneImpl() const;
  4341. /// Returns the address space of this instruction's pointer type.
  4342. unsigned getAddressSpace() const {
  4343. return getType()->getPointerAddressSpace();
  4344. }
  4345. // Methods for support type inquiry through isa, cast, and dyn_cast:
  4346. static bool classof(const Instruction *I) {
  4347. return I->getOpcode() == IntToPtr;
  4348. }
  4349. static bool classof(const Value *V) {
  4350. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4351. }
  4352. };
  4353. //===----------------------------------------------------------------------===//
  4354. // PtrToIntInst Class
  4355. //===----------------------------------------------------------------------===//
  4356. /// This class represents a cast from a pointer to an integer.
  4357. class PtrToIntInst : public CastInst {
  4358. protected:
  4359. // Note: Instruction needs to be a friend here to call cloneImpl.
  4360. friend class Instruction;
  4361. /// Clone an identical PtrToIntInst.
  4362. PtrToIntInst *cloneImpl() const;
  4363. public:
  4364. /// Constructor with insert-before-instruction semantics
  4365. PtrToIntInst(
  4366. Value *S, ///< The value to be converted
  4367. Type *Ty, ///< The type to convert to
  4368. const Twine &NameStr = "", ///< A name for the new instruction
  4369. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4370. );
  4371. /// Constructor with insert-at-end-of-block semantics
  4372. PtrToIntInst(
  4373. Value *S, ///< The value to be converted
  4374. Type *Ty, ///< The type to convert to
  4375. const Twine &NameStr, ///< A name for the new instruction
  4376. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4377. );
  4378. /// Gets the pointer operand.
  4379. Value *getPointerOperand() { return getOperand(0); }
  4380. /// Gets the pointer operand.
  4381. const Value *getPointerOperand() const { return getOperand(0); }
  4382. /// Gets the operand index of the pointer operand.
  4383. static unsigned getPointerOperandIndex() { return 0U; }
  4384. /// Returns the address space of the pointer operand.
  4385. unsigned getPointerAddressSpace() const {
  4386. return getPointerOperand()->getType()->getPointerAddressSpace();
  4387. }
  4388. // Methods for support type inquiry through isa, cast, and dyn_cast:
  4389. static bool classof(const Instruction *I) {
  4390. return I->getOpcode() == PtrToInt;
  4391. }
  4392. static bool classof(const Value *V) {
  4393. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4394. }
  4395. };
  4396. //===----------------------------------------------------------------------===//
  4397. // BitCastInst Class
  4398. //===----------------------------------------------------------------------===//
  4399. /// This class represents a no-op cast from one type to another.
  4400. class BitCastInst : public CastInst {
  4401. protected:
  4402. // Note: Instruction needs to be a friend here to call cloneImpl.
  4403. friend class Instruction;
  4404. /// Clone an identical BitCastInst.
  4405. BitCastInst *cloneImpl() const;
  4406. public:
  4407. /// Constructor with insert-before-instruction semantics
  4408. BitCastInst(
  4409. Value *S, ///< The value to be casted
  4410. Type *Ty, ///< The type to casted to
  4411. const Twine &NameStr = "", ///< A name for the new instruction
  4412. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4413. );
  4414. /// Constructor with insert-at-end-of-block semantics
  4415. BitCastInst(
  4416. Value *S, ///< The value to be casted
  4417. Type *Ty, ///< The type to casted to
  4418. const Twine &NameStr, ///< A name for the new instruction
  4419. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4420. );
  4421. // Methods for support type inquiry through isa, cast, and dyn_cast:
  4422. static bool classof(const Instruction *I) {
  4423. return I->getOpcode() == BitCast;
  4424. }
  4425. static bool classof(const Value *V) {
  4426. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4427. }
  4428. };
  4429. //===----------------------------------------------------------------------===//
  4430. // AddrSpaceCastInst Class
  4431. //===----------------------------------------------------------------------===//
  4432. /// This class represents a conversion between pointers from one address space
  4433. /// to another.
  4434. class AddrSpaceCastInst : public CastInst {
  4435. protected:
  4436. // Note: Instruction needs to be a friend here to call cloneImpl.
  4437. friend class Instruction;
  4438. /// Clone an identical AddrSpaceCastInst.
  4439. AddrSpaceCastInst *cloneImpl() const;
  4440. public:
  4441. /// Constructor with insert-before-instruction semantics
  4442. AddrSpaceCastInst(
  4443. Value *S, ///< The value to be casted
  4444. Type *Ty, ///< The type to casted to
  4445. const Twine &NameStr = "", ///< A name for the new instruction
  4446. Instruction *InsertBefore = nullptr ///< Where to insert the new instruction
  4447. );
  4448. /// Constructor with insert-at-end-of-block semantics
  4449. AddrSpaceCastInst(
  4450. Value *S, ///< The value to be casted
  4451. Type *Ty, ///< The type to casted to
  4452. const Twine &NameStr, ///< A name for the new instruction
  4453. BasicBlock *InsertAtEnd ///< The block to insert the instruction into
  4454. );
  4455. // Methods for support type inquiry through isa, cast, and dyn_cast:
  4456. static bool classof(const Instruction *I) {
  4457. return I->getOpcode() == AddrSpaceCast;
  4458. }
  4459. static bool classof(const Value *V) {
  4460. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4461. }
  4462. /// Gets the pointer operand.
  4463. Value *getPointerOperand() {
  4464. return getOperand(0);
  4465. }
  4466. /// Gets the pointer operand.
  4467. const Value *getPointerOperand() const {
  4468. return getOperand(0);
  4469. }
  4470. /// Gets the operand index of the pointer operand.
  4471. static unsigned getPointerOperandIndex() {
  4472. return 0U;
  4473. }
  4474. /// Returns the address space of the pointer operand.
  4475. unsigned getSrcAddressSpace() const {
  4476. return getPointerOperand()->getType()->getPointerAddressSpace();
  4477. }
  4478. /// Returns the address space of the result.
  4479. unsigned getDestAddressSpace() const {
  4480. return getType()->getPointerAddressSpace();
  4481. }
  4482. };
  4483. //===----------------------------------------------------------------------===//
  4484. // Helper functions
  4485. //===----------------------------------------------------------------------===//
  4486. /// A helper function that returns the pointer operand of a load or store
  4487. /// instruction. Returns nullptr if not load or store.
  4488. inline const Value *getLoadStorePointerOperand(const Value *V) {
  4489. if (auto *Load = dyn_cast<LoadInst>(V))
  4490. return Load->getPointerOperand();
  4491. if (auto *Store = dyn_cast<StoreInst>(V))
  4492. return Store->getPointerOperand();
  4493. return nullptr;
  4494. }
  4495. inline Value *getLoadStorePointerOperand(Value *V) {
  4496. return const_cast<Value *>(
  4497. getLoadStorePointerOperand(static_cast<const Value *>(V)));
  4498. }
  4499. /// A helper function that returns the pointer operand of a load, store
  4500. /// or GEP instruction. Returns nullptr if not load, store, or GEP.
  4501. inline const Value *getPointerOperand(const Value *V) {
  4502. if (auto *Ptr = getLoadStorePointerOperand(V))
  4503. return Ptr;
  4504. if (auto *Gep = dyn_cast<GetElementPtrInst>(V))
  4505. return Gep->getPointerOperand();
  4506. return nullptr;
  4507. }
  4508. inline Value *getPointerOperand(Value *V) {
  4509. return const_cast<Value *>(getPointerOperand(static_cast<const Value *>(V)));
  4510. }
  4511. /// A helper function that returns the alignment of load or store instruction.
  4512. inline Align getLoadStoreAlignment(Value *I) {
  4513. assert((isa<LoadInst>(I) || isa<StoreInst>(I)) &&
  4514. "Expected Load or Store instruction");
  4515. if (auto *LI = dyn_cast<LoadInst>(I))
  4516. return LI->getAlign();
  4517. return cast<StoreInst>(I)->getAlign();
  4518. }
  4519. /// A helper function that returns the address space of the pointer operand of
  4520. /// load or store instruction.
  4521. inline unsigned getLoadStoreAddressSpace(Value *I) {
  4522. assert((isa<LoadInst>(I) || isa<StoreInst>(I)) &&
  4523. "Expected Load or Store instruction");
  4524. if (auto *LI = dyn_cast<LoadInst>(I))
  4525. return LI->getPointerAddressSpace();
  4526. return cast<StoreInst>(I)->getPointerAddressSpace();
  4527. }
  4528. /// A helper function that returns the type of a load or store instruction.
  4529. inline Type *getLoadStoreType(Value *I) {
  4530. assert((isa<LoadInst>(I) || isa<StoreInst>(I)) &&
  4531. "Expected Load or Store instruction");
  4532. if (auto *LI = dyn_cast<LoadInst>(I))
  4533. return LI->getType();
  4534. return cast<StoreInst>(I)->getValueOperand()->getType();
  4535. }
  4536. /// A helper function that returns an atomic operation's sync scope; returns
  4537. /// std::nullopt if it is not an atomic operation.
  4538. inline std::optional<SyncScope::ID> getAtomicSyncScopeID(const Instruction *I) {
  4539. if (!I->isAtomic())
  4540. return std::nullopt;
  4541. if (auto *AI = dyn_cast<LoadInst>(I))
  4542. return AI->getSyncScopeID();
  4543. if (auto *AI = dyn_cast<StoreInst>(I))
  4544. return AI->getSyncScopeID();
  4545. if (auto *AI = dyn_cast<FenceInst>(I))
  4546. return AI->getSyncScopeID();
  4547. if (auto *AI = dyn_cast<AtomicCmpXchgInst>(I))
  4548. return AI->getSyncScopeID();
  4549. if (auto *AI = dyn_cast<AtomicRMWInst>(I))
  4550. return AI->getSyncScopeID();
  4551. llvm_unreachable("unhandled atomic operation");
  4552. }
  4553. //===----------------------------------------------------------------------===//
  4554. // FreezeInst Class
  4555. //===----------------------------------------------------------------------===//
  4556. /// This class represents a freeze function that returns random concrete
  4557. /// value if an operand is either a poison value or an undef value
  4558. class FreezeInst : public UnaryInstruction {
  4559. protected:
  4560. // Note: Instruction needs to be a friend here to call cloneImpl.
  4561. friend class Instruction;
  4562. /// Clone an identical FreezeInst
  4563. FreezeInst *cloneImpl() const;
  4564. public:
  4565. explicit FreezeInst(Value *S,
  4566. const Twine &NameStr = "",
  4567. Instruction *InsertBefore = nullptr);
  4568. FreezeInst(Value *S, const Twine &NameStr, BasicBlock *InsertAtEnd);
  4569. // Methods for support type inquiry through isa, cast, and dyn_cast:
  4570. static inline bool classof(const Instruction *I) {
  4571. return I->getOpcode() == Freeze;
  4572. }
  4573. static inline bool classof(const Value *V) {
  4574. return isa<Instruction>(V) && classof(cast<Instruction>(V));
  4575. }
  4576. };
  4577. } // end namespace llvm
  4578. #endif // LLVM_IR_INSTRUCTIONS_H
  4579. #ifdef __GNUC__
  4580. #pragma GCC diagnostic pop
  4581. #endif