//===--- CGExpr.cpp - Emit LLVM Code from Expressions ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Expr nodes as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CGCUDARuntime.h"
#include "CGCXXABI.h"
#include "CGCall.h"
#include "CGCleanup.h"
#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "CGOpenMPRuntime.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "ConstantEmitter.h"
#include "TargetInfo.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/NSAPI.h"
#include "clang/Basic/Builtins.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/Basic/SourceManager.h"
#include "llvm/ADT/Hashing.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/MatrixBuilder.h"
#include "llvm/Support/ConvertUTF.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/Path.h"
#include "llvm/Support/SaveAndRestore.h"
#include "llvm/Transforms/Utils/SanitizerStats.h"
#include <optional>
#include <string>

using namespace clang;
using namespace CodeGen;

//===--------------------------------------------------------------------===//
//                        Miscellaneous Helper Methods
//===--------------------------------------------------------------------===//

llvm::Value *CodeGenFunction::EmitCastToVoidPtr(llvm::Value *value) {
  unsigned addressSpace =
      cast<llvm::PointerType>(value->getType())->getAddressSpace();

  llvm::PointerType *destType = Int8PtrTy;
  if (addressSpace)
    destType = llvm::Type::getInt8PtrTy(getLLVMContext(), addressSpace);

  if (value->getType() == destType) return value;
  return Builder.CreateBitCast(value, destType);
}

/// CreateTempAlloca - This creates an alloca and inserts it into the entry
/// block.
Address CodeGenFunction::CreateTempAllocaWithoutCast(llvm::Type *Ty,
                                                     CharUnits Align,
                                                     const Twine &Name,
                                                     llvm::Value *ArraySize) {
  auto Alloca = CreateTempAlloca(Ty, Name, ArraySize);
  Alloca->setAlignment(Align.getAsAlign());
  return Address(Alloca, Ty, Align);
}

/// CreateTempAlloca - This creates an alloca and inserts it into the entry
/// block. The alloca is cast to the default address space if necessary.
Address CodeGenFunction::CreateTempAlloca(llvm::Type *Ty, CharUnits Align,
                                          const Twine &Name,
                                          llvm::Value *ArraySize,
                                          Address *AllocaAddr) {
  auto Alloca = CreateTempAllocaWithoutCast(Ty, Align, Name, ArraySize);
  if (AllocaAddr)
    *AllocaAddr = Alloca;
  llvm::Value *V = Alloca.getPointer();
  // Alloca always returns a pointer in the alloca address space, which may
  // be different from the address space expected by the language. For
  // example, in C++ automatic variables are in the default address space.
  // Therefore cast the alloca to the default address space when necessary.
  if (getASTAllocaAddressSpace() != LangAS::Default) {
    auto DestAddrSpace = getContext().getTargetAddressSpace(LangAS::Default);
    llvm::IRBuilderBase::InsertPointGuard IPG(Builder);
    // When ArraySize is nullptr, alloca is inserted at AllocaInsertPt,
    // otherwise alloca is inserted at the current insertion point of the
    // builder.
    if (!ArraySize)
      Builder.SetInsertPoint(getPostAllocaInsertPoint());
    V = getTargetHooks().performAddrSpaceCast(
        *this, V, getASTAllocaAddressSpace(), LangAS::Default,
        Ty->getPointerTo(DestAddrSpace), /*non-null*/ true);
  }

  return Address(V, Ty, Align);
}
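
// For instance, on a target such as AMDGPU, allocas live in a private address
// space (numbered 5 there) while LangAS::Default maps to the generic address
// space, so the cast above is what makes the temporary usable through an
// ordinary pointer; on typical CPU targets the alloca address space already
// is the default and no cast is emitted. (Illustrative note; the exact
// numbering is target-defined.)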

/// CreateTempAlloca - This creates an alloca and inserts it into the entry
/// block if \p ArraySize is nullptr, otherwise inserts it at the current
/// insertion point of the builder.
llvm::AllocaInst *CodeGenFunction::CreateTempAlloca(llvm::Type *Ty,
                                                    const Twine &Name,
                                                    llvm::Value *ArraySize) {
  if (ArraySize)
    return Builder.CreateAlloca(Ty, ArraySize, Name);
  return new llvm::AllocaInst(Ty, CGM.getDataLayout().getAllocaAddrSpace(),
                              ArraySize, Name, AllocaInsertPt);
}

/// CreateDefaultAlignTempAlloca - This creates an alloca with the
/// default alignment of the corresponding LLVM type, which is *not*
/// guaranteed to be related in any way to the expected alignment of
/// an AST type that might have been lowered to Ty.
Address CodeGenFunction::CreateDefaultAlignTempAlloca(llvm::Type *Ty,
                                                      const Twine &Name) {
  CharUnits Align =
      CharUnits::fromQuantity(CGM.getDataLayout().getPrefTypeAlign(Ty));
  return CreateTempAlloca(Ty, Align, Name);
}

Address CodeGenFunction::CreateIRTemp(QualType Ty, const Twine &Name) {
  CharUnits Align = getContext().getTypeAlignInChars(Ty);
  return CreateTempAlloca(ConvertType(Ty), Align, Name);
}

Address CodeGenFunction::CreateMemTemp(QualType Ty, const Twine &Name,
                                       Address *Alloca) {
  // FIXME: Should we prefer the preferred type alignment here?
  return CreateMemTemp(Ty, getContext().getTypeAlignInChars(Ty), Name, Alloca);
}

Address CodeGenFunction::CreateMemTemp(QualType Ty, CharUnits Align,
                                       const Twine &Name, Address *Alloca) {
  Address Result = CreateTempAlloca(ConvertTypeForMem(Ty), Align, Name,
                                    /*ArraySize=*/nullptr, Alloca);

  if (Ty->isConstantMatrixType()) {
    auto *ArrayTy = cast<llvm::ArrayType>(Result.getElementType());
    auto *VectorTy = llvm::FixedVectorType::get(ArrayTy->getElementType(),
                                                ArrayTy->getNumElements());

    Result = Address(
        Builder.CreateBitCast(Result.getPointer(), VectorTy->getPointerTo()),
        VectorTy, Result.getAlignment());
  }
  return Result;
}

Address CodeGenFunction::CreateMemTempWithoutCast(QualType Ty, CharUnits Align,
                                                  const Twine &Name) {
  return CreateTempAllocaWithoutCast(ConvertTypeForMem(Ty), Align, Name);
}

Address CodeGenFunction::CreateMemTempWithoutCast(QualType Ty,
                                                  const Twine &Name) {
  return CreateMemTempWithoutCast(Ty, getContext().getTypeAlignInChars(Ty),
                                  Name);
}
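
// Note on the helpers above: CreateMemTemp lowers an AST QualType via
// ConvertTypeForMem and delegates to CreateTempAlloca, which performs the
// alloca-to-default address-space cast; the *WithoutCast variants skip that
// cast, and the raw CreateTempAlloca overload is the one place the
// llvm::AllocaInst itself is created. A typical use is simply:
//   Address Tmp = CGF.CreateMemTemp(E->getType(), "agg.tmp");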

/// EvaluateExprAsBool - Perform the usual unary conversions on the specified
/// expression and compare the result against zero, returning an Int1Ty value.
llvm::Value *CodeGenFunction::EvaluateExprAsBool(const Expr *E) {
  PGO.setCurrentStmt(E);
  if (const MemberPointerType *MPT = E->getType()->getAs<MemberPointerType>()) {
    llvm::Value *MemPtr = EmitScalarExpr(E);
    return CGM.getCXXABI().EmitMemberPointerIsNotNull(*this, MemPtr, MPT);
  }

  QualType BoolTy = getContext().BoolTy;
  SourceLocation Loc = E->getExprLoc();
  CGFPOptionsRAII FPOptsRAII(*this, E);
  if (!E->getType()->isAnyComplexType())
    return EmitScalarConversion(EmitScalarExpr(E), E->getType(), BoolTy, Loc);

  return EmitComplexToScalarConversion(EmitComplexExpr(E), E->getType(), BoolTy,
                                       Loc);
}
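
// Informally: a pointer condition such as `if (p)` lowers to an
// `icmp ne p, null`, a floating-point value to an `fcmp une` against zero,
// and a _Complex value is true when either component compares unequal to
// zero.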

/// EmitIgnoredExpr - Emit code to compute the specified expression,
/// ignoring the result.
void CodeGenFunction::EmitIgnoredExpr(const Expr *E) {
  if (E->isPRValue())
    return (void)EmitAnyExpr(E, AggValueSlot::ignored(), true);

  // If this is a bitfield-resulting conditional operator, we can special-case
  // emit it. The normal 'EmitLValue' version of this is particularly
  // difficult to codegen for, since creating a single "LValue" for two
  // differently sized arguments here is not particularly doable.
  if (const auto *CondOp = dyn_cast<AbstractConditionalOperator>(
          E->IgnoreParenNoopCasts(getContext()))) {
    if (CondOp->getObjectKind() == OK_BitField)
      return EmitIgnoredConditionalOperator(CondOp);
  }

  // Just emit it as an l-value and drop the result.
  EmitLValue(E);
}

/// EmitAnyExpr - Emit code to compute the specified expression which
/// can have any type. The result is returned as an RValue struct.
/// If this is an aggregate expression, AggSlot indicates where the
/// result should be returned.
RValue CodeGenFunction::EmitAnyExpr(const Expr *E,
                                    AggValueSlot aggSlot,
                                    bool ignoreResult) {
  switch (getEvaluationKind(E->getType())) {
  case TEK_Scalar:
    return RValue::get(EmitScalarExpr(E, ignoreResult));
  case TEK_Complex:
    return RValue::getComplex(EmitComplexExpr(E, ignoreResult, ignoreResult));
  case TEK_Aggregate:
    if (!ignoreResult && aggSlot.isIgnored())
      aggSlot = CreateAggTemp(E->getType(), "agg-temp");
    EmitAggExpr(E, aggSlot);
    return aggSlot.asRValue();
  }
  llvm_unreachable("bad evaluation kind");
}

/// EmitAnyExprToTemp - Similar to EmitAnyExpr(), however, the result will
/// always be accessible even if no aggregate location is provided.
RValue CodeGenFunction::EmitAnyExprToTemp(const Expr *E) {
  AggValueSlot AggSlot = AggValueSlot::ignored();

  if (hasAggregateEvaluationKind(E->getType()))
    AggSlot = CreateAggTemp(E->getType(), "agg.tmp");
  return EmitAnyExpr(E, AggSlot);
}

/// EmitAnyExprToMem - Evaluate an expression into a given memory
/// location.
void CodeGenFunction::EmitAnyExprToMem(const Expr *E,
                                       Address Location,
                                       Qualifiers Quals,
                                       bool IsInit) {
  // FIXME: This function should take an LValue as an argument.
  switch (getEvaluationKind(E->getType())) {
  case TEK_Complex:
    EmitComplexExprIntoLValue(E, MakeAddrLValue(Location, E->getType()),
                              /*isInit*/ false);
    return;

  case TEK_Aggregate: {
    EmitAggExpr(E, AggValueSlot::forAddr(Location, Quals,
                                         AggValueSlot::IsDestructed_t(IsInit),
                                         AggValueSlot::DoesNotNeedGCBarriers,
                                         AggValueSlot::IsAliased_t(!IsInit),
                                         AggValueSlot::MayOverlap));
    return;
  }

  case TEK_Scalar: {
    RValue RV = RValue::get(EmitScalarExpr(E, /*Ignore*/ false));
    LValue LV = MakeAddrLValue(Location, E->getType());
    EmitStoreThroughLValue(RV, LV);
    return;
  }
  }
  llvm_unreachable("bad evaluation kind");
}

static void
pushTemporaryCleanup(CodeGenFunction &CGF, const MaterializeTemporaryExpr *M,
                     const Expr *E, Address ReferenceTemporary) {
  // Objective-C++ ARC:
  //   If we are binding a reference to a temporary that has ownership, we
  //   need to perform retain/release operations on the temporary.
  //
  // FIXME: This should be looking at E, not M.
  if (auto Lifetime = M->getType().getObjCLifetime()) {
    switch (Lifetime) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
      // Carry on to normal cleanup handling.
      break;

    case Qualifiers::OCL_Autoreleasing:
      // Nothing to do; cleaned up by an autorelease pool.
      return;

    case Qualifiers::OCL_Strong:
    case Qualifiers::OCL_Weak:
      switch (StorageDuration Duration = M->getStorageDuration()) {
      case SD_Static:
        // Note: we intentionally do not register a cleanup to release
        // the object on program termination.
        return;

      case SD_Thread:
        // FIXME: We should probably register a cleanup in this case.
        return;

      case SD_Automatic:
      case SD_FullExpression:
        CodeGenFunction::Destroyer *Destroy;
        CleanupKind CleanupKind;
        if (Lifetime == Qualifiers::OCL_Strong) {
          const ValueDecl *VD = M->getExtendingDecl();
          bool Precise =
              VD && isa<VarDecl>(VD) && VD->hasAttr<ObjCPreciseLifetimeAttr>();
          CleanupKind = CGF.getARCCleanupKind();
          Destroy = Precise ? &CodeGenFunction::destroyARCStrongPrecise
                            : &CodeGenFunction::destroyARCStrongImprecise;
        } else {
          // __weak objects always get EH cleanups; otherwise, exceptions
          // could cause really nasty crashes instead of mere leaks.
          CleanupKind = NormalAndEHCleanup;
          Destroy = &CodeGenFunction::destroyARCWeak;
        }
        if (Duration == SD_FullExpression)
          CGF.pushDestroy(CleanupKind, ReferenceTemporary,
                          M->getType(), *Destroy,
                          CleanupKind & EHCleanup);
        else
          CGF.pushLifetimeExtendedDestroy(CleanupKind, ReferenceTemporary,
                                          M->getType(),
                                          *Destroy, CleanupKind & EHCleanup);
        return;

      case SD_Dynamic:
        llvm_unreachable("temporary cannot have dynamic storage duration");
      }
      llvm_unreachable("unknown storage duration");
    }
  }

  CXXDestructorDecl *ReferenceTemporaryDtor = nullptr;
  if (const RecordType *RT =
          E->getType()->getBaseElementTypeUnsafe()->getAs<RecordType>()) {
    // Get the destructor for the reference temporary.
    auto *ClassDecl = cast<CXXRecordDecl>(RT->getDecl());
    if (!ClassDecl->hasTrivialDestructor())
      ReferenceTemporaryDtor = ClassDecl->getDestructor();
  }

  if (!ReferenceTemporaryDtor)
    return;

  // Call the destructor for the temporary.
  switch (M->getStorageDuration()) {
  case SD_Static:
  case SD_Thread: {
    llvm::FunctionCallee CleanupFn;
    llvm::Constant *CleanupArg;
    if (E->getType()->isArrayType()) {
      CleanupFn = CodeGenFunction(CGF.CGM).generateDestroyHelper(
          ReferenceTemporary, E->getType(),
          CodeGenFunction::destroyCXXObject, CGF.getLangOpts().Exceptions,
          dyn_cast_or_null<VarDecl>(M->getExtendingDecl()));
      CleanupArg = llvm::Constant::getNullValue(CGF.Int8PtrTy);
    } else {
      CleanupFn = CGF.CGM.getAddrAndTypeOfCXXStructor(
          GlobalDecl(ReferenceTemporaryDtor, Dtor_Complete));
      CleanupArg = cast<llvm::Constant>(ReferenceTemporary.getPointer());
    }
    CGF.CGM.getCXXABI().registerGlobalDtor(
        CGF, *cast<VarDecl>(M->getExtendingDecl()), CleanupFn, CleanupArg);
    break;
  }

  case SD_FullExpression:
    CGF.pushDestroy(NormalAndEHCleanup, ReferenceTemporary, E->getType(),
                    CodeGenFunction::destroyCXXObject,
                    CGF.getLangOpts().Exceptions);
    break;

  case SD_Automatic:
    CGF.pushLifetimeExtendedDestroy(NormalAndEHCleanup,
                                    ReferenceTemporary, E->getType(),
                                    CodeGenFunction::destroyCXXObject,
                                    CGF.getLangOpts().Exceptions);
    break;

  case SD_Dynamic:
    llvm_unreachable("temporary cannot have dynamic storage duration");
  }
}

static Address createReferenceTemporary(CodeGenFunction &CGF,
                                        const MaterializeTemporaryExpr *M,
                                        const Expr *Inner,
                                        Address *Alloca = nullptr) {
  auto &TCG = CGF.getTargetHooks();
  switch (M->getStorageDuration()) {
  case SD_FullExpression:
  case SD_Automatic: {
    // If we have a constant temporary array or record try to promote it into
    // a constant global under the same rules a normal constant would've been
    // promoted. This is easier on the optimizer and generally emits fewer
    // instructions.
    QualType Ty = Inner->getType();
    if (CGF.CGM.getCodeGenOpts().MergeAllConstants &&
        (Ty->isArrayType() || Ty->isRecordType()) &&
        CGF.CGM.isTypeConstant(Ty, true))
      if (auto Init = ConstantEmitter(CGF).tryEmitAbstract(Inner, Ty)) {
        auto AS = CGF.CGM.GetGlobalConstantAddressSpace();
        auto *GV = new llvm::GlobalVariable(
            CGF.CGM.getModule(), Init->getType(), /*isConstant=*/true,
            llvm::GlobalValue::PrivateLinkage, Init, ".ref.tmp", nullptr,
            llvm::GlobalValue::NotThreadLocal,
            CGF.getContext().getTargetAddressSpace(AS));
        CharUnits alignment = CGF.getContext().getTypeAlignInChars(Ty);
        GV->setAlignment(alignment.getAsAlign());
        llvm::Constant *C = GV;
        if (AS != LangAS::Default)
          C = TCG.performAddrSpaceCast(
              CGF.CGM, GV, AS, LangAS::Default,
              GV->getValueType()->getPointerTo(
                  CGF.getContext().getTargetAddressSpace(LangAS::Default)));
        // FIXME: Should we put the new global into a COMDAT?
        return Address(C, GV->getValueType(), alignment);
      }
    return CGF.CreateMemTemp(Ty, "ref.tmp", Alloca);
  }
  case SD_Thread:
  case SD_Static:
    return CGF.CGM.GetAddrOfGlobalTemporary(M, Inner);

  case SD_Dynamic:
    llvm_unreachable("temporary can't have dynamic storage duration");
  }
  llvm_unreachable("unknown storage duration");
}
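
// A sketch of the promotion above (illustrative, assuming constant merging
// is enabled): a binding like
//   const int (&r)[3] = {1, 2, 3};
// can materialize its temporary array as a private constant global named
// ".ref.tmp" instead of an alloca initialized with per-element stores.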

/// Helper method to check if the underlying ABI is AAPCS
static bool isAAPCS(const TargetInfo &TargetInfo) {
  return TargetInfo.getABI().startswith("aapcs");
}

LValue CodeGenFunction::
EmitMaterializeTemporaryExpr(const MaterializeTemporaryExpr *M) {
  const Expr *E = M->getSubExpr();

  assert((!M->getExtendingDecl() || !isa<VarDecl>(M->getExtendingDecl()) ||
          !cast<VarDecl>(M->getExtendingDecl())->isARCPseudoStrong()) &&
         "Reference should never be pseudo-strong!");

  // FIXME: ideally this would use EmitAnyExprToMem, however, we cannot do so
  // as that will cause the lifetime adjustment to be lost for ARC
  auto ownership = M->getType().getObjCLifetime();
  if (ownership != Qualifiers::OCL_None &&
      ownership != Qualifiers::OCL_ExplicitNone) {
    Address Object = createReferenceTemporary(*this, M, E);
    if (auto *Var = dyn_cast<llvm::GlobalVariable>(Object.getPointer())) {
      llvm::Type *Ty = ConvertTypeForMem(E->getType());
      Object = Address(llvm::ConstantExpr::getBitCast(
                           Var, Ty->getPointerTo(Object.getAddressSpace())),
                       Ty, Object.getAlignment());
      // createReferenceTemporary will promote the temporary to a global with a
      // constant initializer if it can. It can only do this to a value of
      // ARC-manageable type if the value is global and therefore "immune" to
      // ref-counting operations. Therefore we have no need to emit either a
      // dynamic initialization or a cleanup and we can just return the address
      // of the temporary.
      if (Var->hasInitializer())
        return MakeAddrLValue(Object, M->getType(), AlignmentSource::Decl);

      Var->setInitializer(CGM.EmitNullConstant(E->getType()));
    }
    LValue RefTempDst = MakeAddrLValue(Object, M->getType(),
                                       AlignmentSource::Decl);

    switch (getEvaluationKind(E->getType())) {
    default: llvm_unreachable("expected scalar or aggregate expression");
    case TEK_Scalar:
      EmitScalarInit(E, M->getExtendingDecl(), RefTempDst, false);
      break;
    case TEK_Aggregate: {
      EmitAggExpr(E, AggValueSlot::forAddr(Object,
                                           E->getType().getQualifiers(),
                                           AggValueSlot::IsDestructed,
                                           AggValueSlot::DoesNotNeedGCBarriers,
                                           AggValueSlot::IsNotAliased,
                                           AggValueSlot::DoesNotOverlap));
      break;
    }
    }

    pushTemporaryCleanup(*this, M, E, Object);
    return RefTempDst;
  }

  SmallVector<const Expr *, 2> CommaLHSs;
  SmallVector<SubobjectAdjustment, 2> Adjustments;
  E = E->skipRValueSubobjectAdjustments(CommaLHSs, Adjustments);

  for (const auto &Ignored : CommaLHSs)
    EmitIgnoredExpr(Ignored);

  if (const auto *opaque = dyn_cast<OpaqueValueExpr>(E)) {
    if (opaque->getType()->isRecordType()) {
      assert(Adjustments.empty());
      return EmitOpaqueValueLValue(opaque);
    }
  }

  // Create and initialize the reference temporary.
  Address Alloca = Address::invalid();
  Address Object = createReferenceTemporary(*this, M, E, &Alloca);
  if (auto *Var = dyn_cast<llvm::GlobalVariable>(
          Object.getPointer()->stripPointerCasts())) {
    llvm::Type *TemporaryType = ConvertTypeForMem(E->getType());
    Object = Address(llvm::ConstantExpr::getBitCast(
                         cast<llvm::Constant>(Object.getPointer()),
                         TemporaryType->getPointerTo()),
                     TemporaryType,
                     Object.getAlignment());
    // If the temporary is a global and has a constant initializer or is a
    // constant temporary that we promoted to a global, we may have already
    // initialized it.
    if (!Var->hasInitializer()) {
      Var->setInitializer(CGM.EmitNullConstant(E->getType()));
      EmitAnyExprToMem(E, Object, Qualifiers(), /*IsInit*/true);
    }
  } else {
    switch (M->getStorageDuration()) {
    case SD_Automatic:
      if (auto *Size = EmitLifetimeStart(
              CGM.getDataLayout().getTypeAllocSize(Alloca.getElementType()),
              Alloca.getPointer())) {
        pushCleanupAfterFullExpr<CallLifetimeEnd>(NormalEHLifetimeMarker,
                                                  Alloca, Size);
      }
      break;

    case SD_FullExpression: {
      if (!ShouldEmitLifetimeMarkers)
        break;

      // Avoid creating a conditional cleanup just to hold an llvm.lifetime.end
      // marker. Instead, start the lifetime of a conditional temporary earlier
      // so that it's unconditional. Don't do this with sanitizers which need
      // more precise lifetime marks.
      ConditionalEvaluation *OldConditional = nullptr;
      CGBuilderTy::InsertPoint OldIP;
      if (isInConditionalBranch() && !E->getType().isDestructedType() &&
          !SanOpts.has(SanitizerKind::HWAddress) &&
          !SanOpts.has(SanitizerKind::Memory) &&
          !CGM.getCodeGenOpts().SanitizeAddressUseAfterScope) {
        OldConditional = OutermostConditional;
        OutermostConditional = nullptr;

        OldIP = Builder.saveIP();
        llvm::BasicBlock *Block = OldConditional->getStartingBlock();
        Builder.restoreIP(CGBuilderTy::InsertPoint(
            Block, llvm::BasicBlock::iterator(Block->back())));
      }

      if (auto *Size = EmitLifetimeStart(
              CGM.getDataLayout().getTypeAllocSize(Alloca.getElementType()),
              Alloca.getPointer())) {
        pushFullExprCleanup<CallLifetimeEnd>(NormalEHLifetimeMarker, Alloca,
                                             Size);
      }

      if (OldConditional) {
        OutermostConditional = OldConditional;
        Builder.restoreIP(OldIP);
      }
      break;
    }

    default:
      break;
    }
    EmitAnyExprToMem(E, Object, Qualifiers(), /*IsInit*/true);
  }
  pushTemporaryCleanup(*this, M, E, Object);

  // Perform derived-to-base casts and/or field accesses, to get from the
  // temporary object we created (and, potentially, for which we extended
  // the lifetime) to the subobject we're binding the reference to.
  for (SubobjectAdjustment &Adjustment : llvm::reverse(Adjustments)) {
    switch (Adjustment.Kind) {
    case SubobjectAdjustment::DerivedToBaseAdjustment:
      Object =
          GetAddressOfBaseClass(Object, Adjustment.DerivedToBase.DerivedClass,
                                Adjustment.DerivedToBase.BasePath->path_begin(),
                                Adjustment.DerivedToBase.BasePath->path_end(),
                                /*NullCheckValue=*/ false, E->getExprLoc());
      break;

    case SubobjectAdjustment::FieldAdjustment: {
      LValue LV = MakeAddrLValue(Object, E->getType(), AlignmentSource::Decl);
      LV = EmitLValueForField(LV, Adjustment.Field);
      assert(LV.isSimple() &&
             "materialized temporary field is not a simple lvalue");
      Object = LV.getAddress(*this);
      break;
    }

    case SubobjectAdjustment::MemberPointerAdjustment: {
      llvm::Value *Ptr = EmitScalarExpr(Adjustment.Ptr.RHS);
      Object = EmitCXXMemberDataPointerAddress(E, Object, Ptr,
                                               Adjustment.Ptr.MPT);
      break;
    }
    }
  }

  return MakeAddrLValue(Object, M->getType(), AlignmentSource::Decl);
}

RValue
CodeGenFunction::EmitReferenceBindingToExpr(const Expr *E) {
  // Emit the expression as an lvalue.
  LValue LV = EmitLValue(E);
  assert(LV.isSimple());
  llvm::Value *Value = LV.getPointer(*this);

  if (sanitizePerformTypeCheck() && !E->getType()->isFunctionType()) {
    // C++11 [dcl.ref]p5 (as amended by core issue 453):
    //   If a glvalue to which a reference is directly bound designates neither
    //   an existing object or function of an appropriate type nor a region of
    //   storage of suitable size and alignment to contain an object of the
    //   reference's type, the behavior is undefined.
    QualType Ty = E->getType();
    EmitTypeCheck(TCK_ReferenceBinding, E->getExprLoc(), Value, Ty);
  }

  return RValue::get(Value);
}

/// getAccessedFieldNo - Given an encoded value and a result number, return the
/// input field number being accessed.
unsigned CodeGenFunction::getAccessedFieldNo(unsigned Idx,
                                             const llvm::Constant *Elts) {
  return cast<llvm::ConstantInt>(Elts->getAggregateElement(Idx))
      ->getZExtValue();
}

/// Emit the hash_16_bytes function from include/llvm/ADT/Hashing.h.
static llvm::Value *emitHash16Bytes(CGBuilderTy &Builder, llvm::Value *Low,
                                    llvm::Value *High) {
  llvm::Value *KMul = Builder.getInt64(0x9ddfea08eb382d69ULL);
  llvm::Value *K47 = Builder.getInt64(47);
  llvm::Value *A0 = Builder.CreateMul(Builder.CreateXor(Low, High), KMul);
  llvm::Value *A1 = Builder.CreateXor(Builder.CreateLShr(A0, K47), A0);
  llvm::Value *B0 = Builder.CreateMul(Builder.CreateXor(High, A1), KMul);
  llvm::Value *B1 = Builder.CreateXor(Builder.CreateLShr(B0, K47), B0);
  return Builder.CreateMul(B1, KMul);
}
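
// In scalar form the mixing above is (a sketch; the authoritative definition
// is hash_16_bytes in llvm/ADT/Hashing.h, which derives from CityHash's
// Hash128to64):
//   uint64_t k = 0x9ddfea08eb382d69ULL;
//   uint64_t a = (lo ^ hi) * k;  a ^= (a >> 47);
//   uint64_t b = (hi ^ a) * k;   b ^= (b >> 47);
//   return b * k;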

bool CodeGenFunction::isNullPointerAllowed(TypeCheckKind TCK) {
  return TCK == TCK_DowncastPointer || TCK == TCK_Upcast ||
         TCK == TCK_UpcastToVirtualBase || TCK == TCK_DynamicOperation;
}

bool CodeGenFunction::isVptrCheckRequired(TypeCheckKind TCK, QualType Ty) {
  CXXRecordDecl *RD = Ty->getAsCXXRecordDecl();
  return (RD && RD->hasDefinition() && RD->isDynamicClass()) &&
         (TCK == TCK_MemberAccess || TCK == TCK_MemberCall ||
          TCK == TCK_DowncastPointer || TCK == TCK_DowncastReference ||
          TCK == TCK_UpcastToVirtualBase || TCK == TCK_DynamicOperation);
}

bool CodeGenFunction::sanitizePerformTypeCheck() const {
  return SanOpts.has(SanitizerKind::Null) ||
         SanOpts.has(SanitizerKind::Alignment) ||
         SanOpts.has(SanitizerKind::ObjectSize) ||
         SanOpts.has(SanitizerKind::Vptr);
}

void CodeGenFunction::EmitTypeCheck(TypeCheckKind TCK, SourceLocation Loc,
                                    llvm::Value *Ptr, QualType Ty,
                                    CharUnits Alignment,
                                    SanitizerSet SkippedChecks,
                                    llvm::Value *ArraySize) {
  if (!sanitizePerformTypeCheck())
    return;

  // Don't check pointers outside the default address space. The null check
  // isn't correct, the object-size check isn't supported by LLVM, and we can't
  // communicate the addresses to the runtime handler for the vptr check.
  if (Ptr->getType()->getPointerAddressSpace())
    return;

  // Don't check pointers to volatile data. The behavior here is
  // implementation-defined.
  if (Ty.isVolatileQualified())
    return;

  SanitizerScope SanScope(this);

  SmallVector<std::pair<llvm::Value *, SanitizerMask>, 3> Checks;
  llvm::BasicBlock *Done = nullptr;

  // Quickly determine whether we have a pointer to an alloca. It's possible
  // to skip null checks, and some alignment checks, for these pointers. This
  // can reduce compile-time significantly.
  auto PtrToAlloca = dyn_cast<llvm::AllocaInst>(Ptr->stripPointerCasts());

  llvm::Value *True = llvm::ConstantInt::getTrue(getLLVMContext());
  llvm::Value *IsNonNull = nullptr;
  bool IsGuaranteedNonNull =
      SkippedChecks.has(SanitizerKind::Null) || PtrToAlloca;
  bool AllowNullPointers = isNullPointerAllowed(TCK);
  if ((SanOpts.has(SanitizerKind::Null) || AllowNullPointers) &&
      !IsGuaranteedNonNull) {
    // The glvalue must not be an empty glvalue.
    IsNonNull = Builder.CreateIsNotNull(Ptr);

    // The IR builder can constant-fold the null check if the pointer points to
    // a constant.
    IsGuaranteedNonNull = IsNonNull == True;

    // Skip the null check if the pointer is known to be non-null.
    if (!IsGuaranteedNonNull) {
      if (AllowNullPointers) {
        // When performing pointer casts, it's OK if the value is null.
        // Skip the remaining checks in that case.
        Done = createBasicBlock("null");
        llvm::BasicBlock *Rest = createBasicBlock("not.null");
        Builder.CreateCondBr(IsNonNull, Rest, Done);
        EmitBlock(Rest);
      } else {
        Checks.push_back(std::make_pair(IsNonNull, SanitizerKind::Null));
      }
    }
  }

  if (SanOpts.has(SanitizerKind::ObjectSize) &&
      !SkippedChecks.has(SanitizerKind::ObjectSize) &&
      !Ty->isIncompleteType()) {
    uint64_t TySize = CGM.getMinimumObjectSize(Ty).getQuantity();
    llvm::Value *Size = llvm::ConstantInt::get(IntPtrTy, TySize);
    if (ArraySize)
      Size = Builder.CreateMul(Size, ArraySize);

    // Degenerate case: new X[0] does not need an objectsize check.
    llvm::Constant *ConstantSize = dyn_cast<llvm::Constant>(Size);
    if (!ConstantSize || !ConstantSize->isNullValue()) {
      // The glvalue must refer to a large enough storage region.
      // FIXME: If Address Sanitizer is enabled, insert dynamic instrumentation
      //        to check this.
      // FIXME: Get object address space
      llvm::Type *Tys[2] = { IntPtrTy, Int8PtrTy };
      llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::objectsize, Tys);
      llvm::Value *Min = Builder.getFalse();
      llvm::Value *NullIsUnknown = Builder.getFalse();
      llvm::Value *Dynamic = Builder.getFalse();
      llvm::Value *CastAddr = Builder.CreateBitCast(Ptr, Int8PtrTy);
      llvm::Value *LargeEnough = Builder.CreateICmpUGE(
          Builder.CreateCall(F, {CastAddr, Min, NullIsUnknown, Dynamic}), Size);
      Checks.push_back(std::make_pair(LargeEnough, SanitizerKind::ObjectSize));
    }
  }

  llvm::MaybeAlign AlignVal;
  llvm::Value *PtrAsInt = nullptr;

  if (SanOpts.has(SanitizerKind::Alignment) &&
      !SkippedChecks.has(SanitizerKind::Alignment)) {
    AlignVal = Alignment.getAsMaybeAlign();
    if (!Ty->isIncompleteType() && !AlignVal)
      AlignVal = CGM.getNaturalTypeAlignment(Ty, nullptr, nullptr,
                                             /*ForPointeeType=*/true)
                     .getAsMaybeAlign();

    // The glvalue must be suitably aligned.
    if (AlignVal && *AlignVal > llvm::Align(1) &&
        (!PtrToAlloca || PtrToAlloca->getAlign() < *AlignVal)) {
      PtrAsInt = Builder.CreatePtrToInt(Ptr, IntPtrTy);
      llvm::Value *Align = Builder.CreateAnd(
          PtrAsInt, llvm::ConstantInt::get(IntPtrTy, AlignVal->value() - 1));
      llvm::Value *Aligned =
          Builder.CreateICmpEQ(Align, llvm::ConstantInt::get(IntPtrTy, 0));
      if (Aligned != True)
        Checks.push_back(std::make_pair(Aligned, SanitizerKind::Alignment));
    }
  }
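
  // The alignment test above is the usual power-of-two mask trick: for a
  // required alignment A, the pointer is suitably aligned iff
  // (ptr & (A - 1)) == 0. For example, with A == 8 the mask is 0x7, so any
  // address with one of its low three bits set fails the check.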

  if (Checks.size() > 0) {
    llvm::Constant *StaticData[] = {
        EmitCheckSourceLocation(Loc), EmitCheckTypeDescriptor(Ty),
        llvm::ConstantInt::get(Int8Ty, AlignVal ? llvm::Log2(*AlignVal) : 1),
        llvm::ConstantInt::get(Int8Ty, TCK)};
    EmitCheck(Checks, SanitizerHandler::TypeMismatch, StaticData,
              PtrAsInt ? PtrAsInt : Ptr);
  }

  // If possible, check that the vptr indicates that there is a subobject of
  // type Ty at offset zero within this object.
  //
  // C++11 [basic.life]p5,6:
  //   [For storage which does not refer to an object within its lifetime]
  //   The program has undefined behavior if:
  //    -- the [pointer or glvalue] is used to access a non-static data member
  //       or call a non-static member function
  if (SanOpts.has(SanitizerKind::Vptr) &&
      !SkippedChecks.has(SanitizerKind::Vptr) && isVptrCheckRequired(TCK, Ty)) {
    // Ensure that the pointer is non-null before loading it. If there is no
    // compile-time guarantee, reuse the run-time null check or emit a new one.
    if (!IsGuaranteedNonNull) {
      if (!IsNonNull)
        IsNonNull = Builder.CreateIsNotNull(Ptr);
      if (!Done)
        Done = createBasicBlock("vptr.null");
      llvm::BasicBlock *VptrNotNull = createBasicBlock("vptr.not.null");
      Builder.CreateCondBr(IsNonNull, VptrNotNull, Done);
      EmitBlock(VptrNotNull);
    }

    // Compute a hash of the mangled name of the type.
    //
    // FIXME: This is not guaranteed to be deterministic! Move to a
    //        fingerprinting mechanism once LLVM provides one. For the time
    //        being the implementation happens to be deterministic.
    SmallString<64> MangledName;
    llvm::raw_svector_ostream Out(MangledName);
    CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty.getUnqualifiedType(),
                                                     Out);

    // Contained in NoSanitizeList based on the mangled type.
    if (!CGM.getContext().getNoSanitizeList().containsType(SanitizerKind::Vptr,
                                                           Out.str())) {
      llvm::hash_code TypeHash = hash_value(Out.str());

      // Load the vptr, and compute hash_16_bytes(TypeHash, vptr).
      llvm::Value *Low = llvm::ConstantInt::get(Int64Ty, TypeHash);
      llvm::Type *VPtrTy = llvm::PointerType::get(IntPtrTy, 0);
      Address VPtrAddr(Builder.CreateBitCast(Ptr, VPtrTy), IntPtrTy,
                       getPointerAlign());
      llvm::Value *VPtrVal = Builder.CreateLoad(VPtrAddr);
      llvm::Value *High = Builder.CreateZExt(VPtrVal, Int64Ty);

      llvm::Value *Hash = emitHash16Bytes(Builder, Low, High);
      Hash = Builder.CreateTrunc(Hash, IntPtrTy);

      // Look the hash up in our cache.
      const int CacheSize = 128;
      llvm::Type *HashTable = llvm::ArrayType::get(IntPtrTy, CacheSize);
      llvm::Value *Cache = CGM.CreateRuntimeVariable(HashTable,
                                                     "__ubsan_vptr_type_cache");
      llvm::Value *Slot = Builder.CreateAnd(Hash,
                                            llvm::ConstantInt::get(IntPtrTy,
                                                                   CacheSize-1));
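
      // The cache behaves like a direct-mapped table: the low
      // log2(CacheSize) bits of the hash select one of the 128 slots, and a
      // slot is a hit only if it holds the full hash. A miss falls through
      // to the runtime handler below, which validates the dynamic type and,
      // on success, fills the slot.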
  753. llvm::Value *Indices[] = { Builder.getInt32(0), Slot };
  754. llvm::Value *CacheVal = Builder.CreateAlignedLoad(
  755. IntPtrTy, Builder.CreateInBoundsGEP(HashTable, Cache, Indices),
  756. getPointerAlign());
  757. // If the hash isn't in the cache, call a runtime handler to perform the
  758. // hard work of checking whether the vptr is for an object of the right
  759. // type. This will either fill in the cache and return, or produce a
  760. // diagnostic.
  761. llvm::Value *EqualHash = Builder.CreateICmpEQ(CacheVal, Hash);
  762. llvm::Constant *StaticData[] = {
  763. EmitCheckSourceLocation(Loc),
  764. EmitCheckTypeDescriptor(Ty),
  765. CGM.GetAddrOfRTTIDescriptor(Ty.getUnqualifiedType()),
  766. llvm::ConstantInt::get(Int8Ty, TCK)
  767. };
  768. llvm::Value *DynamicData[] = { Ptr, Hash };
  769. EmitCheck(std::make_pair(EqualHash, SanitizerKind::Vptr),
  770. SanitizerHandler::DynamicTypeCacheMiss, StaticData,
  771. DynamicData);
  772. }
  773. }
  774. if (Done) {
  775. Builder.CreateBr(Done);
  776. EmitBlock(Done);
  777. }
  778. }
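
// A hypothetical source-level trigger for the vptr check above (illustrative
// only, not part of the original file): with -fsanitize=vptr, a member access
// through a pointer whose dynamic type lacks a Ty subobject at offset zero
// reaches the DynamicTypeCacheMiss handler.
//
//   struct A { virtual ~A() {} };
//   struct B : A { int m = 0; };
//   int f(A *a) { return static_cast<B *>(a)->m; }  // UB if *a is only an A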

llvm::Value *CodeGenFunction::LoadPassedObjectSize(const Expr *E,
                                                   QualType EltTy) {
  ASTContext &C = getContext();
  uint64_t EltSize = C.getTypeSizeInChars(EltTy).getQuantity();
  if (!EltSize)
    return nullptr;

  auto *ArrayDeclRef = dyn_cast<DeclRefExpr>(E->IgnoreParenImpCasts());
  if (!ArrayDeclRef)
    return nullptr;

  auto *ParamDecl = dyn_cast<ParmVarDecl>(ArrayDeclRef->getDecl());
  if (!ParamDecl)
    return nullptr;

  auto *POSAttr = ParamDecl->getAttr<PassObjectSizeAttr>();
  if (!POSAttr)
    return nullptr;

  // Don't load the size if it's a lower bound.
  int POSType = POSAttr->getType();
  if (POSType != 0 && POSType != 1)
    return nullptr;

  // Find the implicit size parameter.
  auto PassedSizeIt = SizeArguments.find(ParamDecl);
  if (PassedSizeIt == SizeArguments.end())
    return nullptr;

  const ImplicitParamDecl *PassedSizeDecl = PassedSizeIt->second;
  assert(LocalDeclMap.count(PassedSizeDecl) && "Passed size not loadable");
  Address AddrOfSize = LocalDeclMap.find(PassedSizeDecl)->second;
  llvm::Value *SizeInBytes = EmitLoadOfScalar(AddrOfSize, /*Volatile=*/false,
                                              C.getSizeType(), E->getExprLoc());
  llvm::Value *SizeOfElement =
      llvm::ConstantInt::get(SizeInBytes->getType(), EltSize);
  return Builder.CreateUDiv(SizeInBytes, SizeOfElement);
}
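
// For illustration, a hypothetical declaration that feeds this helper (not
// part of the original file): pass_object_size makes every call site pass
// __builtin_object_size(buf, 0) as a hidden argument, which the code above
// loads and divides by the element size to recover an element count.
//
//   void fill(char *buf __attribute__((pass_object_size(0))), char c);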

/// If Base is known to point to the start of an array, return the length of
/// that array. Return nullptr if the length cannot be determined.
static llvm::Value *getArrayIndexingBound(CodeGenFunction &CGF,
                                          const Expr *Base,
                                          QualType &IndexedType,
                                          LangOptions::StrictFlexArraysLevelKind
                                              StrictFlexArraysLevel) {
  // For the vector indexing extension, the bound is the number of elements.
  if (const VectorType *VT = Base->getType()->getAs<VectorType>()) {
    IndexedType = Base->getType();
    return CGF.Builder.getInt32(VT->getNumElements());
  }

  Base = Base->IgnoreParens();

  if (const auto *CE = dyn_cast<CastExpr>(Base)) {
    if (CE->getCastKind() == CK_ArrayToPointerDecay &&
        !CE->getSubExpr()->isFlexibleArrayMemberLike(CGF.getContext(),
                                                     StrictFlexArraysLevel)) {
      IndexedType = CE->getSubExpr()->getType();
      const ArrayType *AT = IndexedType->castAsArrayTypeUnsafe();
      if (const auto *CAT = dyn_cast<ConstantArrayType>(AT))
        return CGF.Builder.getInt(CAT->getSize());
      else if (const auto *VAT = dyn_cast<VariableArrayType>(AT))
        return CGF.getVLASize(VAT).NumElts;
      // Ignore pass_object_size here. It's not applicable on decayed pointers.
    }
  }

  QualType EltTy{Base->getType()->getPointeeOrArrayElementType(), 0};
  if (llvm::Value *POS = CGF.LoadPassedObjectSize(Base, EltTy)) {
    IndexedType = Base->getType();
    return POS;
  }

  return nullptr;
}

void CodeGenFunction::EmitBoundsCheck(const Expr *E, const Expr *Base,
                                      llvm::Value *Index, QualType IndexType,
                                      bool Accessed) {
  assert(SanOpts.has(SanitizerKind::ArrayBounds) &&
         "should not be called unless adding bounds checks");
  SanitizerScope SanScope(this);

  const LangOptions::StrictFlexArraysLevelKind StrictFlexArraysLevel =
      getLangOpts().getStrictFlexArraysLevel();

  QualType IndexedType;
  llvm::Value *Bound =
      getArrayIndexingBound(*this, Base, IndexedType, StrictFlexArraysLevel);
  if (!Bound)
    return;

  bool IndexSigned = IndexType->isSignedIntegerOrEnumerationType();
  llvm::Value *IndexVal = Builder.CreateIntCast(Index, SizeTy, IndexSigned);
  llvm::Value *BoundVal = Builder.CreateIntCast(Bound, SizeTy, false);

  llvm::Constant *StaticData[] = {
    EmitCheckSourceLocation(E->getExprLoc()),
    EmitCheckTypeDescriptor(IndexedType),
    EmitCheckTypeDescriptor(IndexType)
  };
  llvm::Value *Check = Accessed ? Builder.CreateICmpULT(IndexVal, BoundVal)
                                : Builder.CreateICmpULE(IndexVal, BoundVal);
  EmitCheck(std::make_pair(Check, SanitizerKind::ArrayBounds),
            SanitizerHandler::OutOfBounds, StaticData, Index);
}
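
// Illustrative (hypothetical) inputs for the check above, not from the
// original file: with -fsanitize=array-bounds, an actual access requires
// Index < Bound (ICmpULT), while merely forming the one-past-the-end address
// only requires Index <= Bound (ICmpULE).
//
//   int a[4];
//   int x = a[4];    // Accessed: 4 < 4 fails, runtime diagnostic
//   int *p = &a[4];  // not Accessed: 4 <= 4 passes, well-formed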

CodeGenFunction::ComplexPairTy CodeGenFunction::
EmitComplexPrePostIncDec(const UnaryOperator *E, LValue LV,
                         bool isInc, bool isPre) {
  ComplexPairTy InVal = EmitLoadOfComplex(LV, E->getExprLoc());

  llvm::Value *NextVal;
  if (isa<llvm::IntegerType>(InVal.first->getType())) {
    uint64_t AmountVal = isInc ? 1 : -1;
    NextVal = llvm::ConstantInt::get(InVal.first->getType(), AmountVal, true);

    // Add the inc/dec to the real part.
    NextVal = Builder.CreateAdd(InVal.first, NextVal, isInc ? "inc" : "dec");
  } else {
    QualType ElemTy = E->getType()->castAs<ComplexType>()->getElementType();
    llvm::APFloat FVal(getContext().getFloatTypeSemantics(ElemTy), 1);
    if (!isInc)
      FVal.changeSign();
    NextVal = llvm::ConstantFP::get(getLLVMContext(), FVal);

    // Add the inc/dec to the real part.
    NextVal = Builder.CreateFAdd(InVal.first, NextVal, isInc ? "inc" : "dec");
  }

  ComplexPairTy IncVal(NextVal, InVal.second);

  // Store the updated result through the lvalue.
  EmitStoreOfComplex(IncVal, LV, /*init*/ false);
  if (getLangOpts().OpenMP)
    CGM.getOpenMPRuntime().checkAndEmitLastprivateConditional(*this,
                                                              E->getSubExpr());

  // If this is a postinc, return the value read from memory, otherwise use the
  // updated value.
  return isPre ? IncVal : InVal;
}
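
// A hypothetical example of the semantics implemented above (illustrative
// only): ++ and -- on a _Complex value touch just the real component.
//
//   _Complex double z = 1.0 + 2.0i;  // GNU imaginary-constant spelling
//   ++z;                             // z == 2.0 + 2.0i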

void CodeGenModule::EmitExplicitCastExprType(const ExplicitCastExpr *E,
                                             CodeGenFunction *CGF) {
  // Bind VLAs in the cast type.
  if (CGF && E->getType()->isVariablyModifiedType())
    CGF->EmitVariablyModifiedType(E->getType());

  if (CGDebugInfo *DI = getModuleDebugInfo())
    DI->EmitExplicitCastType(E->getType());
}

//===----------------------------------------------------------------------===//
//                         LValue Expression Emission
//===----------------------------------------------------------------------===//

/// EmitPointerWithAlignment - Given an expression of pointer type, try to
/// derive a more accurate bound on the alignment of the pointer.
Address CodeGenFunction::EmitPointerWithAlignment(const Expr *E,
                                                  LValueBaseInfo *BaseInfo,
                                                  TBAAAccessInfo *TBAAInfo) {
  // We allow this with ObjC object pointers because of fragile ABIs.
  assert(E->getType()->isPointerType() ||
         E->getType()->isObjCObjectPointerType());
  E = E->IgnoreParens();

  // Casts:
  if (const CastExpr *CE = dyn_cast<CastExpr>(E)) {
    if (const auto *ECE = dyn_cast<ExplicitCastExpr>(CE))
      CGM.EmitExplicitCastExprType(ECE, this);

    switch (CE->getCastKind()) {
    // Non-converting casts (but not C's implicit conversion from void*).
    case CK_BitCast:
    case CK_NoOp:
    case CK_AddressSpaceConversion:
      if (auto PtrTy = CE->getSubExpr()->getType()->getAs<PointerType>()) {
        if (PtrTy->getPointeeType()->isVoidType())
          break;

        LValueBaseInfo InnerBaseInfo;
        TBAAAccessInfo InnerTBAAInfo;
        Address Addr = EmitPointerWithAlignment(CE->getSubExpr(),
                                                &InnerBaseInfo,
                                                &InnerTBAAInfo);
        if (BaseInfo) *BaseInfo = InnerBaseInfo;
        if (TBAAInfo) *TBAAInfo = InnerTBAAInfo;

        if (isa<ExplicitCastExpr>(CE)) {
          LValueBaseInfo TargetTypeBaseInfo;
          TBAAAccessInfo TargetTypeTBAAInfo;
          CharUnits Align = CGM.getNaturalPointeeTypeAlignment(
              E->getType(), &TargetTypeBaseInfo, &TargetTypeTBAAInfo);
          if (TBAAInfo)
            *TBAAInfo = CGM.mergeTBAAInfoForCast(*TBAAInfo,
                                                 TargetTypeTBAAInfo);
          // If the source l-value is opaque, honor the alignment of the
          // casted-to type.
          if (InnerBaseInfo.getAlignmentSource() != AlignmentSource::Decl) {
            if (BaseInfo)
              BaseInfo->mergeForCast(TargetTypeBaseInfo);
            Addr = Address(Addr.getPointer(), Addr.getElementType(), Align);
          }
        }

        if (SanOpts.has(SanitizerKind::CFIUnrelatedCast) &&
            CE->getCastKind() == CK_BitCast) {
          if (auto PT = E->getType()->getAs<PointerType>())
            EmitVTablePtrCheckForCast(PT->getPointeeType(), Addr,
                                      /*MayBeNull=*/true,
                                      CodeGenFunction::CFITCK_UnrelatedCast,
                                      CE->getBeginLoc());
        }

        llvm::Type *ElemTy = ConvertTypeForMem(E->getType()->getPointeeType());
        Addr = Builder.CreateElementBitCast(Addr, ElemTy);
        if (CE->getCastKind() == CK_AddressSpaceConversion)
          Addr = Builder.CreateAddrSpaceCast(Addr, ConvertType(E->getType()));
        return Addr;
      }
      break;

    // Array-to-pointer decay.
    case CK_ArrayToPointerDecay:
      return EmitArrayToPointerDecay(CE->getSubExpr(), BaseInfo, TBAAInfo);

    // Derived-to-base conversions.
    case CK_UncheckedDerivedToBase:
    case CK_DerivedToBase: {
      // TODO: Support accesses to members of base classes in TBAA. For now, we
      // conservatively pretend that the complete object is of the base class
      // type.
      if (TBAAInfo)
        *TBAAInfo = CGM.getTBAAAccessInfo(E->getType());
      Address Addr = EmitPointerWithAlignment(CE->getSubExpr(), BaseInfo);
      auto Derived = CE->getSubExpr()->getType()->getPointeeCXXRecordDecl();
      return GetAddressOfBaseClass(Addr, Derived,
                                   CE->path_begin(), CE->path_end(),
                                   ShouldNullCheckClassCastValue(CE),
                                   CE->getExprLoc());
    }

    // TODO: Is there any reason to treat base-to-derived conversions
    // specially?
    default:
      break;
    }
  }

  // Unary &.
  if (const UnaryOperator *UO = dyn_cast<UnaryOperator>(E)) {
    if (UO->getOpcode() == UO_AddrOf) {
      LValue LV = EmitLValue(UO->getSubExpr());
      if (BaseInfo) *BaseInfo = LV.getBaseInfo();
      if (TBAAInfo) *TBAAInfo = LV.getTBAAInfo();
      return LV.getAddress(*this);
    }
  }

  // std::addressof and variants.
  if (auto *Call = dyn_cast<CallExpr>(E)) {
    switch (Call->getBuiltinCallee()) {
    default:
      break;
    case Builtin::BIaddressof:
    case Builtin::BI__addressof:
    case Builtin::BI__builtin_addressof: {
      LValue LV = EmitLValue(Call->getArg(0));
      if (BaseInfo) *BaseInfo = LV.getBaseInfo();
      if (TBAAInfo) *TBAAInfo = LV.getTBAAInfo();
      return LV.getAddress(*this);
    }
    }
  }

  // TODO: conditional operators, comma.

  // Otherwise, use the alignment of the type.
  CharUnits Align =
      CGM.getNaturalPointeeTypeAlignment(E->getType(), BaseInfo, TBAAInfo);
  llvm::Type *ElemTy = ConvertTypeForMem(E->getType()->getPointeeType());
  return Address(EmitScalarExpr(E), ElemTy, Align);
}
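
// An illustrative (hypothetical) consequence of the cast handling above:
// a cast from void* is not treated as alignment-preserving, because the
// void* source carries no pointee alignment, so the result falls through to
// the natural alignment of the destination's pointee type.
//
//   double g(void *p) {
//     return *(double *)p;  // access assumes alignof(double), not p's origin
//   }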

llvm::Value *CodeGenFunction::EmitNonNullRValueCheck(RValue RV, QualType T) {
  llvm::Value *V = RV.getScalarVal();
  if (auto MPT = T->getAs<MemberPointerType>())
    return CGM.getCXXABI().EmitMemberPointerIsNotNull(*this, V, MPT);
  return Builder.CreateICmpNE(V, llvm::Constant::getNullValue(V->getType()));
}

RValue CodeGenFunction::GetUndefRValue(QualType Ty) {
  if (Ty->isVoidType())
    return RValue::get(nullptr);

  switch (getEvaluationKind(Ty)) {
  case TEK_Complex: {
    llvm::Type *EltTy =
        ConvertType(Ty->castAs<ComplexType>()->getElementType());
    llvm::Value *U = llvm::UndefValue::get(EltTy);
    return RValue::getComplex(std::make_pair(U, U));
  }

  // If this is a use of an undefined aggregate type, the aggregate must have an
  // identifiable address. Just because the contents of the value are undefined
  // doesn't mean that the address can't be taken and compared.
  case TEK_Aggregate: {
    Address DestPtr = CreateMemTemp(Ty, "undef.agg.tmp");
    return RValue::getAggregate(DestPtr);
  }

  case TEK_Scalar:
    return RValue::get(llvm::UndefValue::get(ConvertType(Ty)));
  }
  llvm_unreachable("bad evaluation kind");
}

RValue CodeGenFunction::EmitUnsupportedRValue(const Expr *E,
                                              const char *Name) {
  ErrorUnsupported(E, Name);
  return GetUndefRValue(E->getType());
}

LValue CodeGenFunction::EmitUnsupportedLValue(const Expr *E,
                                              const char *Name) {
  ErrorUnsupported(E, Name);
  llvm::Type *ElTy = ConvertType(E->getType());
  llvm::Type *Ty = llvm::PointerType::getUnqual(ElTy);
  return MakeAddrLValue(
      Address(llvm::UndefValue::get(Ty), ElTy, CharUnits::One()), E->getType());
}

bool CodeGenFunction::IsWrappedCXXThis(const Expr *Obj) {
  const Expr *Base = Obj;
  while (!isa<CXXThisExpr>(Base)) {
    // The result of a dynamic_cast can be null.
    if (isa<CXXDynamicCastExpr>(Base))
      return false;

    if (const auto *CE = dyn_cast<CastExpr>(Base)) {
      Base = CE->getSubExpr();
    } else if (const auto *PE = dyn_cast<ParenExpr>(Base)) {
      Base = PE->getSubExpr();
    } else if (const auto *UO = dyn_cast<UnaryOperator>(Base)) {
      if (UO->getOpcode() == UO_Extension)
        Base = UO->getSubExpr();
      else
        return false;
    } else {
      return false;
    }
  }
  return true;
}

LValue CodeGenFunction::EmitCheckedLValue(const Expr *E, TypeCheckKind TCK) {
  LValue LV;
  if (SanOpts.has(SanitizerKind::ArrayBounds) && isa<ArraySubscriptExpr>(E))
    LV = EmitArraySubscriptExpr(cast<ArraySubscriptExpr>(E), /*Accessed*/true);
  else
    LV = EmitLValue(E);
  if (!isa<DeclRefExpr>(E) && !LV.isBitField() && LV.isSimple()) {
    SanitizerSet SkippedChecks;
    if (const auto *ME = dyn_cast<MemberExpr>(E)) {
      bool IsBaseCXXThis = IsWrappedCXXThis(ME->getBase());
      if (IsBaseCXXThis)
        SkippedChecks.set(SanitizerKind::Alignment, true);
      if (IsBaseCXXThis || isa<DeclRefExpr>(ME->getBase()))
        SkippedChecks.set(SanitizerKind::Null, true);
    }
    EmitTypeCheck(TCK, E->getExprLoc(), LV.getPointer(*this), E->getType(),
                  LV.getAlignment(), SkippedChecks);
  }
  return LV;
}
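
// Rationale for the skipped checks above, with a hypothetical example (not
// from the original file): inside a non-static member function, `this` is
// already known non-null and suitably aligned, so re-checking an access such
// as `this->field` would only add redundant branches.
//
//   struct S {
//     int n;
//     int get() { return n; }  // implicit this->n: null/align checks skipped
//   };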

/// EmitLValue - Emit code to compute a designator that specifies the location
/// of the expression.
///
/// This can return one of two things: a simple address or a bitfield reference.
/// In either case, the LLVM Value* in the LValue structure is guaranteed to be
/// an LLVM pointer type.
///
/// If this returns a bitfield reference, nothing about the pointee type of the
/// LLVM value is known: For example, it may not be a pointer to an integer.
///
/// If this returns a normal address, and if the lvalue's C type is fixed size,
/// this method guarantees that the returned pointer type will point to an LLVM
/// type of the same size as the lvalue's type. If the lvalue has a variable
/// length type, this is not possible.
LValue CodeGenFunction::EmitLValue(const Expr *E) {
  ApplyDebugLocation DL(*this, E);
  switch (E->getStmtClass()) {
  default: return EmitUnsupportedLValue(E, "l-value expression");

  case Expr::ObjCPropertyRefExprClass:
    llvm_unreachable("cannot emit a property reference directly");

  case Expr::ObjCSelectorExprClass:
    return EmitObjCSelectorLValue(cast<ObjCSelectorExpr>(E));
  case Expr::ObjCIsaExprClass:
    return EmitObjCIsaExpr(cast<ObjCIsaExpr>(E));
  case Expr::BinaryOperatorClass:
    return EmitBinaryOperatorLValue(cast<BinaryOperator>(E));
  case Expr::CompoundAssignOperatorClass: {
    QualType Ty = E->getType();
    if (const AtomicType *AT = Ty->getAs<AtomicType>())
      Ty = AT->getValueType();
    if (!Ty->isAnyComplexType())
      return EmitCompoundAssignmentLValue(cast<CompoundAssignOperator>(E));
    return EmitComplexCompoundAssignmentLValue(cast<CompoundAssignOperator>(E));
  }
  case Expr::CallExprClass:
  case Expr::CXXMemberCallExprClass:
  case Expr::CXXOperatorCallExprClass:
  case Expr::UserDefinedLiteralClass:
    return EmitCallExprLValue(cast<CallExpr>(E));
  case Expr::CXXRewrittenBinaryOperatorClass:
    return EmitLValue(cast<CXXRewrittenBinaryOperator>(E)->getSemanticForm());
  case Expr::VAArgExprClass:
    return EmitVAArgExprLValue(cast<VAArgExpr>(E));
  case Expr::DeclRefExprClass:
    return EmitDeclRefLValue(cast<DeclRefExpr>(E));
  case Expr::ConstantExprClass: {
    const ConstantExpr *CE = cast<ConstantExpr>(E);
    if (llvm::Value *Result = ConstantEmitter(*this).tryEmitConstantExpr(CE)) {
      QualType RetType = cast<CallExpr>(CE->getSubExpr()->IgnoreImplicit())
                             ->getCallReturnType(getContext())
                             ->getPointeeType();
      return MakeNaturalAlignAddrLValue(Result, RetType);
    }
    return EmitLValue(cast<ConstantExpr>(E)->getSubExpr());
  }
  case Expr::ParenExprClass:
    return EmitLValue(cast<ParenExpr>(E)->getSubExpr());
  case Expr::GenericSelectionExprClass:
    return EmitLValue(cast<GenericSelectionExpr>(E)->getResultExpr());
  case Expr::PredefinedExprClass:
    return EmitPredefinedLValue(cast<PredefinedExpr>(E));
  case Expr::StringLiteralClass:
    return EmitStringLiteralLValue(cast<StringLiteral>(E));
  case Expr::ObjCEncodeExprClass:
    return EmitObjCEncodeExprLValue(cast<ObjCEncodeExpr>(E));
  case Expr::PseudoObjectExprClass:
    return EmitPseudoObjectLValue(cast<PseudoObjectExpr>(E));
  case Expr::InitListExprClass:
    return EmitInitListLValue(cast<InitListExpr>(E));
  case Expr::CXXTemporaryObjectExprClass:
  case Expr::CXXConstructExprClass:
    return EmitCXXConstructLValue(cast<CXXConstructExpr>(E));
  case Expr::CXXBindTemporaryExprClass:
    return EmitCXXBindTemporaryLValue(cast<CXXBindTemporaryExpr>(E));
  case Expr::CXXUuidofExprClass:
    return EmitCXXUuidofLValue(cast<CXXUuidofExpr>(E));
  case Expr::LambdaExprClass:
    return EmitAggExprToLValue(E);

  case Expr::ExprWithCleanupsClass: {
    const auto *cleanups = cast<ExprWithCleanups>(E);
    RunCleanupsScope Scope(*this);
    LValue LV = EmitLValue(cleanups->getSubExpr());
    if (LV.isSimple()) {
      // Defend against branches out of gnu statement expressions surrounded by
      // cleanups.
      Address Addr = LV.getAddress(*this);
      llvm::Value *V = Addr.getPointer();
      Scope.ForceCleanup({&V});
      return LValue::MakeAddr(Addr.withPointer(V), LV.getType(), getContext(),
                              LV.getBaseInfo(), LV.getTBAAInfo());
    }
    // FIXME: Is it possible to create an ExprWithCleanups that produces a
    // bitfield lvalue or some other non-simple lvalue?
    return LV;
  }

  case Expr::CXXDefaultArgExprClass: {
    auto *DAE = cast<CXXDefaultArgExpr>(E);
    CXXDefaultArgExprScope Scope(*this, DAE);
    return EmitLValue(DAE->getExpr());
  }
  case Expr::CXXDefaultInitExprClass: {
    auto *DIE = cast<CXXDefaultInitExpr>(E);
    CXXDefaultInitExprScope Scope(*this, DIE);
    return EmitLValue(DIE->getExpr());
  }
  case Expr::CXXTypeidExprClass:
    return EmitCXXTypeidLValue(cast<CXXTypeidExpr>(E));

  case Expr::ObjCMessageExprClass:
    return EmitObjCMessageExprLValue(cast<ObjCMessageExpr>(E));
  case Expr::ObjCIvarRefExprClass:
    return EmitObjCIvarRefLValue(cast<ObjCIvarRefExpr>(E));
  case Expr::StmtExprClass:
    return EmitStmtExprLValue(cast<StmtExpr>(E));
  case Expr::UnaryOperatorClass:
    return EmitUnaryOpLValue(cast<UnaryOperator>(E));
  case Expr::ArraySubscriptExprClass:
    return EmitArraySubscriptExpr(cast<ArraySubscriptExpr>(E));
  case Expr::MatrixSubscriptExprClass:
    return EmitMatrixSubscriptExpr(cast<MatrixSubscriptExpr>(E));
  case Expr::OMPArraySectionExprClass:
    return EmitOMPArraySectionExpr(cast<OMPArraySectionExpr>(E));
  case Expr::ExtVectorElementExprClass:
    return EmitExtVectorElementExpr(cast<ExtVectorElementExpr>(E));
  case Expr::CXXThisExprClass:
    return MakeAddrLValue(LoadCXXThisAddress(), E->getType());
  case Expr::MemberExprClass:
    return EmitMemberExpr(cast<MemberExpr>(E));
  case Expr::CompoundLiteralExprClass:
    return EmitCompoundLiteralLValue(cast<CompoundLiteralExpr>(E));
  case Expr::ConditionalOperatorClass:
    return EmitConditionalOperatorLValue(cast<ConditionalOperator>(E));
  case Expr::BinaryConditionalOperatorClass:
    return EmitConditionalOperatorLValue(cast<BinaryConditionalOperator>(E));
  case Expr::ChooseExprClass:
    return EmitLValue(cast<ChooseExpr>(E)->getChosenSubExpr());
  case Expr::OpaqueValueExprClass:
    return EmitOpaqueValueLValue(cast<OpaqueValueExpr>(E));
  case Expr::SubstNonTypeTemplateParmExprClass:
    return EmitLValue(cast<SubstNonTypeTemplateParmExpr>(E)->getReplacement());
  case Expr::ImplicitCastExprClass:
  case Expr::CStyleCastExprClass:
  case Expr::CXXFunctionalCastExprClass:
  case Expr::CXXStaticCastExprClass:
  case Expr::CXXDynamicCastExprClass:
  case Expr::CXXReinterpretCastExprClass:
  case Expr::CXXConstCastExprClass:
  case Expr::CXXAddrspaceCastExprClass:
  case Expr::ObjCBridgedCastExprClass:
    return EmitCastLValue(cast<CastExpr>(E));
  case Expr::MaterializeTemporaryExprClass:
    return EmitMaterializeTemporaryExpr(cast<MaterializeTemporaryExpr>(E));

  case Expr::CoawaitExprClass:
    return EmitCoawaitLValue(cast<CoawaitExpr>(E));
  case Expr::CoyieldExprClass:
    return EmitCoyieldLValue(cast<CoyieldExpr>(E));
  }
}

/// Given an object of the given canonical type, can we safely copy a
/// value out of it based on its initializer?
static bool isConstantEmittableObjectType(QualType type) {
  assert(type.isCanonical());
  assert(!type->isReferenceType());

  // Must be const-qualified but non-volatile.
  Qualifiers qs = type.getLocalQualifiers();
  if (!qs.hasConst() || qs.hasVolatile()) return false;

  // Otherwise, all object types satisfy this except C++ classes with
  // mutable subobjects or non-trivial copy/destroy behavior.
  if (const auto *RT = dyn_cast<RecordType>(type))
    if (const auto *RD = dyn_cast<CXXRecordDecl>(RT->getDecl()))
      if (RD->hasMutableFields() || !RD->isTrivial())
        return false;

  return true;
}

/// Can we constant-emit a load of a reference to a variable of the
/// given type? This is different from predicates like
/// Decl::mightBeUsableInConstantExpressions because we do want it to apply
/// in situations that don't necessarily satisfy the language's rules
/// for this (e.g. C++'s ODR-use rules). For example, we want to be able
/// to do this with const float variables even if those variables
/// aren't marked 'constexpr'.
enum ConstantEmissionKind {
  CEK_None,
  CEK_AsReferenceOnly,
  CEK_AsValueOrReference,
  CEK_AsValueOnly
};
static ConstantEmissionKind checkVarTypeForConstantEmission(QualType type) {
  type = type.getCanonicalType();
  if (const auto *ref = dyn_cast<ReferenceType>(type)) {
    if (isConstantEmittableObjectType(ref->getPointeeType()))
      return CEK_AsValueOrReference;
    return CEK_AsReferenceOnly;
  }
  if (isConstantEmittableObjectType(type))
    return CEK_AsValueOnly;
  return CEK_None;
}

/// Try to emit a reference to the given value without producing it as
/// an l-value. This is just an optimization, but it avoids us needing
/// to emit global copies of variables if they're named without triggering
/// a formal use in a context where we can't emit a direct reference to them,
/// for instance if a block or lambda or a member of a local class uses a
/// const int variable or constexpr variable from an enclosing function.
CodeGenFunction::ConstantEmission
CodeGenFunction::tryEmitAsConstant(DeclRefExpr *refExpr) {
  ValueDecl *value = refExpr->getDecl();

  // The value needs to be an enum constant or a constant variable.
  ConstantEmissionKind CEK;
  if (isa<ParmVarDecl>(value)) {
    CEK = CEK_None;
  } else if (auto *var = dyn_cast<VarDecl>(value)) {
    CEK = checkVarTypeForConstantEmission(var->getType());
  } else if (isa<EnumConstantDecl>(value)) {
    CEK = CEK_AsValueOnly;
  } else {
    CEK = CEK_None;
  }
  if (CEK == CEK_None) return ConstantEmission();

  Expr::EvalResult result;
  bool resultIsReference;
  QualType resultType;

  // It's best to evaluate all the way as an r-value if that's permitted.
  if (CEK != CEK_AsReferenceOnly &&
      refExpr->EvaluateAsRValue(result, getContext())) {
    resultIsReference = false;
    resultType = refExpr->getType();

  // Otherwise, try to evaluate as an l-value.
  } else if (CEK != CEK_AsValueOnly &&
             refExpr->EvaluateAsLValue(result, getContext())) {
    resultIsReference = true;
    resultType = value->getType();

  // Failure.
  } else {
    return ConstantEmission();
  }

  // In any case, if the initializer has side-effects, abandon ship.
  if (result.HasSideEffects)
    return ConstantEmission();

  // In CUDA/HIP device compilation, a lambda may capture a reference variable
  // referencing a global host variable by copy. In this case the lambda should
  // make a copy of the value of the global host variable. The DRE of the
  // captured reference variable cannot be emitted as load from the host
  // global variable as compile time constant, since the host variable is not
  // accessible on device. The DRE of the captured reference variable has to be
  // loaded from captures.
  if (CGM.getLangOpts().CUDAIsDevice && result.Val.isLValue() &&
      refExpr->refersToEnclosingVariableOrCapture()) {
    auto *MD = dyn_cast_or_null<CXXMethodDecl>(CurCodeDecl);
    if (MD && MD->getParent()->isLambda() &&
        MD->getOverloadedOperator() == OO_Call) {
      const APValue::LValueBase &base = result.Val.getLValueBase();
      if (const ValueDecl *D = base.dyn_cast<const ValueDecl *>()) {
        if (const VarDecl *VD = dyn_cast<const VarDecl>(D)) {
          if (!VD->hasAttr<CUDADeviceAttr>()) {
            return ConstantEmission();
          }
        }
      }
    }
  }

  // Emit as a constant.
  auto C = ConstantEmitter(*this).emitAbstract(refExpr->getLocation(),
                                               result.Val, resultType);

  // Make sure we emit a debug reference to the global variable.
  // This should probably fire even for
  if (isa<VarDecl>(value)) {
    if (!getContext().DeclMustBeEmitted(cast<VarDecl>(value)))
      EmitDeclRefExprDbgValue(refExpr, result.Val);
  } else {
    assert(isa<EnumConstantDecl>(value));
    EmitDeclRefExprDbgValue(refExpr, result.Val);
  }

  // If we emitted a reference constant, we need to dereference that.
  if (resultIsReference)
    return ConstantEmission::forReference(C);

  return ConstantEmission::forValue(C);
}
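
// A hypothetical case this optimization covers (illustrative only, not from
// the original file): the lambda below names `n` without odr-using it, so the
// body can fold the load to the constant 42 instead of materializing a
// capture or a global copy of `n`.
//
//   void h() {
//     const int n = 42;
//     auto f = [] { return n; };  // ill-formed if n were odr-used; OK here
//   }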

static DeclRefExpr *tryToConvertMemberExprToDeclRefExpr(CodeGenFunction &CGF,
                                                        const MemberExpr *ME) {
  if (auto *VD = dyn_cast<VarDecl>(ME->getMemberDecl())) {
    // Try to emit static variable member expressions as DREs.
    return DeclRefExpr::Create(
        CGF.getContext(), NestedNameSpecifierLoc(), SourceLocation(), VD,
        /*RefersToEnclosingVariableOrCapture=*/false, ME->getExprLoc(),
        ME->getType(), ME->getValueKind(), nullptr, nullptr, ME->isNonOdrUse());
  }
  return nullptr;
}

CodeGenFunction::ConstantEmission
CodeGenFunction::tryEmitAsConstant(const MemberExpr *ME) {
  if (DeclRefExpr *DRE = tryToConvertMemberExprToDeclRefExpr(*this, ME))
    return tryEmitAsConstant(DRE);
  return ConstantEmission();
}

llvm::Value *CodeGenFunction::emitScalarConstant(
    const CodeGenFunction::ConstantEmission &Constant, Expr *E) {
  assert(Constant && "not a constant");
  if (Constant.isReference())
    return EmitLoadOfLValue(Constant.getReferenceLValue(*this, E),
                            E->getExprLoc())
        .getScalarVal();
  return Constant.getValue();
}

llvm::Value *CodeGenFunction::EmitLoadOfScalar(LValue lvalue,
                                               SourceLocation Loc) {
  return EmitLoadOfScalar(lvalue.getAddress(*this), lvalue.isVolatile(),
                          lvalue.getType(), Loc, lvalue.getBaseInfo(),
                          lvalue.getTBAAInfo(), lvalue.isNontemporal());
}

static bool hasBooleanRepresentation(QualType Ty) {
  if (Ty->isBooleanType())
    return true;

  if (const EnumType *ET = Ty->getAs<EnumType>())
    return ET->getDecl()->getIntegerType()->isBooleanType();

  if (const AtomicType *AT = Ty->getAs<AtomicType>())
    return hasBooleanRepresentation(AT->getValueType());

  return false;
}

static bool getRangeForType(CodeGenFunction &CGF, QualType Ty,
                            llvm::APInt &Min, llvm::APInt &End,
                            bool StrictEnums, bool IsBool) {
  const EnumType *ET = Ty->getAs<EnumType>();
  bool IsRegularCPlusPlusEnum = CGF.getLangOpts().CPlusPlus && StrictEnums &&
                                ET && !ET->getDecl()->isFixed();
  if (!IsBool && !IsRegularCPlusPlusEnum)
    return false;

  if (IsBool) {
    Min = llvm::APInt(CGF.getContext().getTypeSize(Ty), 0);
    End = llvm::APInt(CGF.getContext().getTypeSize(Ty), 2);
  } else {
    const EnumDecl *ED = ET->getDecl();
    ED->getValueRange(End, Min);
  }
  return true;
}

llvm::MDNode *CodeGenFunction::getRangeForLoadFromType(QualType Ty) {
  llvm::APInt Min, End;
  if (!getRangeForType(*this, Ty, Min, End, CGM.getCodeGenOpts().StrictEnums,
                       hasBooleanRepresentation(Ty)))
    return nullptr;

  llvm::MDBuilder MDHelper(getLLVMContext());
  return MDHelper.createRange(Min, End);
}
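
// For illustration (hypothetical IR, not emitted by this file verbatim): a
// load of a C++ bool gets a half-open [Min, End) range so later passes can
// assume the in-memory value is 0 or 1.
//
//   %v = load i8, ptr %b, align 1, !range !0
//   !0 = !{i8 0, i8 2}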

bool CodeGenFunction::EmitScalarRangeCheck(llvm::Value *Value, QualType Ty,
                                           SourceLocation Loc) {
  bool HasBoolCheck = SanOpts.has(SanitizerKind::Bool);
  bool HasEnumCheck = SanOpts.has(SanitizerKind::Enum);
  if (!HasBoolCheck && !HasEnumCheck)
    return false;

  bool IsBool = hasBooleanRepresentation(Ty) ||
                NSAPI(CGM.getContext()).isObjCBOOLType(Ty);
  bool NeedsBoolCheck = HasBoolCheck && IsBool;
  bool NeedsEnumCheck = HasEnumCheck && Ty->getAs<EnumType>();
  if (!NeedsBoolCheck && !NeedsEnumCheck)
    return false;

  // Single-bit booleans don't need to be checked. Special-case this to avoid
  // a bit width mismatch when handling bitfield values. This is handled by
  // EmitFromMemory for the non-bitfield case.
  if (IsBool &&
      cast<llvm::IntegerType>(Value->getType())->getBitWidth() == 1)
    return false;

  llvm::APInt Min, End;
  if (!getRangeForType(*this, Ty, Min, End, /*StrictEnums=*/true, IsBool))
    return true;

  auto &Ctx = getLLVMContext();
  SanitizerScope SanScope(this);
  llvm::Value *Check;
  --End;
  if (!Min) {
    Check = Builder.CreateICmpULE(Value, llvm::ConstantInt::get(Ctx, End));
  } else {
    llvm::Value *Upper =
        Builder.CreateICmpSLE(Value, llvm::ConstantInt::get(Ctx, End));
    llvm::Value *Lower =
        Builder.CreateICmpSGE(Value, llvm::ConstantInt::get(Ctx, Min));
    Check = Builder.CreateAnd(Upper, Lower);
  }
  llvm::Constant *StaticArgs[] = {EmitCheckSourceLocation(Loc),
                                  EmitCheckTypeDescriptor(Ty)};
  SanitizerMask Kind =
      NeedsEnumCheck ? SanitizerKind::Enum : SanitizerKind::Bool;
  EmitCheck(std::make_pair(Check, Kind), SanitizerHandler::LoadInvalidValue,
            StaticArgs, EmitCheckValue(Value));
  return true;
}
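
// A hypothetical trigger for the check above (illustrative only): with
// -fsanitize=enum, loading a value outside the enum's representable range
// reaches the LoadInvalidValue handler.
//
//   enum Color { Red, Green, Blue };  // unfixed enum, valid range [0, 3]
//   int raw = 7;
//   Color c = *reinterpret_cast<Color *>(&raw);  // out of range: diagnosed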

llvm::Value *CodeGenFunction::EmitLoadOfScalar(Address Addr, bool Volatile,
                                               QualType Ty,
                                               SourceLocation Loc,
                                               LValueBaseInfo BaseInfo,
                                               TBAAAccessInfo TBAAInfo,
                                               bool isNontemporal) {
  if (auto *GV = dyn_cast<llvm::GlobalValue>(Addr.getPointer()))
    if (GV->isThreadLocal())
      Addr = Addr.withPointer(Builder.CreateThreadLocalAddress(GV));

  if (const auto *ClangVecTy = Ty->getAs<VectorType>()) {
    // Boolean vectors use `iN` as storage type.
    if (ClangVecTy->isExtVectorBoolType()) {
      llvm::Type *ValTy = ConvertType(Ty);
      unsigned ValNumElems =
          cast<llvm::FixedVectorType>(ValTy)->getNumElements();
      // Load the `iP` storage object (P is the padded vector size).
      auto *RawIntV = Builder.CreateLoad(Addr, Volatile, "load_bits");
      const auto *RawIntTy = RawIntV->getType();
      assert(RawIntTy->isIntegerTy() && "compressed iN storage for bitvectors");
      // Bitcast iP --> <P x i1>.
      auto *PaddedVecTy = llvm::FixedVectorType::get(
          Builder.getInt1Ty(), RawIntTy->getPrimitiveSizeInBits());
      llvm::Value *V = Builder.CreateBitCast(RawIntV, PaddedVecTy);
      // Shuffle <P x i1> --> <N x i1> (N is the actual bit size).
      V = emitBoolVecConversion(V, ValNumElems, "extractvec");

      return EmitFromMemory(V, Ty);
    }

    // Handle vectors of size 3 like size 4 for better performance.
    const llvm::Type *EltTy = Addr.getElementType();
    const auto *VTy = cast<llvm::FixedVectorType>(EltTy);

    if (!CGM.getCodeGenOpts().PreserveVec3Type && VTy->getNumElements() == 3) {
      // Bitcast to vec4 type.
      llvm::VectorType *vec4Ty =
          llvm::FixedVectorType::get(VTy->getElementType(), 4);
      Address Cast = Builder.CreateElementBitCast(Addr, vec4Ty, "castToVec4");
      // Now load value.
      llvm::Value *V = Builder.CreateLoad(Cast, Volatile, "loadVec4");

      // Shuffle vector to get vec3.
      V = Builder.CreateShuffleVector(V, ArrayRef<int>{0, 1, 2}, "extractVec");
      return EmitFromMemory(V, Ty);
    }
  }

  // Atomic operations have to be done on integral types.
  LValue AtomicLValue =
      LValue::MakeAddr(Addr, Ty, getContext(), BaseInfo, TBAAInfo);
  if (Ty->isAtomicType() || LValueIsSuitableForInlineAtomic(AtomicLValue)) {
    return EmitAtomicLoad(AtomicLValue, Loc).getScalarVal();
  }

  llvm::LoadInst *Load = Builder.CreateLoad(Addr, Volatile);
  if (isNontemporal) {
    llvm::MDNode *Node = llvm::MDNode::get(
        Load->getContext(), llvm::ConstantAsMetadata::get(Builder.getInt32(1)));
    Load->setMetadata(CGM.getModule().getMDKindID("nontemporal"), Node);
  }

  CGM.DecorateInstructionWithTBAA(Load, TBAAInfo);

  if (EmitScalarRangeCheck(Load, Ty, Loc)) {
    // In order to prevent the optimizer from throwing away the check, don't
    // attach range metadata to the load.
  } else if (CGM.getCodeGenOpts().OptimizationLevel > 0)
    if (llvm::MDNode *RangeInfo = getRangeForLoadFromType(Ty)) {
      Load->setMetadata(llvm::LLVMContext::MD_range, RangeInfo);
      Load->setMetadata(llvm::LLVMContext::MD_noundef,
                        llvm::MDNode::get(getLLVMContext(), std::nullopt));
    }

  return EmitFromMemory(Load, Ty);
}
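
// Illustration of the vec3-as-vec4 path above (hypothetical type, not from
// the original file): an ext_vector of 3 floats occupies 16 bytes like a
// vec4, so loading it as a vec4 and shuffling down to 3 lanes yields a
// single aligned load.
//
//   typedef float float3 __attribute__((ext_vector_type(3)));
//   float3 v;  // loaded as <4 x float>, then shuffled to <3 x float>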

llvm::Value *CodeGenFunction::EmitToMemory(llvm::Value *Value, QualType Ty) {
  // Bool has a different representation in memory than in registers.
  if (hasBooleanRepresentation(Ty)) {
    // This should really always be an i1, but sometimes it's already
    // an i8, and it's awkward to track those cases down.
    if (Value->getType()->isIntegerTy(1))
      return Builder.CreateZExt(Value, ConvertTypeForMem(Ty), "frombool");
    assert(Value->getType()->isIntegerTy(getContext().getTypeSize(Ty)) &&
           "wrong value rep of bool");
  }

  return Value;
}

llvm::Value *CodeGenFunction::EmitFromMemory(llvm::Value *Value, QualType Ty) {
  // Bool has a different representation in memory than in registers.
  if (hasBooleanRepresentation(Ty)) {
    assert(Value->getType()->isIntegerTy(getContext().getTypeSize(Ty)) &&
           "wrong value rep of bool");
    return Builder.CreateTrunc(Value, Builder.getInt1Ty(), "tobool");
  }
  if (Ty->isExtVectorBoolType()) {
    const auto *RawIntTy = Value->getType();
    // Bitcast iP --> <P x i1>.
    auto *PaddedVecTy = llvm::FixedVectorType::get(
        Builder.getInt1Ty(), RawIntTy->getPrimitiveSizeInBits());
    auto *V = Builder.CreateBitCast(Value, PaddedVecTy);
    // Shuffle <P x i1> --> <N x i1> (N is the actual bit size).
    llvm::Type *ValTy = ConvertType(Ty);
    unsigned ValNumElems = cast<llvm::FixedVectorType>(ValTy)->getNumElements();
    return emitBoolVecConversion(V, ValNumElems, "extractvec");
  }

  return Value;
}
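
// A hypothetical IR illustration of the bool conversions above (not emitted
// verbatim by this file): in registers a C++ bool is i1, in memory it is i8.
//
//   %mem = zext i1 %b to i8      ; EmitToMemory, "frombool"
//   %reg = trunc i8 %mem to i1   ; EmitFromMemory, "tobool"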

// Convert the pointer of \p Addr to a pointer to a vector (the value type of
// MatrixType), if it points to an array (the memory type of MatrixType).
static Address MaybeConvertMatrixAddress(Address Addr, CodeGenFunction &CGF,
                                         bool IsVector = true) {
  auto *ArrayTy = dyn_cast<llvm::ArrayType>(Addr.getElementType());
  if (ArrayTy && IsVector) {
    auto *VectorTy = llvm::FixedVectorType::get(ArrayTy->getElementType(),
                                                ArrayTy->getNumElements());

    return Address(CGF.Builder.CreateElementBitCast(Addr, VectorTy));
  }
  auto *VectorTy = dyn_cast<llvm::VectorType>(Addr.getElementType());
  if (VectorTy && !IsVector) {
    auto *ArrayTy = llvm::ArrayType::get(
        VectorTy->getElementType(),
        cast<llvm::FixedVectorType>(VectorTy)->getNumElements());

    return Address(CGF.Builder.CreateElementBitCast(Addr, ArrayTy));
  }

  return Addr;
}

// Emit a store of a matrix LValue. This may require casting the original
// pointer to memory address (ArrayType) to a pointer to the value type
// (VectorType).
static void EmitStoreOfMatrixScalar(llvm::Value *value, LValue lvalue,
                                    bool isInit, CodeGenFunction &CGF) {
  Address Addr = MaybeConvertMatrixAddress(lvalue.getAddress(CGF), CGF,
                                           value->getType()->isVectorTy());
  CGF.EmitStoreOfScalar(value, Addr, lvalue.isVolatile(), lvalue.getType(),
                        lvalue.getBaseInfo(), lvalue.getTBAAInfo(), isInit,
                        lvalue.isNontemporal());
}

void CodeGenFunction::EmitStoreOfScalar(llvm::Value *Value, Address Addr,
                                        bool Volatile, QualType Ty,
                                        LValueBaseInfo BaseInfo,
                                        TBAAAccessInfo TBAAInfo,
                                        bool isInit, bool isNontemporal) {
  if (auto *GV = dyn_cast<llvm::GlobalValue>(Addr.getPointer()))
    if (GV->isThreadLocal())
      Addr = Addr.withPointer(Builder.CreateThreadLocalAddress(GV));

  llvm::Type *SrcTy = Value->getType();
  if (const auto *ClangVecTy = Ty->getAs<VectorType>()) {
    auto *VecTy = dyn_cast<llvm::FixedVectorType>(SrcTy);
    if (VecTy && ClangVecTy->isExtVectorBoolType()) {
      auto *MemIntTy = cast<llvm::IntegerType>(Addr.getElementType());
      // Expand to the memory bit width.
      unsigned MemNumElems = MemIntTy->getPrimitiveSizeInBits();
      // <N x i1> --> <P x i1>.
      Value = emitBoolVecConversion(Value, MemNumElems, "insertvec");
      // <P x i1> --> iP.
      Value = Builder.CreateBitCast(Value, MemIntTy);
    } else if (!CGM.getCodeGenOpts().PreserveVec3Type) {
      // Handle vec3 special.
      if (VecTy && cast<llvm::FixedVectorType>(VecTy)->getNumElements() == 3) {
        // Our source is a vec3, do a shuffle vector to make it a vec4.
        Value = Builder.CreateShuffleVector(Value, ArrayRef<int>{0, 1, 2, -1},
                                            "extractVec");
        SrcTy = llvm::FixedVectorType::get(VecTy->getElementType(), 4);
      }
      if (Addr.getElementType() != SrcTy) {
        Addr = Builder.CreateElementBitCast(Addr, SrcTy, "storetmp");
      }
    }
  }

  Value = EmitToMemory(Value, Ty);

  LValue AtomicLValue =
      LValue::MakeAddr(Addr, Ty, getContext(), BaseInfo, TBAAInfo);
  if (Ty->isAtomicType() ||
      (!isInit && LValueIsSuitableForInlineAtomic(AtomicLValue))) {
    EmitAtomicStore(RValue::get(Value), AtomicLValue, isInit);
    return;
  }

  llvm::StoreInst *Store = Builder.CreateStore(Value, Addr, Volatile);
  if (isNontemporal) {
    llvm::MDNode *Node =
        llvm::MDNode::get(Store->getContext(),
                          llvm::ConstantAsMetadata::get(Builder.getInt32(1)));
    Store->setMetadata(CGM.getModule().getMDKindID("nontemporal"), Node);
  }

  CGM.DecorateInstructionWithTBAA(Store, TBAAInfo);
}

void CodeGenFunction::EmitStoreOfScalar(llvm::Value *value, LValue lvalue,
                                        bool isInit) {
  if (lvalue.getType()->isConstantMatrixType()) {
    EmitStoreOfMatrixScalar(value, lvalue, isInit, *this);
    return;
  }

  EmitStoreOfScalar(value, lvalue.getAddress(*this), lvalue.isVolatile(),
                    lvalue.getType(), lvalue.getBaseInfo(),
                    lvalue.getTBAAInfo(), isInit, lvalue.isNontemporal());
}

// Emit a load of a LValue of matrix type. This may require casting the pointer
// to memory address (ArrayType) to a pointer to the value type (VectorType).
static RValue EmitLoadOfMatrixLValue(LValue LV, SourceLocation Loc,
                                     CodeGenFunction &CGF) {
  assert(LV.getType()->isConstantMatrixType());
  Address Addr = MaybeConvertMatrixAddress(LV.getAddress(CGF), CGF);
  LV.setAddress(Addr);
  return RValue::get(CGF.EmitLoadOfScalar(LV, Loc));
}

/// EmitLoadOfLValue - Given an expression that represents a value lvalue, this
/// method emits the address of the lvalue, then loads the result as an rvalue,
/// returning the rvalue.
RValue CodeGenFunction::EmitLoadOfLValue(LValue LV, SourceLocation Loc) {
  if (LV.isObjCWeak()) {
    // load of a __weak object.
    Address AddrWeakObj = LV.getAddress(*this);
    return RValue::get(CGM.getObjCRuntime().EmitObjCWeakRead(*this,
                                                             AddrWeakObj));
  }
  if (LV.getQuals().getObjCLifetime() == Qualifiers::OCL_Weak) {
    // In MRC mode, we do a load+autorelease.
    if (!getLangOpts().ObjCAutoRefCount) {
      return RValue::get(EmitARCLoadWeak(LV.getAddress(*this)));
    }

    // In ARC mode, we load retained and then consume the value.
    llvm::Value *Object = EmitARCLoadWeakRetained(LV.getAddress(*this));
    Object = EmitObjCConsumeObject(LV.getType(), Object);
    return RValue::get(Object);
  }

  if (LV.isSimple()) {
    assert(!LV.getType()->isFunctionType());

    if (LV.getType()->isConstantMatrixType())
      return EmitLoadOfMatrixLValue(LV, Loc, *this);

    // Everything needs a load.
    return RValue::get(EmitLoadOfScalar(LV, Loc));
  }

  if (LV.isVectorElt()) {
    llvm::LoadInst *Load = Builder.CreateLoad(LV.getVectorAddress(),
                                              LV.isVolatileQualified());
    return RValue::get(Builder.CreateExtractElement(Load, LV.getVectorIdx(),
                                                    "vecext"));
  }

  // If this is a reference to a subset of the elements of a vector, either
  // shuffle the input or extract/insert them as appropriate.
  if (LV.isExtVectorElt()) {
    return EmitLoadOfExtVectorElementLValue(LV);
  }

  // Global register variables always invoke intrinsics.
  if (LV.isGlobalReg())
    return EmitLoadOfGlobalRegLValue(LV);

  if (LV.isMatrixElt()) {
    llvm::Value *Idx = LV.getMatrixIdx();
    if (CGM.getCodeGenOpts().OptimizationLevel > 0) {
      const auto *const MatTy = LV.getType()->castAs<ConstantMatrixType>();
      llvm::MatrixBuilder MB(Builder);
      MB.CreateIndexAssumption(Idx, MatTy->getNumElementsFlattened());
    }
    llvm::LoadInst *Load =
        Builder.CreateLoad(LV.getMatrixAddress(), LV.isVolatileQualified());
    return RValue::get(Builder.CreateExtractElement(Load, Idx, "matrixext"));
  }

  assert(LV.isBitField() && "Unknown LValue type!");
  return EmitLoadOfBitfieldLValue(LV, Loc);
}

RValue CodeGenFunction::EmitLoadOfBitfieldLValue(LValue LV,
                                                 SourceLocation Loc) {
  const CGBitFieldInfo &Info = LV.getBitFieldInfo();

  // Get the output type.
  llvm::Type *ResLTy = ConvertType(LV.getType());

  Address Ptr = LV.getBitFieldAddress();
  llvm::Value *Val =
      Builder.CreateLoad(Ptr, LV.isVolatileQualified(), "bf.load");

  bool UseVolatile = LV.isVolatileQualified() &&
                     Info.VolatileStorageSize != 0 && isAAPCS(CGM.getTarget());
  const unsigned Offset = UseVolatile ? Info.VolatileOffset : Info.Offset;
  const unsigned StorageSize =
      UseVolatile ? Info.VolatileStorageSize : Info.StorageSize;
  if (Info.IsSigned) {
    assert(static_cast<unsigned>(Offset + Info.Size) <= StorageSize);
    unsigned HighBits = StorageSize - Offset - Info.Size;
    if (HighBits)
      Val = Builder.CreateShl(Val, HighBits, "bf.shl");
    if (Offset + HighBits)
      Val = Builder.CreateAShr(Val, Offset + HighBits, "bf.ashr");
  } else {
    if (Offset)
      Val = Builder.CreateLShr(Val, Offset, "bf.lshr");
    if (static_cast<unsigned>(Offset) + Info.Size < StorageSize)
      Val = Builder.CreateAnd(
          Val, llvm::APInt::getLowBitsSet(StorageSize, Info.Size), "bf.clear");
  }
  Val = Builder.CreateIntCast(Val, ResLTy, Info.IsSigned, "bf.cast");
  EmitScalarRangeCheck(Val, LV.getType(), Loc);
  return RValue::get(Val);
}
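
// A worked (hypothetical) instance of the signed extraction above: for
//
//   struct S { int a : 3; int b : 7; };  // b: Offset = 3, Size = 7,
//                                        //    StorageSize = 32
//
// HighBits = 32 - 3 - 7 = 22, so loading b is shl 22 followed by ashr 25,
// which both discards the bits above b and sign-extends its top bit.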

// If this is a reference to a subset of the elements of a vector, create an
// appropriate shufflevector.
RValue CodeGenFunction::EmitLoadOfExtVectorElementLValue(LValue LV) {
  llvm::Value *Vec = Builder.CreateLoad(LV.getExtVectorAddress(),
                                        LV.isVolatileQualified());

  const llvm::Constant *Elts = LV.getExtVectorElts();

  // If the result of the expression is a non-vector type, we must be extracting
  // a single element. Just codegen as an extractelement.
  const VectorType *ExprVT = LV.getType()->getAs<VectorType>();
  if (!ExprVT) {
    unsigned InIdx = getAccessedFieldNo(0, Elts);
    llvm::Value *Elt = llvm::ConstantInt::get(SizeTy, InIdx);
    return RValue::get(Builder.CreateExtractElement(Vec, Elt));
  }

  // Always use shuffle vector to try to retain the original program structure.
  unsigned NumResultElts = ExprVT->getNumElements();

  SmallVector<int, 4> Mask;
  for (unsigned i = 0; i != NumResultElts; ++i)
    Mask.push_back(getAccessedFieldNo(i, Elts));

  Vec = Builder.CreateShuffleVector(Vec, Mask);
  return RValue::get(Vec);
}

/// Generates lvalue for partial ext_vector access.
Address CodeGenFunction::EmitExtVectorElementLValue(LValue LV) {
  Address VectorAddress = LV.getExtVectorAddress();
  QualType EQT = LV.getType()->castAs<VectorType>()->getElementType();
  llvm::Type *VectorElementTy = CGM.getTypes().ConvertType(EQT);

  Address CastToPointerElement =
      Builder.CreateElementBitCast(VectorAddress, VectorElementTy,
                                   "conv.ptr.element");

  const llvm::Constant *Elts = LV.getExtVectorElts();
  unsigned ix = getAccessedFieldNo(0, Elts);

  Address VectorBasePtrPlusIx =
      Builder.CreateConstInBoundsGEP(CastToPointerElement, ix,
                                     "vector.elt");

  return VectorBasePtrPlusIx;
}

/// Loads of global named registers are always calls to intrinsics.
RValue CodeGenFunction::EmitLoadOfGlobalRegLValue(LValue LV) {
  assert((LV.getType()->isIntegerType() || LV.getType()->isPointerType()) &&
         "Bad type for register variable");
  llvm::MDNode *RegName = cast<llvm::MDNode>(
      cast<llvm::MetadataAsValue>(LV.getGlobalReg())->getMetadata());

  // We accept integer and pointer types only.
  llvm::Type *OrigTy = CGM.getTypes().ConvertType(LV.getType());
  llvm::Type *Ty = OrigTy;
  if (OrigTy->isPointerTy())
    Ty = CGM.getTypes().getDataLayout().getIntPtrType(OrigTy);
  llvm::Type *Types[] = { Ty };

  llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::read_register, Types);
  llvm::Value *Call = Builder.CreateCall(
      F, llvm::MetadataAsValue::get(Ty->getContext(), RegName));
  if (OrigTy->isPointerTy())
    Call = Builder.CreateIntToPtr(Call, OrigTy);
  return RValue::get(Call);
}
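
// A hypothetical source form that reaches this path (illustrative only): a
// file-scope register variable bound to a named machine register lowers to
// the llvm.read_register intrinsic rather than a memory load.
//
//   register unsigned long current_sp __asm__("sp");  // e.g. on AArch64
//   unsigned long get_sp() { return current_sp; }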

/// EmitStoreThroughLValue - Store the specified rvalue into the specified
/// lvalue, where both are guaranteed to have the same type, and that type
/// is 'Ty'.
void CodeGenFunction::EmitStoreThroughLValue(RValue Src, LValue Dst,
                                             bool isInit) {
  if (!Dst.isSimple()) {
    if (Dst.isVectorElt()) {
      // Read/modify/write the vector, inserting the new element.
      llvm::Value *Vec = Builder.CreateLoad(Dst.getVectorAddress(),
                                            Dst.isVolatileQualified());
      auto *IRStoreTy = dyn_cast<llvm::IntegerType>(Vec->getType());
      if (IRStoreTy) {
        auto *IRVecTy = llvm::FixedVectorType::get(
            Builder.getInt1Ty(), IRStoreTy->getPrimitiveSizeInBits());
        // iN --> <N x i1>.
        Vec = Builder.CreateBitCast(Vec, IRVecTy);
      }
      Vec = Builder.CreateInsertElement(Vec, Src.getScalarVal(),
                                        Dst.getVectorIdx(), "vecins");
      if (IRStoreTy) {
        // <N x i1> --> <iN>.
        Vec = Builder.CreateBitCast(Vec, IRStoreTy);
      }
      Builder.CreateStore(Vec, Dst.getVectorAddress(),
                          Dst.isVolatileQualified());
      return;
    }

    // If this is an update of extended vector elements, insert them as
    // appropriate.
    if (Dst.isExtVectorElt())
      return EmitStoreThroughExtVectorComponentLValue(Src, Dst);

    if (Dst.isGlobalReg())
      return EmitStoreThroughGlobalRegLValue(Src, Dst);

    if (Dst.isMatrixElt()) {
      llvm::Value *Idx = Dst.getMatrixIdx();
      if (CGM.getCodeGenOpts().OptimizationLevel > 0) {
        const auto *const MatTy = Dst.getType()->castAs<ConstantMatrixType>();
        llvm::MatrixBuilder MB(Builder);
        MB.CreateIndexAssumption(Idx, MatTy->getNumElementsFlattened());
      }
      llvm::Instruction *Load = Builder.CreateLoad(Dst.getMatrixAddress());
      llvm::Value *Vec =
          Builder.CreateInsertElement(Load, Src.getScalarVal(), Idx, "matins");
      Builder.CreateStore(Vec, Dst.getMatrixAddress(),
                          Dst.isVolatileQualified());
      return;
    }

    assert(Dst.isBitField() && "Unknown LValue type");
    return EmitStoreThroughBitfieldLValue(Src, Dst);
  }

  // There's special magic for assigning into an ARC-qualified l-value.
  if (Qualifiers::ObjCLifetime Lifetime = Dst.getQuals().getObjCLifetime()) {
    switch (Lifetime) {
    case Qualifiers::OCL_None:
      llvm_unreachable("present but none");

    case Qualifiers::OCL_ExplicitNone:
      // nothing special
      break;

    case Qualifiers::OCL_Strong:
      if (isInit) {
        Src = RValue::get(EmitARCRetain(Dst.getType(), Src.getScalarVal()));
        break;
      }
      EmitARCStoreStrong(Dst, Src.getScalarVal(), /*ignore*/ true);
      return;

    case Qualifiers::OCL_Weak:
      if (isInit)
        // Initialize and then skip the primitive store.
        EmitARCInitWeak(Dst.getAddress(*this), Src.getScalarVal());
      else
        EmitARCStoreWeak(Dst.getAddress(*this), Src.getScalarVal(),
                         /*ignore*/ true);
      return;

    case Qualifiers::OCL_Autoreleasing:
      Src = RValue::get(EmitObjCExtendObjectLifetime(Dst.getType(),
                                                     Src.getScalarVal()));
      // fall into the normal path
      break;
    }
  }

  if (Dst.isObjCWeak() && !Dst.isNonGC()) {
    // Assignment to a __weak object.
    Address LvalueDst = Dst.getAddress(*this);
    llvm::Value *src = Src.getScalarVal();
    CGM.getObjCRuntime().EmitObjCWeakAssign(*this, src, LvalueDst);
    return;
  }

  if (Dst.isObjCStrong() && !Dst.isNonGC()) {
    // Assignment to a __strong object.
    Address LvalueDst = Dst.getAddress(*this);
    llvm::Value *src = Src.getScalarVal();
    if (Dst.isObjCIvar()) {
      assert(Dst.getBaseIvarExp() && "BaseIvarExp is NULL");
      llvm::Type *ResultType = IntPtrTy;
      Address dst = EmitPointerWithAlignment(Dst.getBaseIvarExp());
      llvm::Value *RHS = dst.getPointer();
      RHS = Builder.CreatePtrToInt(RHS, ResultType, "sub.ptr.rhs.cast");
      llvm::Value *LHS =
          Builder.CreatePtrToInt(LvalueDst.getPointer(), ResultType,
                                 "sub.ptr.lhs.cast");
      llvm::Value *BytesBetween = Builder.CreateSub(LHS, RHS, "ivar.offset");
      CGM.getObjCRuntime().EmitObjCIvarAssign(*this, src, dst,
                                              BytesBetween);
    } else if (Dst.isGlobalObjCRef()) {
      CGM.getObjCRuntime().EmitObjCGlobalAssign(*this, src, LvalueDst,
                                                Dst.isThreadLocalRef());
    } else {
      CGM.getObjCRuntime().EmitObjCStrongCastAssign(*this, src, LvalueDst);
    }
    return;
  }

  assert(Src.isScalar() && "Can't emit an agg store with this method");
  EmitStoreOfScalar(Src.getScalarVal(), Dst, isInit);
}
void CodeGenFunction::EmitStoreThroughBitfieldLValue(RValue Src, LValue Dst,
                                                     llvm::Value **Result) {
  const CGBitFieldInfo &Info = Dst.getBitFieldInfo();
  llvm::Type *ResLTy = ConvertTypeForMem(Dst.getType());
  Address Ptr = Dst.getBitFieldAddress();

  // Get the source value, truncated to the width of the bit-field.
  llvm::Value *SrcVal = Src.getScalarVal();

  // Cast the source to the storage type and shift it into place.
  SrcVal = Builder.CreateIntCast(SrcVal, Ptr.getElementType(),
                                 /*isSigned=*/false);
  llvm::Value *MaskedVal = SrcVal;

  const bool UseVolatile =
      CGM.getCodeGenOpts().AAPCSBitfieldWidth && Dst.isVolatileQualified() &&
      Info.VolatileStorageSize != 0 && isAAPCS(CGM.getTarget());
  const unsigned StorageSize =
      UseVolatile ? Info.VolatileStorageSize : Info.StorageSize;
  const unsigned Offset = UseVolatile ? Info.VolatileOffset : Info.Offset;
  // See if there are other bits in the bit-field's storage we'll need to load
  // and mask together with source before storing.
  if (StorageSize != Info.Size) {
    assert(StorageSize > Info.Size && "Invalid bitfield size.");
    llvm::Value *Val =
        Builder.CreateLoad(Ptr, Dst.isVolatileQualified(), "bf.load");

    // Mask the source value as needed.
    if (!hasBooleanRepresentation(Dst.getType()))
      SrcVal = Builder.CreateAnd(
          SrcVal, llvm::APInt::getLowBitsSet(StorageSize, Info.Size),
          "bf.value");
    MaskedVal = SrcVal;
    if (Offset)
      SrcVal = Builder.CreateShl(SrcVal, Offset, "bf.shl");

    // Mask out the original value.
    Val = Builder.CreateAnd(
        Val, ~llvm::APInt::getBitsSet(StorageSize, Offset, Offset + Info.Size),
        "bf.clear");

    // Or together the unchanged values and the source value.
    SrcVal = Builder.CreateOr(Val, SrcVal, "bf.set");
  } else {
    assert(Offset == 0);
    // According to the AAPCS:
    // When a volatile bit-field is written, and its container does not overlap
    // with any non-bit-field member, its container must be read exactly once
    // and written exactly once using the access width appropriate to the type
    // of the container. The two accesses are not atomic.
    if (Dst.isVolatileQualified() && isAAPCS(CGM.getTarget()) &&
        CGM.getCodeGenOpts().ForceAAPCSBitfieldLoad)
      Builder.CreateLoad(Ptr, true, "bf.load");
  }

  // Write the new value back out.
  Builder.CreateStore(SrcVal, Ptr, Dst.isVolatileQualified());

  // Return the new value of the bit-field, if requested.
  if (Result) {
    llvm::Value *ResultVal = MaskedVal;

    // Sign extend the value if needed.
    if (Info.IsSigned) {
      assert(Info.Size <= StorageSize);
      unsigned HighBits = StorageSize - Info.Size;
      if (HighBits) {
        ResultVal = Builder.CreateShl(ResultVal, HighBits, "bf.result.shl");
        ResultVal = Builder.CreateAShr(ResultVal, HighBits, "bf.result.ashr");
      }
    }

    ResultVal = Builder.CreateIntCast(ResultVal, ResLTy, Info.IsSigned,
                                      "bf.result.cast");
    *Result = EmitFromMemory(ResultVal, Dst.getType());
  }
}
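
/// Store through an ext-vector component lvalue (e.g. V.xy = ...). This is
/// a read/modify/write of the underlying vector: load it, shuffle or insert
/// the source elements into the accessed positions, and store the result.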
void CodeGenFunction::EmitStoreThroughExtVectorComponentLValue(RValue Src,
                                                               LValue Dst) {
  // This access turns into a read/modify/write of the vector. Load the input
  // value now.
  llvm::Value *Vec = Builder.CreateLoad(Dst.getExtVectorAddress(),
                                        Dst.isVolatileQualified());
  const llvm::Constant *Elts = Dst.getExtVectorElts();

  llvm::Value *SrcVal = Src.getScalarVal();

  if (const VectorType *VTy = Dst.getType()->getAs<VectorType>()) {
    unsigned NumSrcElts = VTy->getNumElements();
    unsigned NumDstElts =
        cast<llvm::FixedVectorType>(Vec->getType())->getNumElements();
    if (NumDstElts == NumSrcElts) {
      // Use a shuffle vector if the src and destination have the same number
      // of elements, and restore the vector mask since it is on the side it
      // will be stored.
      SmallVector<int, 4> Mask(NumDstElts);
      for (unsigned i = 0; i != NumSrcElts; ++i)
        Mask[getAccessedFieldNo(i, Elts)] = i;

      Vec = Builder.CreateShuffleVector(SrcVal, Mask);
    } else if (NumDstElts > NumSrcElts) {
      // Extend the source vector to the same length and then shuffle it
      // into the destination.
      // FIXME: since we're shuffling with undef, can we just use the indices
      //        into that? This could be simpler.
      SmallVector<int, 4> ExtMask;
      for (unsigned i = 0; i != NumSrcElts; ++i)
        ExtMask.push_back(i);
      ExtMask.resize(NumDstElts, -1);
      llvm::Value *ExtSrcVal = Builder.CreateShuffleVector(SrcVal, ExtMask);
      // build identity
      SmallVector<int, 4> Mask;
      for (unsigned i = 0; i != NumDstElts; ++i)
        Mask.push_back(i);

      // When the vector size is odd and .odd or .hi is used, the last element
      // of the Elts constant array will be one past the size of the vector.
      // Ignore the last element here, if it is greater than the mask size.
      if (getAccessedFieldNo(NumSrcElts - 1, Elts) == Mask.size())
        NumSrcElts--;

      // Update the mask positions for the elements shuffled in from the
      // extended source vector.
      for (unsigned i = 0; i != NumSrcElts; ++i)
        Mask[getAccessedFieldNo(i, Elts)] = i + NumDstElts;
      Vec = Builder.CreateShuffleVector(Vec, ExtSrcVal, Mask);
    } else {
      // We should never shorten the vector
      llvm_unreachable("unexpected shorten vector length");
    }
  } else {
    // If the Src is a scalar (not a vector), it must be updating one element.
    unsigned InIdx = getAccessedFieldNo(0, Elts);
    llvm::Value *Elt = llvm::ConstantInt::get(SizeTy, InIdx);
    Vec = Builder.CreateInsertElement(Vec, SrcVal, Elt);
  }

  Builder.CreateStore(Vec, Dst.getExtVectorAddress(),
                      Dst.isVolatileQualified());
}

/// Stores to global named registers are always calls to intrinsics.
void CodeGenFunction::EmitStoreThroughGlobalRegLValue(RValue Src, LValue Dst) {
  assert((Dst.getType()->isIntegerType() || Dst.getType()->isPointerType()) &&
         "Bad type for register variable");
  llvm::MDNode *RegName = cast<llvm::MDNode>(
      cast<llvm::MetadataAsValue>(Dst.getGlobalReg())->getMetadata());
  assert(RegName && "Register LValue is not metadata");

  // We accept integer and pointer types only
  llvm::Type *OrigTy = CGM.getTypes().ConvertType(Dst.getType());
  llvm::Type *Ty = OrigTy;
  if (OrigTy->isPointerTy())
    Ty = CGM.getTypes().getDataLayout().getIntPtrType(OrigTy);
  llvm::Type *Types[] = { Ty };

  llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::write_register, Types);
  llvm::Value *Value = Src.getScalarVal();
  if (OrigTy->isPointerTy())
    Value = Builder.CreatePtrToInt(Value, Ty);
  Builder.CreateCall(
      F, {llvm::MetadataAsValue::get(Ty->getContext(), RegName), Value});
}

// setObjCGCLValueClass - sets class of the lvalue for the purpose of
// generating the write-barrier API. It is currently a global, ivar,
// or neither.
static void setObjCGCLValueClass(const ASTContext &Ctx, const Expr *E,
                                 LValue &LV,
                                 bool IsMemberAccess=false) {
  if (Ctx.getLangOpts().getGC() == LangOptions::NonGC)
    return;

  if (isa<ObjCIvarRefExpr>(E)) {
    QualType ExpTy = E->getType();
    if (IsMemberAccess && ExpTy->isPointerType()) {
      // If ivar is a structure pointer, assigning to a field of
      // this struct follows gcc's behavior and makes it a non-ivar
      // write-barrier conservatively.
      ExpTy = ExpTy->castAs<PointerType>()->getPointeeType();
      if (ExpTy->isRecordType()) {
        LV.setObjCIvar(false);
        return;
      }
    }
    LV.setObjCIvar(true);
    auto *Exp = cast<ObjCIvarRefExpr>(const_cast<Expr *>(E));
    LV.setBaseIvarExp(Exp->getBase());
    LV.setObjCArray(E->getType()->isArrayType());
    return;
  }

  if (const auto *Exp = dyn_cast<DeclRefExpr>(E)) {
    if (const auto *VD = dyn_cast<VarDecl>(Exp->getDecl())) {
      if (VD->hasGlobalStorage()) {
        LV.setGlobalObjCRef(true);
        LV.setThreadLocalRef(VD->getTLSKind() != VarDecl::TLS_None);
      }
    }
    LV.setObjCArray(E->getType()->isArrayType());
    return;
  }

  if (const auto *Exp = dyn_cast<UnaryOperator>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    return;
  }

  if (const auto *Exp = dyn_cast<ParenExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    if (LV.isObjCIvar()) {
      // If cast is to a structure pointer, follow gcc's behavior and make it
      // a non-ivar write-barrier.
      QualType ExpTy = E->getType();
      if (ExpTy->isPointerType())
        ExpTy = ExpTy->castAs<PointerType>()->getPointeeType();
      if (ExpTy->isRecordType())
        LV.setObjCIvar(false);
    }
    return;
  }

  if (const auto *Exp = dyn_cast<GenericSelectionExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getResultExpr(), LV);
    return;
  }

  if (const auto *Exp = dyn_cast<ImplicitCastExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    return;
  }

  if (const auto *Exp = dyn_cast<CStyleCastExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    return;
  }

  if (const auto *Exp = dyn_cast<ObjCBridgedCastExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getSubExpr(), LV, IsMemberAccess);
    return;
  }

  if (const auto *Exp = dyn_cast<ArraySubscriptExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getBase(), LV);
    if (LV.isObjCIvar() && !LV.isObjCArray())
      // Using array syntax to assign to what an ivar points to is not the
      // same as assigning to the ivar itself. {id *Names;} Names[i] = 0;
      LV.setObjCIvar(false);
    else if (LV.isGlobalObjCRef() && !LV.isObjCArray())
      // Using array syntax to assign to what a global points to is not the
      // same as assigning to the global itself. {id *G;} G[i] = 0;
      LV.setGlobalObjCRef(false);
    return;
  }

  if (const auto *Exp = dyn_cast<MemberExpr>(E)) {
    setObjCGCLValueClass(Ctx, Exp->getBase(), LV, true);
    // We don't know if member is an 'ivar', but this flag is looked at
    // only in the context of LV.isObjCIvar().
    LV.setObjCArray(E->getType()->isArrayType());
    return;
  }
}

static llvm::Value *
EmitBitCastOfLValueToProperType(CodeGenFunction &CGF,
                                llvm::Value *V, llvm::Type *IRType,
                                StringRef Name = StringRef()) {
  unsigned AS = cast<llvm::PointerType>(V->getType())->getAddressSpace();
  return CGF.Builder.CreateBitCast(V, IRType->getPointerTo(AS), Name);
}
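
/// Build an lvalue for the current thread's copy of an OpenMP threadprivate
/// variable, routing through either the OpenMPIRBuilder or the OpenMP
/// runtime to obtain the thread-local address.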
static LValue EmitThreadPrivateVarDeclLValue(
    CodeGenFunction &CGF, const VarDecl *VD, QualType T, Address Addr,
    llvm::Type *RealVarTy, SourceLocation Loc) {
  if (CGF.CGM.getLangOpts().OpenMPIRBuilder)
    Addr = CodeGenFunction::OMPBuilderCBHelpers::getAddrOfThreadPrivate(
        CGF, VD, Addr, Loc);
  else
    Addr =
        CGF.CGM.getOpenMPRuntime().getAddrOfThreadPrivate(CGF, VD, Addr, Loc);

  Addr = CGF.Builder.CreateElementBitCast(Addr, RealVarTy);
  return CGF.MakeAddrLValue(Addr, T, AlignmentSource::Decl);
}
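
/// Compute the address of an OpenMP 'declare target' variable in device
/// code by loading through the pointer the runtime maintains for it, or
/// return an invalid address when no indirection is required.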
static Address emitDeclTargetVarDeclLValue(CodeGenFunction &CGF,
                                           const VarDecl *VD, QualType T) {
  std::optional<OMPDeclareTargetDeclAttr::MapTypeTy> Res =
      OMPDeclareTargetDeclAttr::isDeclareTargetDeclaration(VD);
  // Return an invalid address if variable is MT_To (or MT_Enter starting with
  // OpenMP 5.2) and unified memory is not enabled. For all other cases: MT_Link
  // and MT_To (or MT_Enter) with unified memory, return a valid address.
  if (!Res || ((*Res == OMPDeclareTargetDeclAttr::MT_To ||
                *Res == OMPDeclareTargetDeclAttr::MT_Enter) &&
               !CGF.CGM.getOpenMPRuntime().hasRequiresUnifiedSharedMemory()))
    return Address::invalid();

  assert(((*Res == OMPDeclareTargetDeclAttr::MT_Link) ||
          ((*Res == OMPDeclareTargetDeclAttr::MT_To ||
            *Res == OMPDeclareTargetDeclAttr::MT_Enter) &&
           CGF.CGM.getOpenMPRuntime().hasRequiresUnifiedSharedMemory())) &&
         "Expected link clause OR to clause with unified memory enabled.");
  QualType PtrTy = CGF.getContext().getPointerType(VD->getType());
  Address Addr = CGF.CGM.getOpenMPRuntime().getAddrOfDeclareTargetVar(VD);
  return CGF.EmitLoadOfPointer(Addr, PtrTy->castAs<PointerType>());
}
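
/// Load the pointer stored in a reference lvalue, returning it as an
/// Address of the referenced type with its natural alignment (and,
/// optionally, the base/TBAA info of the pointee).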
Address
CodeGenFunction::EmitLoadOfReference(LValue RefLVal,
                                     LValueBaseInfo *PointeeBaseInfo,
                                     TBAAAccessInfo *PointeeTBAAInfo) {
  llvm::LoadInst *Load =
      Builder.CreateLoad(RefLVal.getAddress(*this), RefLVal.isVolatile());
  CGM.DecorateInstructionWithTBAA(Load, RefLVal.getTBAAInfo());

  QualType PointeeType = RefLVal.getType()->getPointeeType();
  CharUnits Align = CGM.getNaturalTypeAlignment(
      PointeeType, PointeeBaseInfo, PointeeTBAAInfo,
      /* forPointeeType= */ true);
  return Address(Load, ConvertTypeForMem(PointeeType), Align);
}

LValue CodeGenFunction::EmitLoadOfReferenceLValue(LValue RefLVal) {
  LValueBaseInfo PointeeBaseInfo;
  TBAAAccessInfo PointeeTBAAInfo;
  Address PointeeAddr = EmitLoadOfReference(RefLVal, &PointeeBaseInfo,
                                            &PointeeTBAAInfo);
  return MakeAddrLValue(PointeeAddr, RefLVal.getType()->getPointeeType(),
                        PointeeBaseInfo, PointeeTBAAInfo);
}

Address CodeGenFunction::EmitLoadOfPointer(Address Ptr,
                                           const PointerType *PtrTy,
                                           LValueBaseInfo *BaseInfo,
                                           TBAAAccessInfo *TBAAInfo) {
  llvm::Value *Addr = Builder.CreateLoad(Ptr);
  return Address(Addr, ConvertTypeForMem(PtrTy->getPointeeType()),
                 CGM.getNaturalTypeAlignment(PtrTy->getPointeeType(), BaseInfo,
                                             TBAAInfo,
                                             /*forPointeeType=*/true));
}

LValue CodeGenFunction::EmitLoadOfPointerLValue(Address PtrAddr,
                                                const PointerType *PtrTy) {
  LValueBaseInfo BaseInfo;
  TBAAAccessInfo TBAAInfo;
  Address Addr = EmitLoadOfPointer(PtrAddr, PtrTy, &BaseInfo, &TBAAInfo);
  return MakeAddrLValue(Addr, PtrTy->getPointeeType(), BaseInfo, TBAAInfo);
}
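
/// Build an lvalue for a reference to a global variable, handling dynamic
/// TLS wrappers, OpenMP declare-target and threadprivate variables, and
/// globals of reference type.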
static LValue EmitGlobalVarDeclLValue(CodeGenFunction &CGF,
                                      const Expr *E, const VarDecl *VD) {
  QualType T = E->getType();

  // If it's thread_local, emit a call to its wrapper function instead.
  if (VD->getTLSKind() == VarDecl::TLS_Dynamic &&
      CGF.CGM.getCXXABI().usesThreadWrapperFunction(VD))
    return CGF.CGM.getCXXABI().EmitThreadLocalVarDeclLValue(CGF, VD, T);
  // Check if the variable is marked as declare target with link clause in
  // device codegen.
  if (CGF.getLangOpts().OpenMPIsDevice) {
    Address Addr = emitDeclTargetVarDeclLValue(CGF, VD, T);
    if (Addr.isValid())
      return CGF.MakeAddrLValue(Addr, T, AlignmentSource::Decl);
  }

  llvm::Value *V = CGF.CGM.GetAddrOfGlobalVar(VD);

  if (VD->getTLSKind() != VarDecl::TLS_None)
    V = CGF.Builder.CreateThreadLocalAddress(V);

  llvm::Type *RealVarTy = CGF.getTypes().ConvertTypeForMem(VD->getType());
  V = EmitBitCastOfLValueToProperType(CGF, V, RealVarTy);
  CharUnits Alignment = CGF.getContext().getDeclAlign(VD);
  Address Addr(V, RealVarTy, Alignment);
  // Emit reference to the private copy of the variable if it is an OpenMP
  // threadprivate variable.
  if (CGF.getLangOpts().OpenMP && !CGF.getLangOpts().OpenMPSimd &&
      VD->hasAttr<OMPThreadPrivateDeclAttr>()) {
    return EmitThreadPrivateVarDeclLValue(CGF, VD, T, Addr, RealVarTy,
                                          E->getExprLoc());
  }
  LValue LV = VD->getType()->isReferenceType() ?
      CGF.EmitLoadOfReferenceLValue(Addr, VD->getType(),
                                    AlignmentSource::Decl) :
      CGF.MakeAddrLValue(Addr, T, AlignmentSource::Decl);
  setObjCGCLValueClass(CGF.getContext(), E, LV);
  return LV;
}
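
/// Return the address of a function declaration, looking through weakref
/// aliases and correcting the IR type for uses of K&R-style definitions.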
static llvm::Constant *EmitFunctionDeclPointer(CodeGenModule &CGM,
                                               GlobalDecl GD) {
  const FunctionDecl *FD = cast<FunctionDecl>(GD.getDecl());

  if (FD->hasAttr<WeakRefAttr>()) {
    ConstantAddress aliasee = CGM.GetWeakRefReference(FD);
    return aliasee.getPointer();
  }

  llvm::Constant *V = CGM.GetAddrOfFunction(GD);
  if (!FD->hasPrototype()) {
    if (const FunctionProtoType *Proto =
            FD->getType()->getAs<FunctionProtoType>()) {
      // Ugly case: for a K&R-style definition, the type of the definition
      // isn't the same as the type of a use. Correct for this with a
      // bitcast.
      QualType NoProtoType =
          CGM.getContext().getFunctionNoProtoType(Proto->getReturnType());
      NoProtoType = CGM.getContext().getPointerType(NoProtoType);
      V = llvm::ConstantExpr::getBitCast(
          V, CGM.getTypes().ConvertType(NoProtoType));
    }
  }
  return V;
}

static LValue EmitFunctionDeclLValue(CodeGenFunction &CGF, const Expr *E,
                                     GlobalDecl GD) {
  const FunctionDecl *FD = cast<FunctionDecl>(GD.getDecl());
  llvm::Value *V = EmitFunctionDeclPointer(CGF.CGM, GD);
  CharUnits Alignment = CGF.getContext().getDeclAlign(FD);
  return CGF.MakeAddrLValue(V, E->getType(), Alignment,
                            AlignmentSource::Decl);
}

static LValue EmitCapturedFieldLValue(CodeGenFunction &CGF, const FieldDecl *FD,
                                      llvm::Value *ThisValue) {
  QualType TagType = CGF.getContext().getTagDeclType(FD->getParent());
  LValue LV = CGF.MakeNaturalAlignAddrLValue(ThisValue, TagType);
  return CGF.EmitLValueForField(LV, FD);
}

/// Named Registers are named metadata pointing to the register name
/// which will be read from/written to as an argument to the intrinsic
/// @llvm.read/write_register.
/// So far, only the name is being passed down, but other options such as
/// register type, allocation type or even optimization options could be
/// passed down via the metadata node.
static LValue EmitGlobalNamedRegister(const VarDecl *VD, CodeGenModule &CGM) {
  SmallString<64> Name("llvm.named.register.");
  AsmLabelAttr *Asm = VD->getAttr<AsmLabelAttr>();
  assert(Asm->getLabel().size() < 64 - Name.size() &&
         "Register name too big");
  Name.append(Asm->getLabel());
  llvm::NamedMDNode *M =
      CGM.getModule().getOrInsertNamedMetadata(Name);
  if (M->getNumOperands() == 0) {
    llvm::MDString *Str = llvm::MDString::get(CGM.getLLVMContext(),
                                              Asm->getLabel());
    llvm::Metadata *Ops[] = {Str};
    M->addOperand(llvm::MDNode::get(CGM.getLLVMContext(), Ops));
  }

  CharUnits Alignment = CGM.getContext().getDeclAlign(VD);

  llvm::Value *Ptr =
      llvm::MetadataAsValue::get(CGM.getLLVMContext(), M->getOperand(0));
  return LValue::MakeGlobalReg(Ptr, Alignment, VD->getType());
}

/// Determine whether we can emit a reference to \p VD from the current
/// context, despite not necessarily having seen an odr-use of the variable in
/// this context.
static bool canEmitSpuriousReferenceToVariable(CodeGenFunction &CGF,
                                               const DeclRefExpr *E,
                                               const VarDecl *VD,
                                               bool IsConstant) {
  // For a variable declared in an enclosing scope, do not emit a spurious
  // reference even if we have a capture, as that will emit an unwarranted
  // reference to our capture state, and will likely generate worse code than
  // emitting a local copy.
  if (E->refersToEnclosingVariableOrCapture())
    return false;

  // For a local declaration declared in this function, we can always reference
  // it even if we don't have an odr-use.
  if (VD->hasLocalStorage()) {
    return VD->getDeclContext() ==
           dyn_cast_or_null<DeclContext>(CGF.CurCodeDecl);
  }

  // For a global declaration, we can emit a reference to it if we know
  // for sure that we are able to emit a definition of it.
  VD = VD->getDefinition(CGF.getContext());
  if (!VD)
    return false;

  // Don't emit a spurious reference if it might be to a variable that only
  // exists on a different device / target.
  // FIXME: This is unnecessarily broad. Check whether this would actually be a
  // cross-target reference.
  if (CGF.getLangOpts().OpenMP || CGF.getLangOpts().CUDA ||
      CGF.getLangOpts().OpenCL) {
    return false;
  }

  // We can emit a spurious reference only if the linkage implies that we'll
  // be emitting a non-interposable symbol that will be retained until link
  // time.
  switch (CGF.CGM.getLLVMLinkageVarDefinition(VD, IsConstant)) {
  case llvm::GlobalValue::ExternalLinkage:
  case llvm::GlobalValue::LinkOnceODRLinkage:
  case llvm::GlobalValue::WeakODRLinkage:
  case llvm::GlobalValue::InternalLinkage:
  case llvm::GlobalValue::PrivateLinkage:
    return true;
  default:
    return false;
  }
}
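
/// Emit an lvalue for a DeclRefExpr, covering global named registers,
/// non-odr-use constants, lambda and block captures, local and global
/// variables, functions, structured bindings, and MS GUID / template
/// parameter objects.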
LValue CodeGenFunction::EmitDeclRefLValue(const DeclRefExpr *E) {
  const NamedDecl *ND = E->getDecl();
  QualType T = E->getType();

  assert(E->isNonOdrUse() != NOUR_Unevaluated &&
         "should not emit an unevaluated operand");

  if (const auto *VD = dyn_cast<VarDecl>(ND)) {
    // Global named registers are accessed via intrinsics only.
    if (VD->getStorageClass() == SC_Register &&
        VD->hasAttr<AsmLabelAttr>() && !VD->isLocalVarDecl())
      return EmitGlobalNamedRegister(VD, CGM);

    // If this DeclRefExpr does not constitute an odr-use of the variable,
    // we're not permitted to emit a reference to it in general, and it might
    // not be captured if capture would be necessary for a use. Emit the
    // constant value directly instead.
    if (E->isNonOdrUse() == NOUR_Constant &&
        (VD->getType()->isReferenceType() ||
         !canEmitSpuriousReferenceToVariable(*this, E, VD, true))) {
      VD->getAnyInitializer(VD);
      llvm::Constant *Val = ConstantEmitter(*this).emitAbstract(
          E->getLocation(), *VD->evaluateValue(), VD->getType());
      assert(Val && "failed to emit constant expression");

      Address Addr = Address::invalid();
      if (!VD->getType()->isReferenceType()) {
        // Spill the constant value to a global.
        Addr = CGM.createUnnamedGlobalFrom(*VD, Val,
                                           getContext().getDeclAlign(VD));
        llvm::Type *VarTy = getTypes().ConvertTypeForMem(VD->getType());
        auto *PTy = llvm::PointerType::get(
            VarTy, getTypes().getTargetAddressSpace(VD->getType()));
        Addr = Builder.CreatePointerBitCastOrAddrSpaceCast(Addr, PTy, VarTy);
      } else {
        // Should we be using the alignment of the constant pointer we emitted?
        CharUnits Alignment =
            CGM.getNaturalTypeAlignment(E->getType(),
                                        /* BaseInfo= */ nullptr,
                                        /* TBAAInfo= */ nullptr,
                                        /* forPointeeType= */ true);
        Addr = Address(Val, ConvertTypeForMem(E->getType()), Alignment);
      }
      return MakeAddrLValue(Addr, T, AlignmentSource::Decl);
    }

    // FIXME: Handle other kinds of non-odr-use DeclRefExprs.

    // Check for captured variables.
    if (E->refersToEnclosingVariableOrCapture()) {
      VD = VD->getCanonicalDecl();
      if (auto *FD = LambdaCaptureFields.lookup(VD))
        return EmitCapturedFieldLValue(*this, FD, CXXABIThisValue);
      if (CapturedStmtInfo) {
        auto I = LocalDeclMap.find(VD);
        if (I != LocalDeclMap.end()) {
          LValue CapLVal;
          if (VD->getType()->isReferenceType())
            CapLVal = EmitLoadOfReferenceLValue(I->second, VD->getType(),
                                                AlignmentSource::Decl);
          else
            CapLVal = MakeAddrLValue(I->second, T);
          // Mark lvalue as nontemporal if the variable is marked as
          // nontemporal in simd context.
          if (getLangOpts().OpenMP &&
              CGM.getOpenMPRuntime().isNontemporalDecl(VD))
            CapLVal.setNontemporal(/*Value=*/true);
          return CapLVal;
        }
        LValue CapLVal =
            EmitCapturedFieldLValue(*this, CapturedStmtInfo->lookup(VD),
                                    CapturedStmtInfo->getContextValue());
        Address LValueAddress = CapLVal.getAddress(*this);
        CapLVal = MakeAddrLValue(
            Address(LValueAddress.getPointer(), LValueAddress.getElementType(),
                    getContext().getDeclAlign(VD)),
            CapLVal.getType(), LValueBaseInfo(AlignmentSource::Decl),
            CapLVal.getTBAAInfo());
        // Mark lvalue as nontemporal if the variable is marked as nontemporal
        // in simd context.
        if (getLangOpts().OpenMP &&
            CGM.getOpenMPRuntime().isNontemporalDecl(VD))
          CapLVal.setNontemporal(/*Value=*/true);
        return CapLVal;
      }

      assert(isa<BlockDecl>(CurCodeDecl));
      Address addr = GetAddrOfBlockDecl(VD);
      return MakeAddrLValue(addr, T, AlignmentSource::Decl);
    }
  }

  // FIXME: We should be able to assert this for FunctionDecls as well!
  // FIXME: We should be able to assert this for all DeclRefExprs, not just
  // those with a valid source location.
  assert((ND->isUsed(false) || !isa<VarDecl>(ND) || E->isNonOdrUse() ||
          !E->getLocation().isValid()) &&
         "Should not use decl without marking it used!");

  if (ND->hasAttr<WeakRefAttr>()) {
    const auto *VD = cast<ValueDecl>(ND);
    ConstantAddress Aliasee = CGM.GetWeakRefReference(VD);
    return MakeAddrLValue(Aliasee, T, AlignmentSource::Decl);
  }

  if (const auto *VD = dyn_cast<VarDecl>(ND)) {
    // Check if this is a global variable.
    if (VD->hasLinkage() || VD->isStaticDataMember())
      return EmitGlobalVarDeclLValue(*this, E, VD);

    Address addr = Address::invalid();

    // The variable should generally be present in the local decl map.
    auto iter = LocalDeclMap.find(VD);
    if (iter != LocalDeclMap.end()) {
      addr = iter->second;

      // Otherwise, it might be a static local we haven't emitted yet for
      // some reason; most likely, because it's in an outer function.
    } else if (VD->isStaticLocal()) {
      llvm::Constant *var = CGM.getOrCreateStaticVarDecl(
          *VD, CGM.getLLVMLinkageVarDefinition(VD, /*IsConstant=*/false));
      addr = Address(
          var, ConvertTypeForMem(VD->getType()), getContext().getDeclAlign(VD));

      // No other cases for now.
    } else {
      llvm_unreachable("DeclRefExpr for Decl not entered in LocalDeclMap?");
    }

    // Handle threadlocal function locals.
    if (VD->getTLSKind() != VarDecl::TLS_None)
      addr =
          addr.withPointer(Builder.CreateThreadLocalAddress(addr.getPointer()));

    // Check for OpenMP threadprivate variables.
    if (getLangOpts().OpenMP && !getLangOpts().OpenMPSimd &&
        VD->hasAttr<OMPThreadPrivateDeclAttr>()) {
      return EmitThreadPrivateVarDeclLValue(
          *this, VD, T, addr, getTypes().ConvertTypeForMem(VD->getType()),
          E->getExprLoc());
    }

    // Drill into block byref variables.
    bool isBlockByref = VD->isEscapingByref();
    if (isBlockByref) {
      addr = emitBlockByrefAddress(addr, VD);
    }

    // Drill into reference types.
    LValue LV = VD->getType()->isReferenceType() ?
        EmitLoadOfReferenceLValue(addr, VD->getType(), AlignmentSource::Decl) :
        MakeAddrLValue(addr, T, AlignmentSource::Decl);

    bool isLocalStorage = VD->hasLocalStorage();

    bool NonGCable = isLocalStorage &&
                     !VD->getType()->isReferenceType() &&
                     !isBlockByref;
    if (NonGCable) {
      LV.getQuals().removeObjCGCAttr();
      LV.setNonGC(true);
    }

    bool isImpreciseLifetime =
        (isLocalStorage && !VD->hasAttr<ObjCPreciseLifetimeAttr>());
    if (isImpreciseLifetime)
      LV.setARCPreciseLifetime(ARCImpreciseLifetime);
    setObjCGCLValueClass(getContext(), E, LV);
    return LV;
  }

  if (const auto *FD = dyn_cast<FunctionDecl>(ND)) {
    LValue LV = EmitFunctionDeclLValue(*this, E, FD);

    // Emit debuginfo for the function declaration if the target wants to.
    if (getContext().getTargetInfo().allowDebugInfoForExternalRef()) {
      if (CGDebugInfo *DI = CGM.getModuleDebugInfo()) {
        auto *Fn =
            cast<llvm::Function>(LV.getPointer(*this)->stripPointerCasts());
        if (!Fn->getSubprogram())
          DI->EmitFunctionDecl(FD, FD->getLocation(), T, Fn);
      }
    }

    return LV;
  }

  // FIXME: While we're emitting a binding from an enclosing scope, all other
  // DeclRefExprs we see should be implicitly treated as if they also refer to
  // an enclosing scope.
  if (const auto *BD = dyn_cast<BindingDecl>(ND)) {
    if (E->refersToEnclosingVariableOrCapture()) {
      auto *FD = LambdaCaptureFields.lookup(BD);
      return EmitCapturedFieldLValue(*this, FD, CXXABIThisValue);
    }
    return EmitLValue(BD->getBinding());
  }

  // We can form DeclRefExprs naming GUID declarations when reconstituting
  // non-type template parameters into expressions.
  if (const auto *GD = dyn_cast<MSGuidDecl>(ND))
    return MakeAddrLValue(CGM.GetAddrOfMSGuidDecl(GD), T,
                          AlignmentSource::Decl);

  if (const auto *TPO = dyn_cast<TemplateParamObjectDecl>(ND))
    return MakeAddrLValue(CGM.GetAddrOfTemplateParamObject(TPO), T,
                          AlignmentSource::Decl);

  llvm_unreachable("Unhandled DeclRefExpr");
}
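
/// Emit an lvalue for a unary operator that yields one: dereference,
/// __real/__imag, and pre-increment/decrement.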
LValue CodeGenFunction::EmitUnaryOpLValue(const UnaryOperator *E) {
  // __extension__ doesn't affect lvalue-ness.
  if (E->getOpcode() == UO_Extension)
    return EmitLValue(E->getSubExpr());

  QualType ExprTy = getContext().getCanonicalType(E->getSubExpr()->getType());
  switch (E->getOpcode()) {
  default: llvm_unreachable("Unknown unary operator lvalue!");
  case UO_Deref: {
    QualType T = E->getSubExpr()->getType()->getPointeeType();
    assert(!T.isNull() && "CodeGenFunction::EmitUnaryOpLValue: Illegal type");

    LValueBaseInfo BaseInfo;
    TBAAAccessInfo TBAAInfo;
    Address Addr = EmitPointerWithAlignment(E->getSubExpr(), &BaseInfo,
                                            &TBAAInfo);
    LValue LV = MakeAddrLValue(Addr, T, BaseInfo, TBAAInfo);
    LV.getQuals().setAddressSpace(ExprTy.getAddressSpace());

    // We should not generate a __weak write barrier on an indirect reference
    // of a pointer to object; as in void foo (__weak id *param); *param = 0;
    // But we continue to generate a __strong write barrier on an indirect
    // write into a pointer to object.
    if (getLangOpts().ObjC &&
        getLangOpts().getGC() != LangOptions::NonGC &&
        LV.isObjCWeak())
      LV.setNonGC(!E->isOBJCGCCandidate(getContext()));
    return LV;
  }
  case UO_Real:
  case UO_Imag: {
    LValue LV = EmitLValue(E->getSubExpr());
    assert(LV.isSimple() && "real/imag on non-ordinary l-value");

    // __real is valid on scalars. This is a faster way of testing that.
    // __imag can only produce an rvalue on scalars.
    if (E->getOpcode() == UO_Real &&
        !LV.getAddress(*this).getElementType()->isStructTy()) {
      assert(E->getSubExpr()->getType()->isArithmeticType());
      return LV;
    }

    QualType T = ExprTy->castAs<ComplexType>()->getElementType();

    Address Component =
        (E->getOpcode() == UO_Real
             ? emitAddrOfRealComponent(LV.getAddress(*this), LV.getType())
             : emitAddrOfImagComponent(LV.getAddress(*this), LV.getType()));
    LValue ElemLV = MakeAddrLValue(Component, T, LV.getBaseInfo(),
                                   CGM.getTBAAInfoForSubobject(LV, T));
    ElemLV.getQuals().addQualifiers(LV.getQuals());
    return ElemLV;
  }
  case UO_PreInc:
  case UO_PreDec: {
    LValue LV = EmitLValue(E->getSubExpr());
    bool isInc = E->getOpcode() == UO_PreInc;

    if (E->getType()->isAnyComplexType())
      EmitComplexPrePostIncDec(E, LV, isInc, true/*isPre*/);
    else
      EmitScalarPrePostIncDec(E, LV, isInc, true/*isPre*/);
    return LV;
  }
  }
}

LValue CodeGenFunction::EmitStringLiteralLValue(const StringLiteral *E) {
  return MakeAddrLValue(CGM.GetAddrOfConstantStringFromLiteral(E),
                        E->getType(), AlignmentSource::Decl);
}

LValue CodeGenFunction::EmitObjCEncodeExprLValue(const ObjCEncodeExpr *E) {
  return MakeAddrLValue(CGM.GetAddrOfConstantStringFromObjCEncode(E),
                        E->getType(), AlignmentSource::Decl);
}

LValue CodeGenFunction::EmitPredefinedLValue(const PredefinedExpr *E) {
  auto SL = E->getFunctionName();
  assert(SL != nullptr && "No StringLiteral name in PredefinedExpr");
  StringRef FnName = CurFn->getName();
  if (FnName.startswith("\01"))
    FnName = FnName.substr(1);
  StringRef NameItems[] = {
      PredefinedExpr::getIdentKindName(E->getIdentKind()), FnName};
  std::string GVName = llvm::join(NameItems, NameItems + 2, ".");
  if (auto *BD = dyn_cast_or_null<BlockDecl>(CurCodeDecl)) {
    std::string Name = std::string(SL->getString());
    if (!Name.empty()) {
      unsigned Discriminator =
          CGM.getCXXABI().getMangleContext().getBlockId(BD, true);
      if (Discriminator)
        Name += "_" + Twine(Discriminator + 1).str();
      auto C = CGM.GetAddrOfConstantCString(Name, GVName.c_str());
      return MakeAddrLValue(C, E->getType(), AlignmentSource::Decl);
    } else {
      auto C =
          CGM.GetAddrOfConstantCString(std::string(FnName), GVName.c_str());
      return MakeAddrLValue(C, E->getType(), AlignmentSource::Decl);
    }
  }

  auto C = CGM.GetAddrOfConstantStringFromLiteral(SL, GVName);
  return MakeAddrLValue(C, E->getType(), AlignmentSource::Decl);
}

/// Emit a type description suitable for use by a runtime sanitizer library.
/// The format of a type descriptor is
///
/// \code
///   { i16 TypeKind, i16 TypeInfo }
/// \endcode
///
/// followed by an array of i8 containing the type name. TypeKind is 0 for an
/// integer, 1 for a floating point value, and -1 for anything else.
llvm::Constant *CodeGenFunction::EmitCheckTypeDescriptor(QualType T) {
  // Only emit each type's descriptor once.
  if (llvm::Constant *C = CGM.getTypeDescriptorFromMap(T))
    return C;

  uint16_t TypeKind = -1;
  uint16_t TypeInfo = 0;

  if (T->isIntegerType()) {
    TypeKind = 0;
    TypeInfo = (llvm::Log2_32(getContext().getTypeSize(T)) << 1) |
               (T->isSignedIntegerType() ? 1 : 0);
  } else if (T->isFloatingType()) {
    TypeKind = 1;
    TypeInfo = getContext().getTypeSize(T);
  }

  // Format the type name as if for a diagnostic, including quotes and
  // optionally an 'aka'.
  SmallString<32> Buffer;
  CGM.getDiags().ConvertArgToString(
      DiagnosticsEngine::ak_qualtype, (intptr_t)T.getAsOpaquePtr(), StringRef(),
      StringRef(), std::nullopt, Buffer, std::nullopt);

  llvm::Constant *Components[] = {
      Builder.getInt16(TypeKind), Builder.getInt16(TypeInfo),
      llvm::ConstantDataArray::getString(getLLVMContext(), Buffer)
  };
  llvm::Constant *Descriptor = llvm::ConstantStruct::getAnon(Components);

  auto *GV = new llvm::GlobalVariable(
      CGM.getModule(), Descriptor->getType(),
      /*isConstant=*/true, llvm::GlobalVariable::PrivateLinkage, Descriptor);
  GV->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
  CGM.getSanitizerMetadata()->disableSanitizerForGlobal(GV);

  // Remember the descriptor for this type.
  CGM.setTypeDescriptorInMap(T, GV);

  return GV;
}
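
/// Convert a value into a form that can be passed to a sanitizer handler as
/// an intptr_t: bitcast/zero-extend it if it fits, otherwise spill it to a
/// temporary and pass its address.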
llvm::Value *CodeGenFunction::EmitCheckValue(llvm::Value *V) {
  llvm::Type *TargetTy = IntPtrTy;

  if (V->getType() == TargetTy)
    return V;

  // Floating-point types which fit into intptr_t are bitcast to integers
  // and then passed directly (after zero-extension, if necessary).
  if (V->getType()->isFloatingPointTy()) {
    unsigned Bits = V->getType()->getPrimitiveSizeInBits().getFixedValue();
    if (Bits <= TargetTy->getIntegerBitWidth())
      V = Builder.CreateBitCast(V, llvm::Type::getIntNTy(getLLVMContext(),
                                                         Bits));
  }

  // Integers which fit in intptr_t are zero-extended and passed directly.
  if (V->getType()->isIntegerTy() &&
      V->getType()->getIntegerBitWidth() <= TargetTy->getIntegerBitWidth())
    return Builder.CreateZExt(V, TargetTy);

  // Pointers are passed directly, everything else is passed by address.
  if (!V->getType()->isPointerTy()) {
    Address Ptr = CreateDefaultAlignTempAlloca(V->getType());
    Builder.CreateStore(V, Ptr);
    V = Ptr.getPointer();
  }
  return Builder.CreatePtrToInt(V, TargetTy);
}

/// Emit a representation of a SourceLocation for passing to a handler
/// in a sanitizer runtime library. The format for this data is:
/// \code
///   struct SourceLocation {
///     const char *Filename;
///     int32_t Line, Column;
///   };
/// \endcode
/// For an invalid SourceLocation, the Filename pointer is null.
llvm::Constant *CodeGenFunction::EmitCheckSourceLocation(SourceLocation Loc) {
  llvm::Constant *Filename;
  int Line, Column;

  PresumedLoc PLoc = getContext().getSourceManager().getPresumedLoc(Loc);
  if (PLoc.isValid()) {
    StringRef FilenameString = PLoc.getFilename();

    int PathComponentsToStrip =
        CGM.getCodeGenOpts().EmitCheckPathComponentsToStrip;
    if (PathComponentsToStrip < 0) {
      assert(PathComponentsToStrip != INT_MIN);
      int PathComponentsToKeep = -PathComponentsToStrip;
      auto I = llvm::sys::path::rbegin(FilenameString);
      auto E = llvm::sys::path::rend(FilenameString);
      while (I != E && --PathComponentsToKeep)
        ++I;

      FilenameString = FilenameString.substr(I - E);
    } else if (PathComponentsToStrip > 0) {
      auto I = llvm::sys::path::begin(FilenameString);
      auto E = llvm::sys::path::end(FilenameString);
      while (I != E && PathComponentsToStrip--)
        ++I;

      if (I != E)
        FilenameString =
            FilenameString.substr(I - llvm::sys::path::begin(FilenameString));
      else
        FilenameString = llvm::sys::path::filename(FilenameString);
    }

    auto FilenameGV =
        CGM.GetAddrOfConstantCString(std::string(FilenameString), ".src");
    CGM.getSanitizerMetadata()->disableSanitizerForGlobal(
        cast<llvm::GlobalVariable>(
            FilenameGV.getPointer()->stripPointerCasts()));
    Filename = FilenameGV.getPointer();
    Line = PLoc.getLine();
    Column = PLoc.getColumn();
  } else {
    Filename = llvm::Constant::getNullValue(Int8PtrTy);
    Line = Column = 0;
  }

  llvm::Constant *Data[] = {Filename, Builder.getInt32(Line),
                            Builder.getInt32(Column)};

  return llvm::ConstantStruct::getAnon(Data);
}

namespace {
/// Specify under what conditions this check can be recovered
enum class CheckRecoverableKind {
  /// Always terminate program execution if this check fails.
  Unrecoverable,
  /// Check supports recovering, runtime has both fatal (noreturn) and
  /// non-fatal handlers for this check.
  Recoverable,
  /// Runtime conditionally aborts, always need to support recovery.
  AlwaysRecoverable
};
}

static CheckRecoverableKind getRecoverableKind(SanitizerMask Kind) {
  assert(Kind.countPopulation() == 1);
  if (Kind == SanitizerKind::Function || Kind == SanitizerKind::Vptr)
    return CheckRecoverableKind::AlwaysRecoverable;
  else if (Kind == SanitizerKind::Return || Kind == SanitizerKind::Unreachable)
    return CheckRecoverableKind::Unrecoverable;
  else
    return CheckRecoverableKind::Recoverable;
}

namespace {
struct SanitizerHandlerInfo {
  char const *const Name;
  unsigned Version;
};
}

const SanitizerHandlerInfo SanitizerHandlers[] = {
#define SANITIZER_CHECK(Enum, Name, Version) {#Name, Version},
    LIST_SANITIZER_CHECKS
#undef SANITIZER_CHECK
};

static void emitCheckHandlerCall(CodeGenFunction &CGF,
                                 llvm::FunctionType *FnType,
                                 ArrayRef<llvm::Value *> FnArgs,
                                 SanitizerHandler CheckHandler,
                                 CheckRecoverableKind RecoverKind, bool IsFatal,
                                 llvm::BasicBlock *ContBB) {
  assert(IsFatal || RecoverKind != CheckRecoverableKind::Unrecoverable);
  std::optional<ApplyDebugLocation> DL;
  if (!CGF.Builder.getCurrentDebugLocation()) {
    // Ensure that the call has at least an artificial debug location.
    DL.emplace(CGF, SourceLocation());
  }
  bool NeedsAbortSuffix =
      IsFatal && RecoverKind != CheckRecoverableKind::Unrecoverable;
  bool MinimalRuntime = CGF.CGM.getCodeGenOpts().SanitizeMinimalRuntime;
  const SanitizerHandlerInfo &CheckInfo = SanitizerHandlers[CheckHandler];
  const StringRef CheckName = CheckInfo.Name;
  std::string FnName = "__ubsan_handle_" + CheckName.str();
  if (CheckInfo.Version && !MinimalRuntime)
    FnName += "_v" + llvm::utostr(CheckInfo.Version);
  if (MinimalRuntime)
    FnName += "_minimal";
  if (NeedsAbortSuffix)
    FnName += "_abort";
  bool MayReturn =
      !IsFatal || RecoverKind == CheckRecoverableKind::AlwaysRecoverable;

  llvm::AttrBuilder B(CGF.getLLVMContext());
  if (!MayReturn) {
    B.addAttribute(llvm::Attribute::NoReturn)
        .addAttribute(llvm::Attribute::NoUnwind);
  }
  B.addUWTableAttr(llvm::UWTableKind::Default);

  llvm::FunctionCallee Fn = CGF.CGM.CreateRuntimeFunction(
      FnType, FnName,
      llvm::AttributeList::get(CGF.getLLVMContext(),
                               llvm::AttributeList::FunctionIndex, B),
      /*Local=*/true);
  llvm::CallInst *HandlerCall = CGF.EmitNounwindRuntimeCall(Fn, FnArgs);
  if (!MayReturn) {
    HandlerCall->setDoesNotReturn();
    CGF.Builder.CreateUnreachable();
  } else {
    CGF.Builder.CreateBr(ContBB);
  }
}
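
/// Emit one or more sanitizer checks that share a handler. The \p Checked
/// conditions are true on success; failing conditions are routed to a trap,
/// a fatal handler call, or a recoverable handler call depending on the
/// -fsanitize-trap and -fsanitize-recover settings.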
void CodeGenFunction::EmitCheck(
    ArrayRef<std::pair<llvm::Value *, SanitizerMask>> Checked,
    SanitizerHandler CheckHandler, ArrayRef<llvm::Constant *> StaticArgs,
    ArrayRef<llvm::Value *> DynamicArgs) {
  assert(IsSanitizerScope);
  assert(Checked.size() > 0);
  assert(CheckHandler >= 0 &&
         size_t(CheckHandler) < std::size(SanitizerHandlers));
  const StringRef CheckName = SanitizerHandlers[CheckHandler].Name;

  llvm::Value *FatalCond = nullptr;
  llvm::Value *RecoverableCond = nullptr;
  llvm::Value *TrapCond = nullptr;
  for (int i = 0, n = Checked.size(); i < n; ++i) {
    llvm::Value *Check = Checked[i].first;
    // -fsanitize-trap= overrides -fsanitize-recover=.
    llvm::Value *&Cond =
        CGM.getCodeGenOpts().SanitizeTrap.has(Checked[i].second)
            ? TrapCond
            : CGM.getCodeGenOpts().SanitizeRecover.has(Checked[i].second)
                  ? RecoverableCond
                  : FatalCond;
    Cond = Cond ? Builder.CreateAnd(Cond, Check) : Check;
  }

  if (TrapCond)
    EmitTrapCheck(TrapCond, CheckHandler);
  if (!FatalCond && !RecoverableCond)
    return;

  llvm::Value *JointCond;
  if (FatalCond && RecoverableCond)
    JointCond = Builder.CreateAnd(FatalCond, RecoverableCond);
  else
    JointCond = FatalCond ? FatalCond : RecoverableCond;
  assert(JointCond);

  CheckRecoverableKind RecoverKind = getRecoverableKind(Checked[0].second);
  assert(SanOpts.has(Checked[0].second));
#ifndef NDEBUG
  for (int i = 1, n = Checked.size(); i < n; ++i) {
    assert(RecoverKind == getRecoverableKind(Checked[i].second) &&
           "All recoverable kinds in a single check must be same!");
    assert(SanOpts.has(Checked[i].second));
  }
#endif

  llvm::BasicBlock *Cont = createBasicBlock("cont");
  llvm::BasicBlock *Handlers = createBasicBlock("handler." + CheckName);
  llvm::Instruction *Branch = Builder.CreateCondBr(JointCond, Cont, Handlers);
  // Give hint that we very much don't expect to execute the handler.
  // Value chosen to match UR_NONTAKEN_WEIGHT, see BranchProbabilityInfo.cpp
  llvm::MDBuilder MDHelper(getLLVMContext());
  llvm::MDNode *Node = MDHelper.createBranchWeights((1U << 20) - 1, 1);
  Branch->setMetadata(llvm::LLVMContext::MD_prof, Node);
  EmitBlock(Handlers);

  // Handler functions take an i8* pointing to the (handler-specific) static
  // information block, followed by a sequence of intptr_t arguments
  // representing operand values.
  SmallVector<llvm::Value *, 4> Args;
  SmallVector<llvm::Type *, 4> ArgTypes;
  if (!CGM.getCodeGenOpts().SanitizeMinimalRuntime) {
    Args.reserve(DynamicArgs.size() + 1);
    ArgTypes.reserve(DynamicArgs.size() + 1);

    // Emit handler arguments and create handler function type.
    if (!StaticArgs.empty()) {
      llvm::Constant *Info = llvm::ConstantStruct::getAnon(StaticArgs);
      auto *InfoPtr = new llvm::GlobalVariable(
          CGM.getModule(), Info->getType(), false,
          llvm::GlobalVariable::PrivateLinkage, Info, "", nullptr,
          llvm::GlobalVariable::NotThreadLocal,
          CGM.getDataLayout().getDefaultGlobalsAddressSpace());
      InfoPtr->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
      CGM.getSanitizerMetadata()->disableSanitizerForGlobal(InfoPtr);
      Args.push_back(EmitCastToVoidPtr(InfoPtr));
      ArgTypes.push_back(Args.back()->getType());
    }

    for (size_t i = 0, n = DynamicArgs.size(); i != n; ++i) {
      Args.push_back(EmitCheckValue(DynamicArgs[i]));
      ArgTypes.push_back(IntPtrTy);
    }
  }

  llvm::FunctionType *FnType =
      llvm::FunctionType::get(CGM.VoidTy, ArgTypes, false);

  if (!FatalCond || !RecoverableCond) {
    // Simple case: we need to generate a single handler call, either
    // fatal, or non-fatal.
    emitCheckHandlerCall(*this, FnType, Args, CheckHandler, RecoverKind,
                         (FatalCond != nullptr), Cont);
  } else {
    // Emit two handler calls: first one for the set of unrecoverable checks,
    // another one for the recoverable ones.
    llvm::BasicBlock *NonFatalHandlerBB =
        createBasicBlock("non_fatal." + CheckName);
    llvm::BasicBlock *FatalHandlerBB = createBasicBlock("fatal." + CheckName);
    Builder.CreateCondBr(FatalCond, NonFatalHandlerBB, FatalHandlerBB);
    EmitBlock(FatalHandlerBB);
    emitCheckHandlerCall(*this, FnType, Args, CheckHandler, RecoverKind, true,
                         NonFatalHandlerBB);
    EmitBlock(NonFatalHandlerBB);
    emitCheckHandlerCall(*this, FnType, Args, CheckHandler, RecoverKind, false,
                         Cont);
  }

  EmitBlock(Cont);
}
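
/// Emit a slow-path cross-DSO CFI check, calling __cfi_slowpath (or
/// __cfi_slowpath_diag when diagnostics are enabled) if the fast-path
/// condition \p Cond fails.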
void CodeGenFunction::EmitCfiSlowPathCheck(
    SanitizerMask Kind, llvm::Value *Cond, llvm::ConstantInt *TypeId,
    llvm::Value *Ptr, ArrayRef<llvm::Constant *> StaticArgs) {
  llvm::BasicBlock *Cont = createBasicBlock("cfi.cont");

  llvm::BasicBlock *CheckBB = createBasicBlock("cfi.slowpath");
  llvm::BranchInst *BI = Builder.CreateCondBr(Cond, Cont, CheckBB);

  llvm::MDBuilder MDHelper(getLLVMContext());
  llvm::MDNode *Node = MDHelper.createBranchWeights((1U << 20) - 1, 1);
  BI->setMetadata(llvm::LLVMContext::MD_prof, Node);

  EmitBlock(CheckBB);

  bool WithDiag = !CGM.getCodeGenOpts().SanitizeTrap.has(Kind);

  llvm::CallInst *CheckCall;
  llvm::FunctionCallee SlowPathFn;
  if (WithDiag) {
    llvm::Constant *Info = llvm::ConstantStruct::getAnon(StaticArgs);
    auto *InfoPtr =
        new llvm::GlobalVariable(CGM.getModule(), Info->getType(), false,
                                 llvm::GlobalVariable::PrivateLinkage, Info);
    InfoPtr->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
    CGM.getSanitizerMetadata()->disableSanitizerForGlobal(InfoPtr);

    SlowPathFn = CGM.getModule().getOrInsertFunction(
        "__cfi_slowpath_diag",
        llvm::FunctionType::get(VoidTy, {Int64Ty, Int8PtrTy, Int8PtrTy},
                                false));
    CheckCall = Builder.CreateCall(
        SlowPathFn, {TypeId, Ptr, Builder.CreateBitCast(InfoPtr, Int8PtrTy)});
  } else {
    SlowPathFn = CGM.getModule().getOrInsertFunction(
        "__cfi_slowpath",
        llvm::FunctionType::get(VoidTy, {Int64Ty, Int8PtrTy}, false));
    CheckCall = Builder.CreateCall(SlowPathFn, {TypeId, Ptr});
  }

  CGM.setDSOLocal(
      cast<llvm::GlobalValue>(SlowPathFn.getCallee()->stripPointerCasts()));
  CheckCall->setDoesNotThrow();

  EmitBlock(Cont);
}

// Emit a stub for the __cfi_check function so that the linker knows about
// this symbol in LTO mode.
void CodeGenFunction::EmitCfiCheckStub() {
  llvm::Module *M = &CGM.getModule();
  auto &Ctx = M->getContext();
  llvm::Function *F = llvm::Function::Create(
      llvm::FunctionType::get(VoidTy, {Int64Ty, Int8PtrTy, Int8PtrTy}, false),
      llvm::GlobalValue::WeakAnyLinkage, "__cfi_check", M);
  CGM.setDSOLocal(F);
  llvm::BasicBlock *BB = llvm::BasicBlock::Create(Ctx, "entry", F);
  // FIXME: consider emitting an intrinsic call like
  // call void @llvm.cfi_check(i64 %0, i8* %1, i8* %2)
  // which can be lowered in CrossDSOCFI pass to the actual contents of
  // __cfi_check. This would allow inlining of __cfi_check calls.
  llvm::CallInst::Create(
      llvm::Intrinsic::getDeclaration(M, llvm::Intrinsic::trap), "", BB);
  llvm::ReturnInst::Create(Ctx, nullptr, BB);
}

// This function is basically a switch over the CFI failure kind, which is
// extracted from CFICheckFailData (1st function argument). Each case is either
// llvm.trap or a call to one of the two runtime handlers, based on
// -fsanitize-trap and -fsanitize-recover settings. Default case (invalid
// failure kind) traps, but this should really never happen. CFICheckFailData
// can be nullptr if the calling module has -fsanitize-trap behavior for this
// check kind; in this case __cfi_check_fail traps as well.
void CodeGenFunction::EmitCfiCheckFail() {
  SanitizerScope SanScope(this);
  FunctionArgList Args;
  ImplicitParamDecl ArgData(getContext(), getContext().VoidPtrTy,
                            ImplicitParamDecl::Other);
  ImplicitParamDecl ArgAddr(getContext(), getContext().VoidPtrTy,
                            ImplicitParamDecl::Other);
  Args.push_back(&ArgData);
  Args.push_back(&ArgAddr);

  const CGFunctionInfo &FI =
      CGM.getTypes().arrangeBuiltinFunctionDeclaration(getContext().VoidTy,
                                                       Args);

  llvm::Function *F = llvm::Function::Create(
      llvm::FunctionType::get(VoidTy, {VoidPtrTy, VoidPtrTy}, false),
      llvm::GlobalValue::WeakODRLinkage, "__cfi_check_fail", &CGM.getModule());

  CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI, F, /*IsThunk=*/false);
  CGM.SetLLVMFunctionAttributesForDefinition(nullptr, F);
  F->setVisibility(llvm::GlobalValue::HiddenVisibility);

  StartFunction(GlobalDecl(), CGM.getContext().VoidTy, F, FI, Args,
                SourceLocation());

  // This function is not affected by NoSanitizeList. This function does
  // not have a source location, but "src:*" would still apply. Revert any
  // changes to SanOpts made in StartFunction.
  SanOpts = CGM.getLangOpts().Sanitize;

  llvm::Value *Data =
      EmitLoadOfScalar(GetAddrOfLocalVar(&ArgData), /*Volatile=*/false,
                       CGM.getContext().VoidPtrTy, ArgData.getLocation());
  llvm::Value *Addr =
      EmitLoadOfScalar(GetAddrOfLocalVar(&ArgAddr), /*Volatile=*/false,
                       CGM.getContext().VoidPtrTy, ArgAddr.getLocation());

  // Data == nullptr means the calling module has trap behavior for this check.
  llvm::Value *DataIsNotNullPtr =
      Builder.CreateICmpNE(Data, llvm::ConstantPointerNull::get(Int8PtrTy));
  EmitTrapCheck(DataIsNotNullPtr, SanitizerHandler::CFICheckFail);

  llvm::StructType *SourceLocationTy =
      llvm::StructType::get(VoidPtrTy, Int32Ty, Int32Ty);
  llvm::StructType *CfiCheckFailDataTy =
      llvm::StructType::get(Int8Ty, SourceLocationTy, VoidPtrTy);

  llvm::Value *V = Builder.CreateConstGEP2_32(
      CfiCheckFailDataTy,
      Builder.CreatePointerCast(Data, CfiCheckFailDataTy->getPointerTo(0)), 0,
      0);

  Address CheckKindAddr(V, Int8Ty, getIntAlign());
  llvm::Value *CheckKind = Builder.CreateLoad(CheckKindAddr);

  llvm::Value *AllVtables = llvm::MetadataAsValue::get(
      CGM.getLLVMContext(),
      llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
  llvm::Value *ValidVtable = Builder.CreateZExt(
      Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
                         {Addr, AllVtables}),
      IntPtrTy);

  const std::pair<int, SanitizerMask> CheckKinds[] = {
      {CFITCK_VCall, SanitizerKind::CFIVCall},
      {CFITCK_NVCall, SanitizerKind::CFINVCall},
      {CFITCK_DerivedCast, SanitizerKind::CFIDerivedCast},
      {CFITCK_UnrelatedCast, SanitizerKind::CFIUnrelatedCast},
      {CFITCK_ICall, SanitizerKind::CFIICall}};

  SmallVector<std::pair<llvm::Value *, SanitizerMask>, 5> Checks;
  for (auto CheckKindMaskPair : CheckKinds) {
    int Kind = CheckKindMaskPair.first;
    SanitizerMask Mask = CheckKindMaskPair.second;
    llvm::Value *Cond =
        Builder.CreateICmpNE(CheckKind, llvm::ConstantInt::get(Int8Ty, Kind));
    if (CGM.getLangOpts().Sanitize.has(Mask))
      EmitCheck(std::make_pair(Cond, Mask), SanitizerHandler::CFICheckFail, {},
                {Data, Addr, ValidVtable});
    else
      EmitTrapCheck(Cond, SanitizerHandler::CFICheckFail);
  }

  FinishFunction();
  // The only reference to this function will be created during LTO link.
  // Make sure it survives until then.
  CGM.addUsedGlobal(F);
}

void CodeGenFunction::EmitUnreachable(SourceLocation Loc) {
  if (SanOpts.has(SanitizerKind::Unreachable)) {
    SanitizerScope SanScope(this);
    EmitCheck(std::make_pair(static_cast<llvm::Value *>(Builder.getFalse()),
                             SanitizerKind::Unreachable),
              SanitizerHandler::BuiltinUnreachable,
              EmitCheckSourceLocation(Loc), std::nullopt);
  }
  Builder.CreateUnreachable();
}
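
/// Branch to a trap (llvm.ubsantrap) if \p Checked is false. When
/// optimizing, trap blocks are reused per check-type within a function to
/// save code size.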
void CodeGenFunction::EmitTrapCheck(llvm::Value *Checked,
                                    SanitizerHandler CheckHandlerID) {
  llvm::BasicBlock *Cont = createBasicBlock("cont");

  // If we're optimizing, collapse all calls to trap down to just one per
  // check-type per function to save on code size.
  if (TrapBBs.size() <= CheckHandlerID)
    TrapBBs.resize(CheckHandlerID + 1);
  llvm::BasicBlock *&TrapBB = TrapBBs[CheckHandlerID];

  if (!CGM.getCodeGenOpts().OptimizationLevel || !TrapBB ||
      (CurCodeDecl && CurCodeDecl->hasAttr<OptimizeNoneAttr>())) {
    TrapBB = createBasicBlock("trap");
    Builder.CreateCondBr(Checked, Cont, TrapBB);
    EmitBlock(TrapBB);

    llvm::CallInst *TrapCall =
        Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::ubsantrap),
                           llvm::ConstantInt::get(CGM.Int8Ty, CheckHandlerID));

    if (!CGM.getCodeGenOpts().TrapFuncName.empty()) {
      auto A = llvm::Attribute::get(getLLVMContext(), "trap-func-name",
                                    CGM.getCodeGenOpts().TrapFuncName);
      TrapCall->addFnAttr(A);
    }
    TrapCall->setDoesNotReturn();
    TrapCall->setDoesNotThrow();
    Builder.CreateUnreachable();
  } else {
    auto Call = TrapBB->begin();
    assert(isa<llvm::CallInst>(Call) && "Expected call in trap BB");

    Call->applyMergedLocation(Call->getDebugLoc(),
                              Builder.getCurrentDebugLocation());
    Builder.CreateCondBr(Checked, Cont, TrapBB);
  }

  EmitBlock(Cont);
}

llvm::CallInst *CodeGenFunction::EmitTrapCall(llvm::Intrinsic::ID IntrID) {
  llvm::CallInst *TrapCall =
      Builder.CreateCall(CGM.getIntrinsic(IntrID));

  if (!CGM.getCodeGenOpts().TrapFuncName.empty()) {
    auto A = llvm::Attribute::get(getLLVMContext(), "trap-func-name",
                                  CGM.getCodeGenOpts().TrapFuncName);
    TrapCall->addFnAttr(A);
  }

  return TrapCall;
}

Address CodeGenFunction::EmitArrayToPointerDecay(const Expr *E,
                                                 LValueBaseInfo *BaseInfo,
                                                 TBAAAccessInfo *TBAAInfo) {
  assert(E->getType()->isArrayType() &&
         "Array to pointer decay must have array source type!");

  // Expressions of array type can't be bitfields or vector elements.
  LValue LV = EmitLValue(E);
  Address Addr = LV.getAddress(*this);

  // If the array type was an incomplete type, we need to make sure
  // the decay ends up being the right type.
  llvm::Type *NewTy = ConvertType(E->getType());
  Addr = Builder.CreateElementBitCast(Addr, NewTy);

  // Note that VLA pointers are always decayed, so we don't need to do
  // anything here.
  if (!E->getType()->isVariableArrayType()) {
    assert(isa<llvm::ArrayType>(Addr.getElementType()) &&
           "Expected pointer to array");
    Addr = Builder.CreateConstArrayGEP(Addr, 0, "arraydecay");
  }

  // The result of this decay conversion points to an array element within the
  // base lvalue. However, since TBAA currently does not support representing
  // accesses to elements of member arrays, we conservatively represent
  // accesses to the pointee object as if it had no base lvalue specified.
  // TODO: Support TBAA for member arrays.
  QualType EltType = E->getType()->castAsArrayTypeUnsafe()->getElementType();
  if (BaseInfo) *BaseInfo = LV.getBaseInfo();
  if (TBAAInfo) *TBAAInfo = CGM.getTBAAAccessInfo(EltType);

  return Builder.CreateElementBitCast(Addr, ConvertTypeForMem(EltType));
}

/// isSimpleArrayDecayOperand - If the specified expr is a simple decay from an
/// array to pointer, return the array subexpression.
static const Expr *isSimpleArrayDecayOperand(const Expr *E) {
  // If this isn't just an array->pointer decay, bail out.
  const auto *CE = dyn_cast<CastExpr>(E);
  if (!CE || CE->getCastKind() != CK_ArrayToPointerDecay)
    return nullptr;

  // If this is a decay from a variable-length array, bail out.
  const Expr *SubExpr = CE->getSubExpr();
  if (SubExpr->getType()->isVariableArrayType())
    return nullptr;

  return SubExpr;
}
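
/// Emit a GEP for an array subscript. An inbounds access is routed through
/// EmitCheckedInBoundsGEP, which also lets -fsanitize=pointer-overflow
/// instrument the pointer arithmetic itself.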
static llvm::Value *emitArraySubscriptGEP(CodeGenFunction &CGF,
                                          llvm::Type *elemType,
                                          llvm::Value *ptr,
                                          ArrayRef<llvm::Value *> indices,
                                          bool inbounds,
                                          bool signedIndices,
                                          SourceLocation loc,
                                          const llvm::Twine &name = "arrayidx") {
  if (inbounds) {
    return CGF.EmitCheckedInBoundsGEP(elemType, ptr, indices, signedIndices,
                                      CodeGenFunction::NotSubtraction, loc,
                                      name);
  } else {
    return CGF.Builder.CreateGEP(elemType, ptr, indices, name);
  }
}

static CharUnits getArrayElementAlign(CharUnits arrayAlign,
                                      llvm::Value *idx,
                                      CharUnits eltSize) {
  // If we have a constant index, we can use the exact offset of the
  // element we're accessing.
  if (auto constantIdx = dyn_cast<llvm::ConstantInt>(idx)) {
    CharUnits offset = constantIdx->getZExtValue() * eltSize;
    return arrayAlign.alignmentAtOffset(offset);

  // Otherwise, use the worst-case alignment for any element.
  } else {
    return arrayAlign.alignmentOfArrayElement(eltSize);
  }
}
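
/// Peel off nested VLA levels and return the innermost element type, whose
/// size is known at compile time.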
static QualType getFixedSizeElementType(const ASTContext &ctx,
                                        const VariableArrayType *vla) {
  QualType eltType;
  do {
    eltType = vla->getElementType();
  } while ((vla = ctx.getAsVariableArrayType(eltType)));
  return eltType;
}

/// Given an array base, check whether its member access belongs to a record
/// with preserve_access_index attribute or not.
static bool IsPreserveAIArrayBase(CodeGenFunction &CGF, const Expr *ArrayBase) {
  if (!ArrayBase || !CGF.getDebugInfo())
    return false;

  // Only support base as either a MemberExpr or DeclRefExpr.
  // DeclRefExpr to cover cases like:
  //    struct s { int a; int b[10]; };
  //    struct s *p;
  //    p[1].a
  // p[1] will generate a DeclRefExpr and p[1].a is a MemberExpr.
  // p->b[5] is a MemberExpr example.
  const Expr *E = ArrayBase->IgnoreImpCasts();
  if (const auto *ME = dyn_cast<MemberExpr>(E))
    return ME->getMemberDecl()->hasAttr<BPFPreserveAccessIndexAttr>();

  if (const auto *DRE = dyn_cast<DeclRefExpr>(E)) {
    const auto *VarDef = dyn_cast<VarDecl>(DRE->getDecl());
    if (!VarDef)
      return false;

    const auto *PtrT = VarDef->getType()->getAs<PointerType>();
    if (!PtrT)
      return false;

    const auto *PointeeT = PtrT->getPointeeType()
        ->getUnqualifiedDesugaredType();
    if (const auto *RecT = dyn_cast<RecordType>(PointeeT))
      return RecT->getDecl()->hasAttr<BPFPreserveAccessIndexAttr>();
    return false;
  }

  return false;
}

static Address emitArraySubscriptGEP(CodeGenFunction &CGF, Address addr,
                                     ArrayRef<llvm::Value *> indices,
                                     QualType eltType, bool inbounds,
                                     bool signedIndices, SourceLocation loc,
                                     QualType *arrayType = nullptr,
                                     const Expr *Base = nullptr,
                                     const llvm::Twine &name = "arrayidx") {
  // All the indices except the last must be zero.
#ifndef NDEBUG
  for (auto *idx : indices.drop_back())
    assert(isa<llvm::ConstantInt>(idx) &&
           cast<llvm::ConstantInt>(idx)->isZero());
#endif

  // Determine the element size of the statically-sized base.  This is
  // the thing that the indices are expressed in terms of.
  if (auto vla = CGF.getContext().getAsVariableArrayType(eltType)) {
    eltType = getFixedSizeElementType(CGF.getContext(), vla);
  }

  // We can use that to compute the best alignment of the element.
  CharUnits eltSize = CGF.getContext().getTypeSizeInChars(eltType);
  CharUnits eltAlign =
      getArrayElementAlign(addr.getAlignment(), indices.back(), eltSize);

  llvm::Value *eltPtr;
  auto LastIndex = dyn_cast<llvm::ConstantInt>(indices.back());
  if (!LastIndex ||
      (!CGF.IsInPreservedAIRegion && !IsPreserveAIArrayBase(CGF, Base))) {
    eltPtr = emitArraySubscriptGEP(
        CGF, addr.getElementType(), addr.getPointer(), indices, inbounds,
        signedIndices, loc, name);
  } else {
    // Remember the original array subscript for bpf target
    unsigned idx = LastIndex->getZExtValue();
    llvm::DIType *DbgInfo = nullptr;
    if (arrayType)
      DbgInfo = CGF.getDebugInfo()->getOrCreateStandaloneType(*arrayType, loc);
    eltPtr = CGF.Builder.CreatePreserveArrayAccessIndex(addr.getElementType(),
                                                        addr.getPointer(),
                                                        indices.size() - 1,
                                                        idx, DbgInfo);
  }

  return Address(eltPtr, CGF.ConvertTypeForMem(eltType), eltAlign);
}
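
/// Emit an l-value for an array subscript such as A[i] or p[i]. When the base
/// is a direct array (rather than an already-decayed pointer), the decay and
/// the indexing fold into a single "gep A, 0, i"; for multidimensional
/// accesses like A[i][j] this applies recursively, and the inner subscript is
/// emitted with Accessed=true so -fsanitize=array-bounds can check it.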
LValue CodeGenFunction::EmitArraySubscriptExpr(const ArraySubscriptExpr *E,
                                               bool Accessed) {
  // The index must always be an integer, which is not an aggregate.  Emit it
  // in lexical order (this complexity is, sadly, required by C++17).
  llvm::Value *IdxPre =
      (E->getLHS() == E->getIdx()) ? EmitScalarExpr(E->getIdx()) : nullptr;
  bool SignedIndices = false;
  auto EmitIdxAfterBase = [&, IdxPre](bool Promote) -> llvm::Value * {
    auto *Idx = IdxPre;
    if (E->getLHS() != E->getIdx()) {
      assert(E->getRHS() == E->getIdx() && "index was neither LHS nor RHS");
      Idx = EmitScalarExpr(E->getIdx());
    }

    QualType IdxTy = E->getIdx()->getType();
    bool IdxSigned = IdxTy->isSignedIntegerOrEnumerationType();
    SignedIndices |= IdxSigned;

    if (SanOpts.has(SanitizerKind::ArrayBounds))
      EmitBoundsCheck(E, E->getBase(), Idx, IdxTy, Accessed);

    // Extend or truncate the index type to 32 or 64 bits.
    if (Promote && Idx->getType() != IntPtrTy)
      Idx = Builder.CreateIntCast(Idx, IntPtrTy, IdxSigned, "idxprom");

    return Idx;
  };
  IdxPre = nullptr;

  // If the base is a vector type, then we are forming a vector element lvalue
  // with this subscript.
  if (E->getBase()->getType()->isVectorType() &&
      !isa<ExtVectorElementExpr>(E->getBase())) {
    // Emit the vector as an lvalue to get its address.
    LValue LHS = EmitLValue(E->getBase());
    auto *Idx = EmitIdxAfterBase(/*Promote*/ false);
    assert(LHS.isSimple() && "Can only subscript lvalue vectors here!");
    return LValue::MakeVectorElt(LHS.getAddress(*this), Idx,
                                 E->getBase()->getType(), LHS.getBaseInfo(),
                                 TBAAAccessInfo());
  }

  // All the other cases basically behave like simple offsetting.

  // Handle the extvector case we ignored above.
  if (isa<ExtVectorElementExpr>(E->getBase())) {
    LValue LV = EmitLValue(E->getBase());
    auto *Idx = EmitIdxAfterBase(/*Promote*/ true);
    Address Addr = EmitExtVectorElementLValue(LV);

    QualType EltType = LV.getType()->castAs<VectorType>()->getElementType();
    Addr = emitArraySubscriptGEP(*this, Addr, Idx, EltType, /*inbounds*/ true,
                                 SignedIndices, E->getExprLoc());
    return MakeAddrLValue(Addr, EltType, LV.getBaseInfo(),
                          CGM.getTBAAInfoForSubobject(LV, EltType));
  }

  LValueBaseInfo EltBaseInfo;
  TBAAAccessInfo EltTBAAInfo;
  Address Addr = Address::invalid();
  if (const VariableArrayType *vla =
          getContext().getAsVariableArrayType(E->getType())) {
    // The base must be a pointer, which is not an aggregate.  Emit
    // it.  It needs to be emitted first in case it's what captures
    // the VLA bounds.
    Addr = EmitPointerWithAlignment(E->getBase(), &EltBaseInfo, &EltTBAAInfo);
    auto *Idx = EmitIdxAfterBase(/*Promote*/ true);

    // The element count here is the total number of non-VLA elements.
    llvm::Value *numElements = getVLASize(vla).NumElts;

    // Effectively, the multiply by the VLA size is part of the GEP.
    // GEP indexes are signed, and scaling an index isn't permitted to
    // signed-overflow, so we use the same semantics for our explicit
    // multiply.  We suppress this if overflow is not undefined behavior.
    if (getLangOpts().isSignedOverflowDefined()) {
      Idx = Builder.CreateMul(Idx, numElements);
    } else {
      Idx = Builder.CreateNSWMul(Idx, numElements);
    }

    Addr = emitArraySubscriptGEP(*this, Addr, Idx, vla->getElementType(),
                                 !getLangOpts().isSignedOverflowDefined(),
                                 SignedIndices, E->getExprLoc());

  } else if (const ObjCObjectType *OIT = E->getType()->getAs<ObjCObjectType>()){
    // Indexing over an interface, as in "NSString *P; P[4];"

    // Emit the base pointer.
    Addr = EmitPointerWithAlignment(E->getBase(), &EltBaseInfo, &EltTBAAInfo);
    auto *Idx = EmitIdxAfterBase(/*Promote*/ true);

    CharUnits InterfaceSize = getContext().getTypeSizeInChars(OIT);
    llvm::Value *InterfaceSizeVal =
        llvm::ConstantInt::get(Idx->getType(), InterfaceSize.getQuantity());

    llvm::Value *ScaledIdx = Builder.CreateMul(Idx, InterfaceSizeVal);

    // We don't necessarily build correct LLVM struct types for ObjC
    // interfaces, so we can't rely on GEP to do this scaling
    // correctly, so we need to cast to i8*.  FIXME: is this actually
    // true?  A lot of other things in the fragile ABI would break...
    llvm::Type *OrigBaseElemTy = Addr.getElementType();
    Addr = Builder.CreateElementBitCast(Addr, Int8Ty);

    // Do the GEP.
    CharUnits EltAlign =
        getArrayElementAlign(Addr.getAlignment(), Idx, InterfaceSize);
    llvm::Value *EltPtr =
        emitArraySubscriptGEP(*this, Addr.getElementType(), Addr.getPointer(),
                              ScaledIdx, false, SignedIndices, E->getExprLoc());
    Addr = Address(EltPtr, Addr.getElementType(), EltAlign);

    // Cast back.
    Addr = Builder.CreateElementBitCast(Addr, OrigBaseElemTy);
  } else if (const Expr *Array = isSimpleArrayDecayOperand(E->getBase())) {
    // If this is A[i] where A is an array, the frontend will have decayed the
    // base to be an ArrayToPointerDecay implicit cast.  While correct, it is
    // inefficient at -O0 to emit a "gep A, 0, 0" when codegen'ing it, then a
    // "gep x, i" here.  Emit one "gep A, 0, i".
    assert(Array->getType()->isArrayType() &&
           "Array to pointer decay must have array source type!");
    LValue ArrayLV;
    // For simple multidimensional array indexing, set the 'accessed' flag for
    // better bounds-checking of the base expression.
    if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Array))
      ArrayLV = EmitArraySubscriptExpr(ASE, /*Accessed*/ true);
    else
      ArrayLV = EmitLValue(Array);
    auto *Idx = EmitIdxAfterBase(/*Promote*/ true);

    // Propagate the alignment from the array itself to the result.
    QualType arrayType = Array->getType();
    Addr = emitArraySubscriptGEP(
        *this, ArrayLV.getAddress(*this), {CGM.getSize(CharUnits::Zero()), Idx},
        E->getType(), !getLangOpts().isSignedOverflowDefined(), SignedIndices,
        E->getExprLoc(), &arrayType, E->getBase());
    EltBaseInfo = ArrayLV.getBaseInfo();
    EltTBAAInfo = CGM.getTBAAInfoForSubobject(ArrayLV, E->getType());
  } else {
    // The base must be a pointer; emit it with an estimate of its alignment.
    Addr = EmitPointerWithAlignment(E->getBase(), &EltBaseInfo, &EltTBAAInfo);
    auto *Idx = EmitIdxAfterBase(/*Promote*/ true);
    QualType ptrType = E->getBase()->getType();
    Addr = emitArraySubscriptGEP(*this, Addr, Idx, E->getType(),
                                 !getLangOpts().isSignedOverflowDefined(),
                                 SignedIndices, E->getExprLoc(), &ptrType,
                                 E->getBase());
  }

  LValue LV = MakeAddrLValue(Addr, E->getType(), EltBaseInfo, EltTBAAInfo);

  if (getLangOpts().ObjC &&
      getLangOpts().getGC() != LangOptions::NonGC) {
    LV.setNonGC(!E->isOBJCGCCandidate(getContext()));
    setObjCGCLValueClass(getContext(), E, LV);
  }
  return LV;
}
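
/// Matrix values are lowered to flat vectors laid out in column-major order,
/// so element (row, col) of a matrix with R rows lives at vector index
/// col * R + row; e.g. for a 4x3 matrix, M[2][1] is vector element
/// 1 * 4 + 2 == 6.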
LValue CodeGenFunction::EmitMatrixSubscriptExpr(const MatrixSubscriptExpr *E) {
  assert(
      !E->isIncomplete() &&
      "incomplete matrix subscript expressions should be rejected during Sema");
  LValue Base = EmitLValue(E->getBase());
  llvm::Value *RowIdx = EmitScalarExpr(E->getRowIdx());
  llvm::Value *ColIdx = EmitScalarExpr(E->getColumnIdx());
  llvm::Value *NumRows = Builder.getIntN(
      RowIdx->getType()->getScalarSizeInBits(),
      E->getBase()->getType()->castAs<ConstantMatrixType>()->getNumRows());
  llvm::Value *FinalIdx =
      Builder.CreateAdd(Builder.CreateMul(ColIdx, NumRows), RowIdx);
  return LValue::MakeMatrixElt(
      MaybeConvertMatrixAddress(Base.getAddress(*this), *this), FinalIdx,
      E->getBase()->getType(), Base.getBaseInfo(), TBAAAccessInfo());
}
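
/// Compute the base address of an OpenMP array section. A nested section is
/// emitted recursively: an array base then decays as usual, while a pointer
/// base is loaded. Any other base is simply emitted as a pointer with its
/// natural alignment.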
static Address emitOMPArraySectionBase(CodeGenFunction &CGF, const Expr *Base,
                                       LValueBaseInfo &BaseInfo,
                                       TBAAAccessInfo &TBAAInfo,
                                       QualType BaseTy, QualType ElTy,
                                       bool IsLowerBound) {
  LValue BaseLVal;
  if (auto *ASE = dyn_cast<OMPArraySectionExpr>(Base->IgnoreParenImpCasts())) {
    BaseLVal = CGF.EmitOMPArraySectionExpr(ASE, IsLowerBound);
    if (BaseTy->isArrayType()) {
      Address Addr = BaseLVal.getAddress(CGF);
      BaseInfo = BaseLVal.getBaseInfo();

      // If the array type was an incomplete type, we need to make sure
      // the decay ends up being the right type.
      llvm::Type *NewTy = CGF.ConvertType(BaseTy);
      Addr = CGF.Builder.CreateElementBitCast(Addr, NewTy);

      // Note that VLA pointers are always decayed, so we don't need to do
      // anything here.
      if (!BaseTy->isVariableArrayType()) {
        assert(isa<llvm::ArrayType>(Addr.getElementType()) &&
               "Expected pointer to array");
        Addr = CGF.Builder.CreateConstArrayGEP(Addr, 0, "arraydecay");
      }

      return CGF.Builder.CreateElementBitCast(Addr,
                                              CGF.ConvertTypeForMem(ElTy));
    }
    LValueBaseInfo TypeBaseInfo;
    TBAAAccessInfo TypeTBAAInfo;
    CharUnits Align =
        CGF.CGM.getNaturalTypeAlignment(ElTy, &TypeBaseInfo, &TypeTBAAInfo);
    BaseInfo.mergeForCast(TypeBaseInfo);
    TBAAInfo = CGF.CGM.mergeTBAAInfoForCast(TBAAInfo, TypeTBAAInfo);
    return Address(CGF.Builder.CreateLoad(BaseLVal.getAddress(CGF)),
                   CGF.ConvertTypeForMem(ElTy), Align);
  }
  return CGF.EmitPointerWithAlignment(Base, &BaseInfo, &TBAAInfo);
}
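
/// Emit an OpenMP array section such as a[lb:len]. Depending on IsLowerBound
/// this yields the address of the first element (index lb, defaulting to 0)
/// or of the last element (index lb + len - 1, or the array size minus one
/// when no length is given), constant-folding the index where possible.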
LValue CodeGenFunction::EmitOMPArraySectionExpr(const OMPArraySectionExpr *E,
                                                bool IsLowerBound) {
  QualType BaseTy = OMPArraySectionExpr::getBaseOriginalType(E->getBase());
  QualType ResultExprTy;
  if (auto *AT = getContext().getAsArrayType(BaseTy))
    ResultExprTy = AT->getElementType();
  else
    ResultExprTy = BaseTy->getPointeeType();
  llvm::Value *Idx = nullptr;
  if (IsLowerBound || E->getColonLocFirst().isInvalid()) {
    // Requesting lower bound or upper bound, but without provided length and
    // without ':' symbol for the default length -> length = 1.
    // Idx = LowerBound ?: 0;
    if (auto *LowerBound = E->getLowerBound()) {
      Idx = Builder.CreateIntCast(
          EmitScalarExpr(LowerBound), IntPtrTy,
          LowerBound->getType()->hasSignedIntegerRepresentation());
    } else
      Idx = llvm::ConstantInt::getNullValue(IntPtrTy);
  } else {
    // Try to emit length or lower bound as constant. If this is possible, 1
    // is subtracted from constant length or lower bound. Otherwise, emit LLVM
    // IR (LB + Len) - 1.
    auto &C = CGM.getContext();
    auto *Length = E->getLength();
    llvm::APSInt ConstLength;
    if (Length) {
      // Idx = LowerBound + Length - 1;
      if (std::optional<llvm::APSInt> CL = Length->getIntegerConstantExpr(C)) {
        ConstLength = CL->zextOrTrunc(PointerWidthInBits);
        Length = nullptr;
      }
      auto *LowerBound = E->getLowerBound();
      llvm::APSInt ConstLowerBound(PointerWidthInBits, /*isUnsigned=*/false);
      if (LowerBound) {
        if (std::optional<llvm::APSInt> LB =
                LowerBound->getIntegerConstantExpr(C)) {
          ConstLowerBound = LB->zextOrTrunc(PointerWidthInBits);
          LowerBound = nullptr;
        }
      }
      if (!Length)
        --ConstLength;
      else if (!LowerBound)
        --ConstLowerBound;

      if (Length || LowerBound) {
        auto *LowerBoundVal =
            LowerBound
                ? Builder.CreateIntCast(
                      EmitScalarExpr(LowerBound), IntPtrTy,
                      LowerBound->getType()->hasSignedIntegerRepresentation())
                : llvm::ConstantInt::get(IntPtrTy, ConstLowerBound);
        auto *LengthVal =
            Length
                ? Builder.CreateIntCast(
                      EmitScalarExpr(Length), IntPtrTy,
                      Length->getType()->hasSignedIntegerRepresentation())
                : llvm::ConstantInt::get(IntPtrTy, ConstLength);
        Idx = Builder.CreateAdd(LowerBoundVal, LengthVal, "lb_add_len",
                                /*HasNUW=*/false,
                                !getLangOpts().isSignedOverflowDefined());
        if (Length && LowerBound) {
          Idx = Builder.CreateSub(
              Idx, llvm::ConstantInt::get(IntPtrTy, /*V=*/1), "idx_sub_1",
              /*HasNUW=*/false, !getLangOpts().isSignedOverflowDefined());
        }
      } else
        Idx = llvm::ConstantInt::get(IntPtrTy, ConstLength + ConstLowerBound);
    } else {
      // Idx = ArraySize - 1;
      QualType ArrayTy = BaseTy->isPointerType()
                             ? E->getBase()->IgnoreParenImpCasts()->getType()
                             : BaseTy;
      if (auto *VAT = C.getAsVariableArrayType(ArrayTy)) {
        Length = VAT->getSizeExpr();
        if (std::optional<llvm::APSInt> L = Length->getIntegerConstantExpr(C)) {
          ConstLength = *L;
          Length = nullptr;
        }
      } else {
        auto *CAT = C.getAsConstantArrayType(ArrayTy);
        ConstLength = CAT->getSize();
      }
      if (Length) {
        auto *LengthVal = Builder.CreateIntCast(
            EmitScalarExpr(Length), IntPtrTy,
            Length->getType()->hasSignedIntegerRepresentation());
        Idx = Builder.CreateSub(
            LengthVal, llvm::ConstantInt::get(IntPtrTy, /*V=*/1), "len_sub_1",
            /*HasNUW=*/false, !getLangOpts().isSignedOverflowDefined());
      } else {
        ConstLength = ConstLength.zextOrTrunc(PointerWidthInBits);
        --ConstLength;
        Idx = llvm::ConstantInt::get(IntPtrTy, ConstLength);
      }
    }
  }
  assert(Idx);

  Address EltPtr = Address::invalid();
  LValueBaseInfo BaseInfo;
  TBAAAccessInfo TBAAInfo;
  if (auto *VLA = getContext().getAsVariableArrayType(ResultExprTy)) {
    // The base must be a pointer, which is not an aggregate.  Emit
    // it.  It needs to be emitted first in case it's what captures
    // the VLA bounds.
    Address Base =
        emitOMPArraySectionBase(*this, E->getBase(), BaseInfo, TBAAInfo,
                                BaseTy, VLA->getElementType(), IsLowerBound);
    // The element count here is the total number of non-VLA elements.
    llvm::Value *NumElements = getVLASize(VLA).NumElts;

    // Effectively, the multiply by the VLA size is part of the GEP.
    // GEP indexes are signed, and scaling an index isn't permitted to
    // signed-overflow, so we use the same semantics for our explicit
    // multiply.  We suppress this if overflow is not undefined behavior.
    if (getLangOpts().isSignedOverflowDefined())
      Idx = Builder.CreateMul(Idx, NumElements);
    else
      Idx = Builder.CreateNSWMul(Idx, NumElements);
    EltPtr = emitArraySubscriptGEP(*this, Base, Idx, VLA->getElementType(),
                                   !getLangOpts().isSignedOverflowDefined(),
                                   /*signedIndices=*/false, E->getExprLoc());
  } else if (const Expr *Array = isSimpleArrayDecayOperand(E->getBase())) {
    // If this is A[i] where A is an array, the frontend will have decayed the
    // base to be an ArrayToPointerDecay implicit cast.  While correct, it is
    // inefficient at -O0 to emit a "gep A, 0, 0" when codegen'ing it, then a
    // "gep x, i" here.  Emit one "gep A, 0, i".
    assert(Array->getType()->isArrayType() &&
           "Array to pointer decay must have array source type!");
    LValue ArrayLV;
    // For simple multidimensional array indexing, set the 'accessed' flag for
    // better bounds-checking of the base expression.
    if (const auto *ASE = dyn_cast<ArraySubscriptExpr>(Array))
      ArrayLV = EmitArraySubscriptExpr(ASE, /*Accessed*/ true);
    else
      ArrayLV = EmitLValue(Array);

    // Propagate the alignment from the array itself to the result.
    EltPtr = emitArraySubscriptGEP(
        *this, ArrayLV.getAddress(*this), {CGM.getSize(CharUnits::Zero()), Idx},
        ResultExprTy, !getLangOpts().isSignedOverflowDefined(),
        /*signedIndices=*/false, E->getExprLoc());
    BaseInfo = ArrayLV.getBaseInfo();
    TBAAInfo = CGM.getTBAAInfoForSubobject(ArrayLV, ResultExprTy);
  } else {
    Address Base = emitOMPArraySectionBase(*this, E->getBase(), BaseInfo,
                                           TBAAInfo, BaseTy, ResultExprTy,
                                           IsLowerBound);
    EltPtr = emitArraySubscriptGEP(*this, Base, Idx, ResultExprTy,
                                   !getLangOpts().isSignedOverflowDefined(),
                                   /*signedIndices=*/false, E->getExprLoc());
  }

  return MakeAddrLValue(EltPtr, ResultExprTy, BaseInfo, TBAAInfo);
}

LValue CodeGenFunction::
EmitExtVectorElementExpr(const ExtVectorElementExpr *E) {
  // Emit the base vector as an l-value.
  LValue Base;

  // ExtVectorElementExpr's base can either be a vector or pointer to vector.
  if (E->isArrow()) {
    // If it is a pointer to a vector, emit the address and form an lvalue with
    // it.
    LValueBaseInfo BaseInfo;
    TBAAAccessInfo TBAAInfo;
    Address Ptr = EmitPointerWithAlignment(E->getBase(), &BaseInfo, &TBAAInfo);
    const auto *PT = E->getBase()->getType()->castAs<PointerType>();
    Base = MakeAddrLValue(Ptr, PT->getPointeeType(), BaseInfo, TBAAInfo);
    Base.getQuals().removeObjCGCAttr();
  } else if (E->getBase()->isGLValue()) {
    // Otherwise, if the base is an lvalue (as in the case of foo.x.x),
    // emit the base as an lvalue.
    assert(E->getBase()->getType()->isVectorType());
    Base = EmitLValue(E->getBase());
  } else {
    // Otherwise, the base is a normal rvalue (as in (V+V).x), emit it as such.
    assert(E->getBase()->getType()->isVectorType() &&
           "Result must be a vector");
    llvm::Value *Vec = EmitScalarExpr(E->getBase());

    // Store the vector to memory (because LValue wants an address).
    Address VecMem = CreateMemTemp(E->getBase()->getType());
    Builder.CreateStore(Vec, VecMem);
    Base = MakeAddrLValue(VecMem, E->getBase()->getType(),
                          AlignmentSource::Decl);
  }

  QualType type =
      E->getType().withCVRQualifiers(Base.getQuals().getCVRQualifiers());

  // Encode the element access list into a vector of unsigned indices.
  SmallVector<uint32_t, 4> Indices;
  E->getEncodedElementAccess(Indices);

  if (Base.isSimple()) {
    llvm::Constant *CV =
        llvm::ConstantDataVector::get(getLLVMContext(), Indices);
    return LValue::MakeExtVectorElt(Base.getAddress(*this), CV, type,
                                    Base.getBaseInfo(), TBAAAccessInfo());
  }
  assert(Base.isExtVectorElt() && "Can only subscript lvalue vec elts here!");

  llvm::Constant *BaseElts = Base.getExtVectorElts();
  SmallVector<llvm::Constant *, 4> CElts;

  for (unsigned i = 0, e = Indices.size(); i != e; ++i)
    CElts.push_back(BaseElts->getAggregateElement(Indices[i]));
  llvm::Constant *CV = llvm::ConstantVector::get(CElts);
  return LValue::MakeExtVectorElt(Base.getExtVectorAddress(), CV, type,
                                  Base.getBaseInfo(), TBAAAccessInfo());
}

LValue CodeGenFunction::EmitMemberExpr(const MemberExpr *E) {
  if (DeclRefExpr *DRE = tryToConvertMemberExprToDeclRefExpr(*this, E)) {
    EmitIgnoredExpr(E->getBase());
    return EmitDeclRefLValue(DRE);
  }

  Expr *BaseExpr = E->getBase();
  // If this is s.x, emit s as an lvalue.  If it is s->x, emit s as a scalar.
  LValue BaseLV;
  if (E->isArrow()) {
    LValueBaseInfo BaseInfo;
    TBAAAccessInfo TBAAInfo;
    Address Addr = EmitPointerWithAlignment(BaseExpr, &BaseInfo, &TBAAInfo);
    QualType PtrTy = BaseExpr->getType()->getPointeeType();
    SanitizerSet SkippedChecks;
    bool IsBaseCXXThis = IsWrappedCXXThis(BaseExpr);
    if (IsBaseCXXThis)
      SkippedChecks.set(SanitizerKind::Alignment, true);
    if (IsBaseCXXThis || isa<DeclRefExpr>(BaseExpr))
      SkippedChecks.set(SanitizerKind::Null, true);
    EmitTypeCheck(TCK_MemberAccess, E->getExprLoc(), Addr.getPointer(), PtrTy,
                  /*Alignment=*/CharUnits::Zero(), SkippedChecks);
    BaseLV = MakeAddrLValue(Addr, PtrTy, BaseInfo, TBAAInfo);
  } else
    BaseLV = EmitCheckedLValue(BaseExpr, TCK_MemberAccess);

  NamedDecl *ND = E->getMemberDecl();
  if (auto *Field = dyn_cast<FieldDecl>(ND)) {
    LValue LV = EmitLValueForField(BaseLV, Field);
    setObjCGCLValueClass(getContext(), E, LV);
    if (getLangOpts().OpenMP) {
      // If the member was explicitly marked as nontemporal, mark it as
      // nontemporal. If the base lvalue is marked as nontemporal, mark access
      // to children as nontemporal too.
      if ((IsWrappedCXXThis(BaseExpr) &&
           CGM.getOpenMPRuntime().isNontemporalDecl(Field)) ||
          BaseLV.isNontemporal())
        LV.setNontemporal(/*Value=*/true);
    }
    return LV;
  }

  if (const auto *FD = dyn_cast<FunctionDecl>(ND))
    return EmitFunctionDeclLValue(*this, E, FD);

  llvm_unreachable("Unhandled member declaration!");
}

/// Given that we are currently emitting a lambda, emit an l-value for
/// one of its members.
LValue CodeGenFunction::EmitLValueForLambdaField(const FieldDecl *Field) {
  if (CurCodeDecl) {
    assert(cast<CXXMethodDecl>(CurCodeDecl)->getParent()->isLambda());
    assert(cast<CXXMethodDecl>(CurCodeDecl)->getParent() == Field->getParent());
  }
  QualType LambdaTagType =
      getContext().getTagDeclType(Field->getParent());
  LValue LambdaLV = MakeNaturalAlignAddrLValue(CXXABIThisValue, LambdaTagType);
  return EmitLValueForField(LambdaLV, Field);
}

/// Get the field index in the debug info. The debug info structure/union
/// will ignore the unnamed bitfields.
unsigned CodeGenFunction::getDebugInfoFIndex(const RecordDecl *Rec,
                                             unsigned FieldIndex) {
  unsigned I = 0, Skipped = 0;

  for (auto *F : Rec->getDefinition()->fields()) {
    if (I == FieldIndex)
      break;
    if (F->isUnnamedBitfield())
      Skipped++;
    I++;
  }

  return FieldIndex - Skipped;
}

/// Get the address of a zero-sized field within a record. The resulting
/// address doesn't necessarily have the right type.
static Address emitAddrOfZeroSizeField(CodeGenFunction &CGF, Address Base,
                                       const FieldDecl *Field) {
  CharUnits Offset = CGF.getContext().toCharUnitsFromBits(
      CGF.getContext().getFieldOffset(Field));
  if (Offset.isZero())
    return Base;
  Base = CGF.Builder.CreateElementBitCast(Base, CGF.Int8Ty);
  return CGF.Builder.CreateConstInBoundsByteGEP(Base, Offset);
}

/// Drill down to the storage of a field without walking into
/// reference types.
///
/// The resulting address doesn't necessarily have the right type.
static Address emitAddrOfFieldStorage(CodeGenFunction &CGF, Address base,
                                      const FieldDecl *field) {
  if (field->isZeroSize(CGF.getContext()))
    return emitAddrOfZeroSizeField(CGF, base, field);

  const RecordDecl *rec = field->getParent();

  unsigned idx =
      CGF.CGM.getTypes().getCGRecordLayout(rec).getLLVMFieldNo(field);

  return CGF.Builder.CreateStructGEP(base, idx, field->getName());
}
static Address emitPreserveStructAccess(CodeGenFunction &CGF, LValue base,
                                        Address addr, const FieldDecl *field) {
  const RecordDecl *rec = field->getParent();
  llvm::DIType *DbgInfo = CGF.getDebugInfo()->getOrCreateStandaloneType(
      base.getType(), rec->getLocation());

  unsigned idx =
      CGF.CGM.getTypes().getCGRecordLayout(rec).getLLVMFieldNo(field);

  return CGF.Builder.CreatePreserveStructAccessIndex(
      addr, idx, CGF.getDebugInfoFIndex(rec, field->getFieldIndex()), DbgInfo);
}
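
/// Return true if the given type is, or transitively contains (via bases or
/// fields), a class with a vtable pointer.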
static bool hasAnyVptr(const QualType Type, const ASTContext &Context) {
  const auto *RD = Type.getTypePtr()->getAsCXXRecordDecl();
  if (!RD)
    return false;

  if (RD->isDynamicClass())
    return true;

  for (const auto &Base : RD->bases())
    if (hasAnyVptr(Base.getType(), Context))
      return true;

  for (const FieldDecl *Field : RD->fields())
    if (hasAnyVptr(Field->getType(), Context))
      return true;

  return false;
}

LValue CodeGenFunction::EmitLValueForField(LValue base,
                                           const FieldDecl *field) {
  LValueBaseInfo BaseInfo = base.getBaseInfo();

  if (field->isBitField()) {
    const CGRecordLayout &RL =
        CGM.getTypes().getCGRecordLayout(field->getParent());
    const CGBitFieldInfo &Info = RL.getBitFieldInfo(field);
    const bool UseVolatile = isAAPCS(CGM.getTarget()) &&
                             CGM.getCodeGenOpts().AAPCSBitfieldWidth &&
                             Info.VolatileStorageSize != 0 &&
                             field->getType()
                                 .withCVRQualifiers(base.getVRQualifiers())
                                 .isVolatileQualified();
    Address Addr = base.getAddress(*this);
    unsigned Idx = RL.getLLVMFieldNo(field);
    const RecordDecl *rec = field->getParent();
    if (!UseVolatile) {
      if (!IsInPreservedAIRegion &&
          (!getDebugInfo() || !rec->hasAttr<BPFPreserveAccessIndexAttr>())) {
        if (Idx != 0)
          // For structs, we GEP to the field that the record layout suggests.
          Addr = Builder.CreateStructGEP(Addr, Idx, field->getName());
      } else {
        llvm::DIType *DbgInfo = getDebugInfo()->getOrCreateRecordType(
            getContext().getRecordType(rec), rec->getLocation());
        Addr = Builder.CreatePreserveStructAccessIndex(
            Addr, Idx, getDebugInfoFIndex(rec, field->getFieldIndex()),
            DbgInfo);
      }
    }
    const unsigned SS =
        UseVolatile ? Info.VolatileStorageSize : Info.StorageSize;
    // Get the access type.
    llvm::Type *FieldIntTy = llvm::Type::getIntNTy(getLLVMContext(), SS);
    if (Addr.getElementType() != FieldIntTy)
      Addr = Builder.CreateElementBitCast(Addr, FieldIntTy);
    if (UseVolatile) {
      const unsigned VolatileOffset = Info.VolatileStorageOffset.getQuantity();
      if (VolatileOffset)
        Addr = Builder.CreateConstInBoundsGEP(Addr, VolatileOffset);
    }

    QualType fieldType =
        field->getType().withCVRQualifiers(base.getVRQualifiers());
    // TODO: Support TBAA for bit fields.
    LValueBaseInfo FieldBaseInfo(BaseInfo.getAlignmentSource());
    return LValue::MakeBitfield(Addr, Info, fieldType, FieldBaseInfo,
                                TBAAAccessInfo());
  }

  // Fields of may-alias structures are may-alias themselves.
  // FIXME: this should get propagated down through anonymous structs
  // and unions.
  QualType FieldType = field->getType();
  const RecordDecl *rec = field->getParent();
  AlignmentSource BaseAlignSource = BaseInfo.getAlignmentSource();
  LValueBaseInfo FieldBaseInfo(getFieldAlignmentSource(BaseAlignSource));
  TBAAAccessInfo FieldTBAAInfo;
  if (base.getTBAAInfo().isMayAlias() ||
      rec->hasAttr<MayAliasAttr>() || FieldType->isVectorType()) {
    FieldTBAAInfo = TBAAAccessInfo::getMayAliasInfo();
  } else if (rec->isUnion()) {
    // TODO: Support TBAA for unions.
    FieldTBAAInfo = TBAAAccessInfo::getMayAliasInfo();
  } else {
    // If no base type has been assigned for the base access, then try to
    // generate one for this base lvalue.
    FieldTBAAInfo = base.getTBAAInfo();
    if (!FieldTBAAInfo.BaseType) {
      FieldTBAAInfo.BaseType = CGM.getTBAABaseTypeInfo(base.getType());
      assert(!FieldTBAAInfo.Offset &&
             "Nonzero offset for an access with no base type!");
    }

    // Adjust offset to be relative to the base type.
    const ASTRecordLayout &Layout =
        getContext().getASTRecordLayout(field->getParent());
    unsigned CharWidth = getContext().getCharWidth();
    if (FieldTBAAInfo.BaseType)
      FieldTBAAInfo.Offset +=
          Layout.getFieldOffset(field->getFieldIndex()) / CharWidth;

    // Update the final access type and size.
    FieldTBAAInfo.AccessType = CGM.getTBAATypeInfo(FieldType);
    FieldTBAAInfo.Size =
        getContext().getTypeSizeInChars(FieldType).getQuantity();
  }

  Address addr = base.getAddress(*this);
  if (auto *ClassDef = dyn_cast<CXXRecordDecl>(rec)) {
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        ClassDef->isDynamicClass()) {
      // Getting to any field of dynamic object requires stripping dynamic
      // information provided by invariant.group.  This is because accessing
      // fields may leak the real address of dynamic object, which could result
      // in miscompilation when leaked pointer would be compared.
      auto *stripped = Builder.CreateStripInvariantGroup(addr.getPointer());
      addr = Address(stripped, addr.getElementType(), addr.getAlignment());
    }
  }

  unsigned RecordCVR = base.getVRQualifiers();
  if (rec->isUnion()) {
    // For unions, there is no pointer adjustment.
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        hasAnyVptr(FieldType, getContext()))
      // Because unions can easily skip invariant.barriers, we need to add
      // a barrier every time CXXRecord field with vptr is referenced.
      addr = Builder.CreateLaunderInvariantGroup(addr);

    if (IsInPreservedAIRegion ||
        (getDebugInfo() && rec->hasAttr<BPFPreserveAccessIndexAttr>())) {
      // Remember the original union field index
      llvm::DIType *DbgInfo = getDebugInfo()->getOrCreateStandaloneType(
          base.getType(), rec->getLocation());
      addr = Address(
          Builder.CreatePreserveUnionAccessIndex(
              addr.getPointer(),
              getDebugInfoFIndex(rec, field->getFieldIndex()), DbgInfo),
          addr.getElementType(), addr.getAlignment());
    }

    if (FieldType->isReferenceType())
      addr = Builder.CreateElementBitCast(
          addr, CGM.getTypes().ConvertTypeForMem(FieldType), field->getName());
  } else {
    if (!IsInPreservedAIRegion &&
        (!getDebugInfo() || !rec->hasAttr<BPFPreserveAccessIndexAttr>()))
      // For structs, we GEP to the field that the record layout suggests.
      addr = emitAddrOfFieldStorage(*this, addr, field);
    else
      // Remember the original struct field index
      addr = emitPreserveStructAccess(*this, base, addr, field);
  }

  // If this is a reference field, load the reference right now.
  if (FieldType->isReferenceType()) {
    LValue RefLVal =
        MakeAddrLValue(addr, FieldType, FieldBaseInfo, FieldTBAAInfo);
    if (RecordCVR & Qualifiers::Volatile)
      RefLVal.getQuals().addVolatile();
    addr = EmitLoadOfReference(RefLVal, &FieldBaseInfo, &FieldTBAAInfo);

    // Qualifiers on the struct don't apply to the referencee.
    RecordCVR = 0;
    FieldType = FieldType->getPointeeType();
  }

  // Make sure that the address is pointing to the right type.  This is
  // critical for both unions and structs.  A union needs a bitcast, a struct
  // element will need a bitcast if the LLVM type laid out doesn't match the
  // desired type.
  addr = Builder.CreateElementBitCast(
      addr, CGM.getTypes().ConvertTypeForMem(FieldType), field->getName());

  if (field->hasAttr<AnnotateAttr>())
    addr = EmitFieldAnnotations(field, addr);

  LValue LV = MakeAddrLValue(addr, FieldType, FieldBaseInfo, FieldTBAAInfo);
  LV.getQuals().addCVRQualifiers(RecordCVR);

  // __weak attribute on a field is ignored.
  if (LV.getQuals().getObjCGCAttr() == Qualifiers::Weak)
    LV.getQuals().removeObjCGCAttr();

  return LV;
}

LValue
CodeGenFunction::EmitLValueForFieldInitialization(LValue Base,
                                                  const FieldDecl *Field) {
  QualType FieldType = Field->getType();

  if (!FieldType->isReferenceType())
    return EmitLValueForField(Base, Field);

  Address V = emitAddrOfFieldStorage(*this, Base.getAddress(*this), Field);

  // Make sure that the address is pointing to the right type.
  llvm::Type *llvmType = ConvertTypeForMem(FieldType);
  V = Builder.CreateElementBitCast(V, llvmType, Field->getName());

  // TODO: Generate TBAA information that describes this access as a structure
  // member access and not just an access to an object of the field's type.
  // This should be similar to what we do in EmitLValueForField().
  LValueBaseInfo BaseInfo = Base.getBaseInfo();
  AlignmentSource FieldAlignSource = BaseInfo.getAlignmentSource();
  LValueBaseInfo FieldBaseInfo(getFieldAlignmentSource(FieldAlignSource));
  return MakeAddrLValue(V, FieldType, FieldBaseInfo,
                        CGM.getTBAAInfoForSubobject(Base, FieldType));
}
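
/// Emit a compound literal l-value. At file scope the literal becomes a
/// constant global; at block scope it is materialized in a stack temporary
/// and, in C, destroyed at the end of the enclosing scope.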
LValue CodeGenFunction::EmitCompoundLiteralLValue(const CompoundLiteralExpr *E){
  if (E->isFileScope()) {
    ConstantAddress GlobalPtr = CGM.GetAddrOfConstantCompoundLiteral(E);
    return MakeAddrLValue(GlobalPtr, E->getType(), AlignmentSource::Decl);
  }
  if (E->getType()->isVariablyModifiedType())
    // make sure to emit the VLA size.
    EmitVariablyModifiedType(E->getType());

  Address DeclPtr = CreateMemTemp(E->getType(), ".compoundliteral");
  const Expr *InitExpr = E->getInitializer();
  LValue Result = MakeAddrLValue(DeclPtr, E->getType(), AlignmentSource::Decl);

  EmitAnyExprToMem(InitExpr, DeclPtr, E->getType().getQualifiers(),
                   /*Init*/ true);

  // Block-scope compound literals are destroyed at the end of the enclosing
  // scope in C.
  if (!getLangOpts().CPlusPlus)
    if (QualType::DestructionKind DtorKind = E->getType().isDestructedType())
      pushLifetimeExtendedDestroy(getCleanupKind(DtorKind), DeclPtr,
                                  E->getType(), getDestroyer(DtorKind),
                                  DtorKind & EHCleanup);

  return Result;
}

LValue CodeGenFunction::EmitInitListLValue(const InitListExpr *E) {
  if (!E->isGLValue())
    // Initializing an aggregate temporary in C++11: T{...}.
    return EmitAggExprToLValue(E);

  // An lvalue initializer list must be initializing a reference.
  assert(E->isTransparent() && "non-transparent glvalue init list");
  return EmitLValue(E->getInit(0));
}

/// Emit the operand of a glvalue conditional operator. This is either a
/// glvalue or a (possibly-parenthesized) throw-expression. If this is a throw,
/// no LValue is returned and the current block has been terminated.
static std::optional<LValue> EmitLValueOrThrowExpression(CodeGenFunction &CGF,
                                                         const Expr *Operand) {
  if (auto *ThrowExpr = dyn_cast<CXXThrowExpr>(Operand->IgnoreParens())) {
    CGF.EmitCXXThrowExpr(ThrowExpr, /*KeepInsertionPoint*/ false);
    return std::nullopt;
  }

  return CGF.EmitLValue(Operand);
}

namespace {
// Handle the case where the condition is a constant evaluatable simple integer,
// which means we don't have to separately handle the true/false blocks.
std::optional<LValue> HandleConditionalOperatorLValueSimpleCase(
    CodeGenFunction &CGF, const AbstractConditionalOperator *E) {
  const Expr *condExpr = E->getCond();
  bool CondExprBool;
  if (CGF.ConstantFoldsToSimpleInteger(condExpr, CondExprBool)) {
    const Expr *Live = E->getTrueExpr(), *Dead = E->getFalseExpr();
    if (!CondExprBool)
      std::swap(Live, Dead);

    if (!CGF.ContainsLabel(Dead)) {
      // If the true case is live, we need to track its region.
      if (CondExprBool)
        CGF.incrementProfileCounter(E);
      // If it's a throw expression, we emit it and return an undefined lvalue
      // because it can't be used.
      if (auto *ThrowExpr = dyn_cast<CXXThrowExpr>(Live->IgnoreParens())) {
        CGF.EmitCXXThrowExpr(ThrowExpr);
        llvm::Type *ElemTy = CGF.ConvertType(Dead->getType());
        llvm::Type *Ty = llvm::PointerType::getUnqual(ElemTy);
        return CGF.MakeAddrLValue(
            Address(llvm::UndefValue::get(Ty), ElemTy, CharUnits::One()),
            Dead->getType());
      }
      return CGF.EmitLValue(Live);
    }
  }
  return std::nullopt;
}

struct ConditionalInfo {
  llvm::BasicBlock *lhsBlock, *rhsBlock;
  std::optional<LValue> LHS, RHS;
};

// Create and generate the 3 blocks for a conditional operator.
// Leaves the 'current block' in the continuation basic block.
template<typename FuncTy>
ConditionalInfo EmitConditionalBlocks(CodeGenFunction &CGF,
                                      const AbstractConditionalOperator *E,
                                      const FuncTy &BranchGenFunc) {
  ConditionalInfo Info{CGF.createBasicBlock("cond.true"),
                       CGF.createBasicBlock("cond.false"), std::nullopt,
                       std::nullopt};
  llvm::BasicBlock *endBlock = CGF.createBasicBlock("cond.end");

  CodeGenFunction::ConditionalEvaluation eval(CGF);
  CGF.EmitBranchOnBoolExpr(E->getCond(), Info.lhsBlock, Info.rhsBlock,
                           CGF.getProfileCount(E));

  // Any temporaries created here are conditional.
  CGF.EmitBlock(Info.lhsBlock);
  CGF.incrementProfileCounter(E);
  eval.begin(CGF);
  Info.LHS = BranchGenFunc(CGF, E->getTrueExpr());
  eval.end(CGF);
  Info.lhsBlock = CGF.Builder.GetInsertBlock();

  if (Info.LHS)
    CGF.Builder.CreateBr(endBlock);

  // Any temporaries created here are conditional.
  CGF.EmitBlock(Info.rhsBlock);
  eval.begin(CGF);
  Info.RHS = BranchGenFunc(CGF, E->getFalseExpr());
  eval.end(CGF);
  Info.rhsBlock = CGF.Builder.GetInsertBlock();
  CGF.EmitBlock(endBlock);

  return Info;
}
} // namespace

void CodeGenFunction::EmitIgnoredConditionalOperator(
    const AbstractConditionalOperator *E) {
  if (!E->isGLValue()) {
    // ?: here should be an aggregate.
    assert(hasAggregateEvaluationKind(E->getType()) &&
           "Unexpected conditional operator!");
    return (void)EmitAggExprToLValue(E);
  }

  OpaqueValueMapping binding(*this, E);
  if (HandleConditionalOperatorLValueSimpleCase(*this, E))
    return;

  EmitConditionalBlocks(*this, E, [](CodeGenFunction &CGF, const Expr *E) {
    CGF.EmitIgnoredExpr(E);
    return LValue{};
  });
}

LValue CodeGenFunction::EmitConditionalOperatorLValue(
    const AbstractConditionalOperator *expr) {
  if (!expr->isGLValue()) {
    // ?: here should be an aggregate.
    assert(hasAggregateEvaluationKind(expr->getType()) &&
           "Unexpected conditional operator!");
    return EmitAggExprToLValue(expr);
  }

  OpaqueValueMapping binding(*this, expr);
  if (std::optional<LValue> Res =
          HandleConditionalOperatorLValueSimpleCase(*this, expr))
    return *Res;

  ConditionalInfo Info = EmitConditionalBlocks(
      *this, expr, [](CodeGenFunction &CGF, const Expr *E) {
        return EmitLValueOrThrowExpression(CGF, E);
      });

  if ((Info.LHS && !Info.LHS->isSimple()) ||
      (Info.RHS && !Info.RHS->isSimple()))
    return EmitUnsupportedLValue(expr, "conditional operator");

  if (Info.LHS && Info.RHS) {
    Address lhsAddr = Info.LHS->getAddress(*this);
    Address rhsAddr = Info.RHS->getAddress(*this);
    llvm::PHINode *phi = Builder.CreatePHI(lhsAddr.getType(), 2, "cond-lvalue");
    phi->addIncoming(lhsAddr.getPointer(), Info.lhsBlock);
    phi->addIncoming(rhsAddr.getPointer(), Info.rhsBlock);
    Address result(phi, lhsAddr.getElementType(),
                   std::min(lhsAddr.getAlignment(), rhsAddr.getAlignment()));
    AlignmentSource alignSource =
        std::max(Info.LHS->getBaseInfo().getAlignmentSource(),
                 Info.RHS->getBaseInfo().getAlignmentSource());
    TBAAAccessInfo TBAAInfo = CGM.mergeTBAAInfoForConditionalOperator(
        Info.LHS->getTBAAInfo(), Info.RHS->getTBAAInfo());
    return MakeAddrLValue(result, expr->getType(), LValueBaseInfo(alignSource),
                          TBAAInfo);
  } else {
    assert((Info.LHS || Info.RHS) &&
           "both operands of glvalue conditional are throw-expressions?");
    return Info.LHS ? *Info.LHS : *Info.RHS;
  }
}

/// EmitCastLValue - Casts are never lvalues unless that cast is to a reference
/// type. If the cast is to a reference, we can have the usual lvalue result,
/// otherwise if a cast is needed by the code generator in an lvalue context,
/// then it must mean that we need the address of an aggregate in order to
/// access one of its members. This can happen for all the reasons that casts
/// are permitted with aggregate result, including noop aggregate casts, and
/// cast from scalar to union.
LValue CodeGenFunction::EmitCastLValue(const CastExpr *E) {
  switch (E->getCastKind()) {
  case CK_ToVoid:
  case CK_BitCast:
  case CK_LValueToRValueBitCast:
  case CK_ArrayToPointerDecay:
  case CK_FunctionToPointerDecay:
  case CK_NullToMemberPointer:
  case CK_NullToPointer:
  case CK_IntegralToPointer:
  case CK_PointerToIntegral:
  case CK_PointerToBoolean:
  case CK_VectorSplat:
  case CK_IntegralCast:
  case CK_BooleanToSignedIntegral:
  case CK_IntegralToBoolean:
  case CK_IntegralToFloating:
  case CK_FloatingToIntegral:
  case CK_FloatingToBoolean:
  case CK_FloatingCast:
  case CK_FloatingRealToComplex:
  case CK_FloatingComplexToReal:
  case CK_FloatingComplexToBoolean:
  case CK_FloatingComplexCast:
  case CK_FloatingComplexToIntegralComplex:
  case CK_IntegralRealToComplex:
  case CK_IntegralComplexToReal:
  case CK_IntegralComplexToBoolean:
  case CK_IntegralComplexCast:
  case CK_IntegralComplexToFloatingComplex:
  case CK_DerivedToBaseMemberPointer:
  case CK_BaseToDerivedMemberPointer:
  case CK_MemberPointerToBoolean:
  case CK_ReinterpretMemberPointer:
  case CK_AnyPointerToBlockPointerCast:
  case CK_ARCProduceObject:
  case CK_ARCConsumeObject:
  case CK_ARCReclaimReturnedObject:
  case CK_ARCExtendBlockObject:
  case CK_CopyAndAutoreleaseBlockObject:
  case CK_IntToOCLSampler:
  case CK_FloatingToFixedPoint:
  case CK_FixedPointToFloating:
  case CK_FixedPointCast:
  case CK_FixedPointToBoolean:
  case CK_FixedPointToIntegral:
  case CK_IntegralToFixedPoint:
  case CK_MatrixCast:
    return EmitUnsupportedLValue(E, "unexpected cast lvalue");

  case CK_Dependent:
    llvm_unreachable("dependent cast kind in IR gen!");

  case CK_BuiltinFnToFnPtr:
    llvm_unreachable("builtin functions are handled elsewhere");

  // These are never l-values; just use the aggregate emission code.
  case CK_NonAtomicToAtomic:
  case CK_AtomicToNonAtomic:
    return EmitAggExprToLValue(E);

  case CK_Dynamic: {
    LValue LV = EmitLValue(E->getSubExpr());
    Address V = LV.getAddress(*this);
    const auto *DCE = cast<CXXDynamicCastExpr>(E);
    return MakeNaturalAlignAddrLValue(EmitDynamicCast(V, DCE), E->getType());
  }

  case CK_ConstructorConversion:
  case CK_UserDefinedConversion:
  case CK_CPointerToObjCPointerCast:
  case CK_BlockPointerToObjCPointerCast:
  case CK_LValueToRValue:
    return EmitLValue(E->getSubExpr());

  case CK_NoOp: {
    // CK_NoOp can model a qualification conversion, which can remove an array
    // bound and change the IR type.
    // FIXME: Once pointee types are removed from IR, remove this.
    LValue LV = EmitLValue(E->getSubExpr());
    if (LV.isSimple()) {
      Address V = LV.getAddress(*this);
      if (V.isValid()) {
        llvm::Type *T = ConvertTypeForMem(E->getType());
        if (V.getElementType() != T)
          LV.setAddress(Builder.CreateElementBitCast(V, T));
      }
    }
    return LV;
  }

  case CK_UncheckedDerivedToBase:
  case CK_DerivedToBase: {
    const auto *DerivedClassTy =
        E->getSubExpr()->getType()->castAs<RecordType>();
    auto *DerivedClassDecl = cast<CXXRecordDecl>(DerivedClassTy->getDecl());

    LValue LV = EmitLValue(E->getSubExpr());
    Address This = LV.getAddress(*this);

    // Perform the derived-to-base conversion
    Address Base = GetAddressOfBaseClass(
        This, DerivedClassDecl, E->path_begin(), E->path_end(),
        /*NullCheckValue=*/false, E->getExprLoc());

    // TODO: Support accesses to members of base classes in TBAA. For now, we
    // conservatively pretend that the complete object is of the base class
    // type.
    return MakeAddrLValue(Base, E->getType(), LV.getBaseInfo(),
                          CGM.getTBAAInfoForSubobject(LV, E->getType()));
  }
  case CK_ToUnion:
    return EmitAggExprToLValue(E);
  case CK_BaseToDerived: {
    const auto *DerivedClassTy = E->getType()->castAs<RecordType>();
    auto *DerivedClassDecl = cast<CXXRecordDecl>(DerivedClassTy->getDecl());

    LValue LV = EmitLValue(E->getSubExpr());

    // Perform the base-to-derived conversion
    Address Derived = GetAddressOfDerivedClass(
        LV.getAddress(*this), DerivedClassDecl, E->path_begin(), E->path_end(),
        /*NullCheckValue=*/false);

    // C++11 [expr.static.cast]p2: Behavior is undefined if a downcast is
    // performed and the object is not of the derived type.
    if (sanitizePerformTypeCheck())
      EmitTypeCheck(TCK_DowncastReference, E->getExprLoc(),
                    Derived.getPointer(), E->getType());

    if (SanOpts.has(SanitizerKind::CFIDerivedCast))
      EmitVTablePtrCheckForCast(E->getType(), Derived,
                                /*MayBeNull=*/false, CFITCK_DerivedCast,
                                E->getBeginLoc());

    return MakeAddrLValue(Derived, E->getType(), LV.getBaseInfo(),
                          CGM.getTBAAInfoForSubobject(LV, E->getType()));
  }
  case CK_LValueBitCast: {
    // This must be a reinterpret_cast (or c-style equivalent).
    const auto *CE = cast<ExplicitCastExpr>(E);

    CGM.EmitExplicitCastExprType(CE, this);
    LValue LV = EmitLValue(E->getSubExpr());
    Address V = Builder.CreateElementBitCast(
        LV.getAddress(*this),
        ConvertTypeForMem(CE->getTypeAsWritten()->getPointeeType()));

    if (SanOpts.has(SanitizerKind::CFIUnrelatedCast))
      EmitVTablePtrCheckForCast(E->getType(), V,
                                /*MayBeNull=*/false, CFITCK_UnrelatedCast,
                                E->getBeginLoc());

    return MakeAddrLValue(V, E->getType(), LV.getBaseInfo(),
                          CGM.getTBAAInfoForSubobject(LV, E->getType()));
  }
  case CK_AddressSpaceConversion: {
    LValue LV = EmitLValue(E->getSubExpr());
    QualType DestTy = getContext().getPointerType(E->getType());
    llvm::Value *V = getTargetHooks().performAddrSpaceCast(
        *this, LV.getPointer(*this),
        E->getSubExpr()->getType().getAddressSpace(),
        E->getType().getAddressSpace(), ConvertType(DestTy));
    return MakeAddrLValue(Address(V, ConvertTypeForMem(E->getType()),
                                  LV.getAddress(*this).getAlignment()),
                          E->getType(), LV.getBaseInfo(), LV.getTBAAInfo());
  }
  case CK_ObjCObjectLValueCast: {
    LValue LV = EmitLValue(E->getSubExpr());
    Address V = Builder.CreateElementBitCast(LV.getAddress(*this),
                                             ConvertType(E->getType()));
    return MakeAddrLValue(V, E->getType(), LV.getBaseInfo(),
                          CGM.getTBAAInfoForSubobject(LV, E->getType()));
  }
  case CK_ZeroToOCLOpaqueType:
    llvm_unreachable("NULL to OpenCL opaque type lvalue cast is not valid");
  }

  llvm_unreachable("Unhandled lvalue cast kind?");
}

LValue CodeGenFunction::EmitOpaqueValueLValue(const OpaqueValueExpr *e) {
  assert(OpaqueValueMappingData::shouldBindAsLValue(e));
  return getOrCreateOpaqueLValueMapping(e);
}

LValue
CodeGenFunction::getOrCreateOpaqueLValueMapping(const OpaqueValueExpr *e) {
  assert(OpaqueValueMapping::shouldBindAsLValue(e));

  llvm::DenseMap<const OpaqueValueExpr*,LValue>::iterator
    it = OpaqueLValues.find(e);

  if (it != OpaqueLValues.end())
    return it->second;

  assert(e->isUnique() && "LValue for a nonunique OVE hasn't been emitted");
  return EmitLValue(e->getSourceExpr());
}

RValue
CodeGenFunction::getOrCreateOpaqueRValueMapping(const OpaqueValueExpr *e) {
  assert(!OpaqueValueMapping::shouldBindAsLValue(e));

  llvm::DenseMap<const OpaqueValueExpr*,RValue>::iterator
    it = OpaqueRValues.find(e);

  if (it != OpaqueRValues.end())
    return it->second;

  assert(e->isUnique() && "RValue for a nonunique OVE hasn't been emitted");
  return EmitAnyExpr(e->getSourceExpr());
}

RValue CodeGenFunction::EmitRValueForField(LValue LV,
                                           const FieldDecl *FD,
                                           SourceLocation Loc) {
  QualType FT = FD->getType();
  LValue FieldLV = EmitLValueForField(LV, FD);
  switch (getEvaluationKind(FT)) {
  case TEK_Complex:
    return RValue::getComplex(EmitLoadOfComplex(FieldLV, Loc));
  case TEK_Aggregate:
    return FieldLV.asAggregateRValue(*this);
  case TEK_Scalar:
    // This routine is used to load fields one-by-one to perform a copy, so
    // don't load reference fields.
    if (FD->getType()->isReferenceType())
      return RValue::get(FieldLV.getPointer(*this));
    // Bit-fields need EmitLoadOfLValue; everything else can use a primitive
    // load via EmitLoadOfScalar.
    if (FieldLV.isBitField())
      return EmitLoadOfLValue(FieldLV, Loc);
    return RValue::get(EmitLoadOfScalar(FieldLV, Loc));
  }
  llvm_unreachable("bad evaluation kind");
}

//===--------------------------------------------------------------------===//
//                             Expression Emission
//===--------------------------------------------------------------------===//

RValue CodeGenFunction::EmitCallExpr(const CallExpr *E,
                                     ReturnValueSlot ReturnValue) {
  // Builtins never have block type.
  if (E->getCallee()->getType()->isBlockPointerType())
    return EmitBlockCallExpr(E, ReturnValue);

  if (const auto *CE = dyn_cast<CXXMemberCallExpr>(E))
    return EmitCXXMemberCallExpr(CE, ReturnValue);

  if (const auto *CE = dyn_cast<CUDAKernelCallExpr>(E))
    return EmitCUDAKernelCallExpr(CE, ReturnValue);

  if (const auto *CE = dyn_cast<CXXOperatorCallExpr>(E))
    if (const CXXMethodDecl *MD =
            dyn_cast_or_null<CXXMethodDecl>(CE->getCalleeDecl()))
      return EmitCXXOperatorMemberCallExpr(CE, MD, ReturnValue);

  CGCallee callee = EmitCallee(E->getCallee());

  if (callee.isBuiltin()) {
    return EmitBuiltinExpr(callee.getBuiltinDecl(), callee.getBuiltinID(),
                           E, ReturnValue);
  }

  if (callee.isPseudoDestructor()) {
    return EmitCXXPseudoDestructorExpr(callee.getPseudoDestructorExpr());
  }

  return EmitCall(E->getCallee()->getType(), callee, E, ReturnValue);
}

/// Emit a CallExpr without considering whether it might be a subclass.
RValue CodeGenFunction::EmitSimpleCallExpr(const CallExpr *E,
                                           ReturnValueSlot ReturnValue) {
  CGCallee Callee = EmitCallee(E->getCallee());
  return EmitCall(E->getCallee()->getType(), Callee, E, ReturnValue);
}

// Detect the unusual situation where an inline version is shadowed by a
// non-inline version. In that case we should pick the external one
// everywhere. That's GCC behavior too.
static bool OnlyHasInlineBuiltinDeclaration(const FunctionDecl *FD) {
  for (const FunctionDecl *PD = FD; PD; PD = PD->getPreviousDecl())
    if (!PD->isInlineBuiltinDeclaration())
      return false;
  return true;
}
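
// Illustrative sketch: the "inline builtin" pattern detected above looks like
// the fortified-libc idiom (assumed example, not from this file):
//
//   extern inline __attribute__((gnu_inline, always_inline)) void *
//   memcpy(void *d, const void *s, unsigned long n) { /* checked body */ }
//
// If every declaration of memcpy is such an inline builtin declaration,
// EmitDirectCallee below routes calls through a "memcpy.inline" clone; if a
// non-inline declaration shadows it, the external definition wins, matching
// GCC.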

static CGCallee EmitDirectCallee(CodeGenFunction &CGF, GlobalDecl GD) {
  const FunctionDecl *FD = cast<FunctionDecl>(GD.getDecl());

  if (auto builtinID = FD->getBuiltinID()) {
    std::string NoBuiltinFD = ("no-builtin-" + FD->getName()).str();
    std::string NoBuiltins = "no-builtins";

    StringRef Ident = CGF.CGM.getMangledName(GD);
    std::string FDInlineName = (Ident + ".inline").str();

    bool IsPredefinedLibFunction =
        CGF.getContext().BuiltinInfo.isPredefinedLibFunction(builtinID);
    bool HasAttributeNoBuiltin =
        CGF.CurFn->getAttributes().hasFnAttr(NoBuiltinFD) ||
        CGF.CurFn->getAttributes().hasFnAttr(NoBuiltins);

    // When directly calling an inline builtin, call it through its mangled
    // name to make it clear it's not the actual builtin.
    if (CGF.CurFn->getName() != FDInlineName &&
        OnlyHasInlineBuiltinDeclaration(FD)) {
      llvm::Constant *CalleePtr = EmitFunctionDeclPointer(CGF.CGM, GD);
      llvm::Function *Fn = llvm::cast<llvm::Function>(CalleePtr);
      llvm::Module *M = Fn->getParent();
      llvm::Function *Clone = M->getFunction(FDInlineName);
      if (!Clone) {
        Clone = llvm::Function::Create(Fn->getFunctionType(),
                                       llvm::GlobalValue::InternalLinkage,
                                       Fn->getAddressSpace(), FDInlineName, M);
        Clone->addFnAttr(llvm::Attribute::AlwaysInline);
      }
      return CGCallee::forDirect(Clone, GD);
    }

    // Replaceable builtins provide their own implementation of a builtin. If
    // we are in an inline builtin implementation, avoid trivial infinite
    // recursion. Honor __attribute__((no_builtin("foo"))) and
    // __attribute__((no_builtin)) on the current function; however, when foo
    // is not a predefined library function, we must generate the builtin no
    // matter what.
    else if (!IsPredefinedLibFunction || !HasAttributeNoBuiltin)
      return CGCallee::forBuiltin(builtinID, FD);
  }

  llvm::Constant *CalleePtr = EmitFunctionDeclPointer(CGF.CGM, GD);
  if (CGF.CGM.getLangOpts().CUDA && !CGF.CGM.getLangOpts().CUDAIsDevice &&
      FD->hasAttr<CUDAGlobalAttr>())
    CalleePtr = CGF.CGM.getCUDARuntime().getKernelStub(
        cast<llvm::GlobalValue>(CalleePtr->stripPointerCasts()));

  return CGCallee::forDirect(CalleePtr, GD);
}

CGCallee CodeGenFunction::EmitCallee(const Expr *E) {
  E = E->IgnoreParens();

  // Look through function-to-pointer decay.
  if (auto ICE = dyn_cast<ImplicitCastExpr>(E)) {
    if (ICE->getCastKind() == CK_FunctionToPointerDecay ||
        ICE->getCastKind() == CK_BuiltinFnToFnPtr) {
      return EmitCallee(ICE->getSubExpr());
    }

  // Resolve direct calls.
  } else if (auto DRE = dyn_cast<DeclRefExpr>(E)) {
    if (auto FD = dyn_cast<FunctionDecl>(DRE->getDecl())) {
      return EmitDirectCallee(*this, FD);
    }
  } else if (auto ME = dyn_cast<MemberExpr>(E)) {
    if (auto FD = dyn_cast<FunctionDecl>(ME->getMemberDecl())) {
      EmitIgnoredExpr(ME->getBase());
      return EmitDirectCallee(*this, FD);
    }

  // Look through template substitutions.
  } else if (auto NTTP = dyn_cast<SubstNonTypeTemplateParmExpr>(E)) {
    return EmitCallee(NTTP->getReplacement());

  // Treat pseudo-destructor calls differently.
  } else if (auto PDE = dyn_cast<CXXPseudoDestructorExpr>(E)) {
    return CGCallee::forPseudoDestructor(PDE);
  }

  // Otherwise, we have an indirect reference.
  llvm::Value *calleePtr;
  QualType functionType;
  if (auto ptrType = E->getType()->getAs<PointerType>()) {
    calleePtr = EmitScalarExpr(E);
    functionType = ptrType->getPointeeType();
  } else {
    functionType = E->getType();
    calleePtr = EmitLValue(E).getPointer(*this);
  }
  assert(functionType->isFunctionType());

  GlobalDecl GD;
  if (const auto *VD =
          dyn_cast_or_null<VarDecl>(E->getReferencedDeclOfCallee()))
    GD = GlobalDecl(VD);

  CGCalleeInfo calleeInfo(functionType->getAs<FunctionProtoType>(), GD);
  CGCallee callee(calleeInfo, calleePtr);
  return callee;
}
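
// Illustrative sketch of the callee shapes EmitCallee above handles (assumed
// examples):
//
//   f(1);             // decay over a DeclRefExpr -> direct callee
//   (f)(1);           // parens are ignored       -> still direct
//   o.staticFn();     // MemberExpr naming a static member function: the base
//                     // is emitted for side effects only, call is direct
//   p->~T();          // CXXPseudoDestructorExpr  -> pseudo-destructor callee
//   fnptr(1);         // anything else is indirect: the pointer is emitted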

LValue CodeGenFunction::EmitBinaryOperatorLValue(const BinaryOperator *E) {
  // Comma expressions just emit their LHS then their RHS as an l-value.
  if (E->getOpcode() == BO_Comma) {
    EmitIgnoredExpr(E->getLHS());
    EnsureInsertPoint();
    return EmitLValue(E->getRHS());
  }

  if (E->getOpcode() == BO_PtrMemD ||
      E->getOpcode() == BO_PtrMemI)
    return EmitPointerToDataMemberBinaryExpr(E);

  assert(E->getOpcode() == BO_Assign && "unexpected binary l-value");

  // Note that in all of these cases, __block variables need the RHS
  // evaluated first just in case the variable gets moved by the RHS.

  switch (getEvaluationKind(E->getType())) {
  case TEK_Scalar: {
    switch (E->getLHS()->getType().getObjCLifetime()) {
    case Qualifiers::OCL_Strong:
      return EmitARCStoreStrong(E, /*ignored*/ false).first;

    case Qualifiers::OCL_Autoreleasing:
      return EmitARCStoreAutoreleasing(E).first;

    // No reason to do any of these differently.
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Weak:
      break;
    }

    RValue RV = EmitAnyExpr(E->getRHS());
    LValue LV = EmitCheckedLValue(E->getLHS(), TCK_Store);
    if (RV.isScalar())
      EmitNullabilityCheck(LV, RV.getScalarVal(), E->getExprLoc());
    EmitStoreThroughLValue(RV, LV);
    if (getLangOpts().OpenMP)
      CGM.getOpenMPRuntime().checkAndEmitLastprivateConditional(*this,
                                                                E->getLHS());
    return LV;
  }

  case TEK_Complex:
    return EmitComplexAssignmentLValue(E);

  case TEK_Aggregate:
    return EmitAggExprToLValue(E);
  }
  llvm_unreachable("bad evaluation kind");
}
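
// Illustrative sketch: expressions that reach EmitBinaryOperatorLValue above
// (assumed examples):
//
//   (f(), x) = 1;   // BO_Comma: LHS emitted for effects, RHS is the l-value
//   obj.*mp = 2;    // LHS is BO_PtrMemD -> EmitPointerToDataMemberBinaryExpr
//   (x = 3) = 4;    // BO_Assign itself used as an l-value (C++ only)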

LValue CodeGenFunction::EmitCallExprLValue(const CallExpr *E) {
  RValue RV = EmitCallExpr(E);

  if (!RV.isScalar())
    return MakeAddrLValue(RV.getAggregateAddress(), E->getType(),
                          AlignmentSource::Decl);

  assert(E->getCallReturnType(getContext())->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");

  return MakeNaturalAlignPointeeAddrLValue(RV.getScalarVal(), E->getType());
}

LValue CodeGenFunction::EmitVAArgExprLValue(const VAArgExpr *E) {
  // FIXME: This shouldn't require another copy.
  return EmitAggExprToLValue(E);
}

LValue CodeGenFunction::EmitCXXConstructLValue(const CXXConstructExpr *E) {
  assert(E->getType()->getAsCXXRecordDecl()->hasTrivialDestructor()
         && "binding l-value to type which needs a temporary");
  AggValueSlot Slot = CreateAggTemp(E->getType());
  EmitCXXConstructExpr(E, Slot);
  return MakeAddrLValue(Slot.getAddress(), E->getType(), AlignmentSource::Decl);
}

LValue
CodeGenFunction::EmitCXXTypeidLValue(const CXXTypeidExpr *E) {
  return MakeNaturalAlignAddrLValue(EmitCXXTypeidExpr(E), E->getType());
}

Address CodeGenFunction::EmitCXXUuidofExpr(const CXXUuidofExpr *E) {
  return Builder.CreateElementBitCast(CGM.GetAddrOfMSGuidDecl(E->getGuidDecl()),
                                      ConvertType(E->getType()));
}

LValue CodeGenFunction::EmitCXXUuidofLValue(const CXXUuidofExpr *E) {
  return MakeAddrLValue(EmitCXXUuidofExpr(E), E->getType(),
                        AlignmentSource::Decl);
}

LValue
CodeGenFunction::EmitCXXBindTemporaryLValue(const CXXBindTemporaryExpr *E) {
  AggValueSlot Slot = CreateAggTemp(E->getType(), "temp.lvalue");
  Slot.setExternallyDestructed();
  EmitAggExpr(E->getSubExpr(), Slot);
  EmitCXXTemporary(E->getTemporary(), E->getType(), Slot.getAddress());
  return MakeAddrLValue(Slot.getAddress(), E->getType(), AlignmentSource::Decl);
}

LValue CodeGenFunction::EmitObjCMessageExprLValue(const ObjCMessageExpr *E) {
  RValue RV = EmitObjCMessageExpr(E);

  if (!RV.isScalar())
    return MakeAddrLValue(RV.getAggregateAddress(), E->getType(),
                          AlignmentSource::Decl);

  assert(E->getMethodDecl()->getReturnType()->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");

  return MakeNaturalAlignPointeeAddrLValue(RV.getScalarVal(), E->getType());
}

LValue CodeGenFunction::EmitObjCSelectorLValue(const ObjCSelectorExpr *E) {
  Address V =
      CGM.getObjCRuntime().GetAddrOfSelector(*this, E->getSelector());
  return MakeAddrLValue(V, E->getType(), AlignmentSource::Decl);
}

llvm::Value *CodeGenFunction::EmitIvarOffset(const ObjCInterfaceDecl *Interface,
                                             const ObjCIvarDecl *Ivar) {
  return CGM.getObjCRuntime().EmitIvarOffset(*this, Interface, Ivar);
}

llvm::Value *
CodeGenFunction::EmitIvarOffsetAsPointerDiff(const ObjCInterfaceDecl *Interface,
                                             const ObjCIvarDecl *Ivar) {
  llvm::Value *OffsetValue = EmitIvarOffset(Interface, Ivar);
  QualType PointerDiffType = getContext().getPointerDiffType();
  return Builder.CreateZExtOrTrunc(OffsetValue,
                                   getTypes().ConvertType(PointerDiffType));
}

LValue CodeGenFunction::EmitLValueForIvar(QualType ObjectTy,
                                          llvm::Value *BaseValue,
                                          const ObjCIvarDecl *Ivar,
                                          unsigned CVRQualifiers) {
  return CGM.getObjCRuntime().EmitObjCValueForIvar(*this, ObjectTy, BaseValue,
                                                   Ivar, CVRQualifiers);
}

LValue CodeGenFunction::EmitObjCIvarRefLValue(const ObjCIvarRefExpr *E) {
  // FIXME: A lot of the code below could be shared with EmitMemberExpr.
  llvm::Value *BaseValue = nullptr;
  const Expr *BaseExpr = E->getBase();
  Qualifiers BaseQuals;
  QualType ObjectTy;
  if (E->isArrow()) {
    BaseValue = EmitScalarExpr(BaseExpr);
    ObjectTy = BaseExpr->getType()->getPointeeType();
    BaseQuals = ObjectTy.getQualifiers();
  } else {
    LValue BaseLV = EmitLValue(BaseExpr);
    BaseValue = BaseLV.getPointer(*this);
    ObjectTy = BaseExpr->getType();
    BaseQuals = ObjectTy.getQualifiers();
  }

  LValue LV =
      EmitLValueForIvar(ObjectTy, BaseValue, E->getDecl(),
                        BaseQuals.getCVRQualifiers());
  setObjCGCLValueClass(getContext(), E, LV);
  return LV;
}

LValue CodeGenFunction::EmitStmtExprLValue(const StmtExpr *E) {
  // We can only get an l-value for a statement expression returning an
  // aggregate type.
  RValue RV = EmitAnyExprToTemp(E);
  return MakeAddrLValue(RV.getAggregateAddress(), E->getType(),
                        AlignmentSource::Decl);
}
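
// Illustrative sketch: a GNU statement expression of aggregate type used as
// an l-value reaches EmitStmtExprLValue above (assumed example):
//
//   struct S { int a[4]; };
//   int x = ({ struct S s = {{1, 2, 3, 4}}; s; }).a[0];
//
// The expression is materialized into a temporary and that address is used.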

RValue CodeGenFunction::EmitCall(QualType CalleeType, const CGCallee &OrigCallee,
                                 const CallExpr *E, ReturnValueSlot ReturnValue,
                                 llvm::Value *Chain) {
  // Get the actual function type. The callee type will always be a pointer to
  // function type or a block pointer type.
  assert(CalleeType->isFunctionPointerType() &&
         "Call must have function pointer type!");

  const Decl *TargetDecl =
      OrigCallee.getAbstractInfo().getCalleeDecl().getDecl();

  CalleeType = getContext().getCanonicalType(CalleeType);

  auto PointeeType = cast<PointerType>(CalleeType)->getPointeeType();

  CGCallee Callee = OrigCallee;

  if (getLangOpts().CPlusPlus && SanOpts.has(SanitizerKind::Function) &&
      (!TargetDecl || !isa<FunctionDecl>(TargetDecl))) {
    if (llvm::Constant *PrefixSig =
            CGM.getTargetCodeGenInfo().getUBSanFunctionSignature(CGM)) {
      SanitizerScope SanScope(this);
      // Remove any (C++17) exception specifications, to allow calling e.g. a
      // noexcept function through a non-noexcept pointer.
      auto ProtoTy =
          getContext().getFunctionTypeWithExceptionSpec(PointeeType, EST_None);
      llvm::Constant *FTRTTIConst =
          CGM.GetAddrOfRTTIDescriptor(ProtoTy, /*ForEH=*/true);
      llvm::Type *PrefixSigType = PrefixSig->getType();
      llvm::StructType *PrefixStructTy = llvm::StructType::get(
          CGM.getLLVMContext(), {PrefixSigType, Int32Ty}, /*isPacked=*/true);

      llvm::Value *CalleePtr = Callee.getFunctionPointer();

      llvm::Value *CalleePrefixStruct = Builder.CreateBitCast(
          CalleePtr, llvm::PointerType::getUnqual(PrefixStructTy));
      llvm::Value *CalleeSigPtr =
          Builder.CreateConstGEP2_32(PrefixStructTy, CalleePrefixStruct, 0, 0);
      llvm::Value *CalleeSig =
          Builder.CreateAlignedLoad(PrefixSigType, CalleeSigPtr, getIntAlign());
      llvm::Value *CalleeSigMatch = Builder.CreateICmpEQ(CalleeSig, PrefixSig);

      llvm::BasicBlock *Cont = createBasicBlock("cont");
      llvm::BasicBlock *TypeCheck = createBasicBlock("typecheck");
      Builder.CreateCondBr(CalleeSigMatch, TypeCheck, Cont);

      EmitBlock(TypeCheck);
      llvm::Value *CalleeRTTIPtr =
          Builder.CreateConstGEP2_32(PrefixStructTy, CalleePrefixStruct, 0, 1);
      llvm::Value *CalleeRTTIEncoded =
          Builder.CreateAlignedLoad(Int32Ty, CalleeRTTIPtr, getPointerAlign());
      llvm::Value *CalleeRTTI =
          DecodeAddrUsedInPrologue(CalleePtr, CalleeRTTIEncoded);
      llvm::Value *CalleeRTTIMatch =
          Builder.CreateICmpEQ(CalleeRTTI, FTRTTIConst);
      llvm::Constant *StaticData[] = {EmitCheckSourceLocation(E->getBeginLoc()),
                                      EmitCheckTypeDescriptor(CalleeType)};
      EmitCheck(std::make_pair(CalleeRTTIMatch, SanitizerKind::Function),
                SanitizerHandler::FunctionTypeMismatch, StaticData,
                {CalleePtr, CalleeRTTI, FTRTTIConst});

      Builder.CreateBr(Cont);
      EmitBlock(Cont);
    }
  }
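
  // Illustrative sketch: with -fsanitize=function, an indirect call through a
  // wrongly typed pointer is what the prologue-signature check above catches
  // (assumed example):
  //
  //   void cb(int);
  //   auto *fp = reinterpret_cast<void (*)(float)>(&cb);
  //   fp(1.0f);   // runtime "function type mismatch" diagnostic
  //
  // The two prologue slots loaded above hold the magic signature and the
  // encoded RTTI pointer for the function's type.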

  const auto *FnType = cast<FunctionType>(PointeeType);

  // If we are checking indirect calls and this call is indirect, check that the
  // function pointer is a member of the bit set for the function type.
  if (SanOpts.has(SanitizerKind::CFIICall) &&
      (!TargetDecl || !isa<FunctionDecl>(TargetDecl))) {
    SanitizerScope SanScope(this);
    EmitSanitizerStatReport(llvm::SanStat_CFI_ICall);

    llvm::Metadata *MD;
    if (CGM.getCodeGenOpts().SanitizeCfiICallGeneralizePointers)
      MD = CGM.CreateMetadataIdentifierGeneralized(QualType(FnType, 0));
    else
      MD = CGM.CreateMetadataIdentifierForType(QualType(FnType, 0));

    llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);

    llvm::Value *CalleePtr = Callee.getFunctionPointer();
    llvm::Value *CastedCallee = Builder.CreateBitCast(CalleePtr, Int8PtrTy);
    llvm::Value *TypeTest = Builder.CreateCall(
        CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedCallee, TypeId});

    auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);
    llvm::Constant *StaticData[] = {
        llvm::ConstantInt::get(Int8Ty, CFITCK_ICall),
        EmitCheckSourceLocation(E->getBeginLoc()),
        EmitCheckTypeDescriptor(QualType(FnType, 0)),
    };
    if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) {
      EmitCfiSlowPathCheck(SanitizerKind::CFIICall, TypeTest, CrossDsoTypeId,
                           CastedCallee, StaticData);
    } else {
      EmitCheck(std::make_pair(TypeTest, SanitizerKind::CFIICall),
                SanitizerHandler::CFICheckFail, StaticData,
                {CastedCallee, llvm::UndefValue::get(IntPtrTy)});
    }
  }
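
  // Illustrative sketch of the IR produced by the CFI indirect-call check
  // above (shape only; names are assumed):
  //
  //   %ok = call i1 @llvm.type.test(i8* %callee, metadata !"_ZTSFviE")
  //   br i1 %ok, label %cont, label %fail   ; fail reaches the CFI handler
  //
  // The LowerTypeTests pass later resolves the type test against the jump
  // tables it builds for the address-taken functions of each type.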

  CallArgList Args;
  if (Chain)
    Args.add(RValue::get(Builder.CreateBitCast(Chain, CGM.VoidPtrTy)),
             CGM.getContext().VoidPtrTy);

  // C++17 requires that we evaluate arguments to a call using assignment syntax
  // right-to-left, and that we evaluate arguments to certain other operators
  // left-to-right. Note that we allow this to override the order dictated by
  // the calling convention on the MS ABI, which means that parameter
  // destruction order is not necessarily reverse construction order.
  // FIXME: Revisit this based on C++ committee response to unimplementability.
  EvaluationOrder Order = EvaluationOrder::Default;
  if (auto *OCE = dyn_cast<CXXOperatorCallExpr>(E)) {
    if (OCE->isAssignmentOp())
      Order = EvaluationOrder::ForceRightToLeft;
    else {
      switch (OCE->getOperator()) {
      case OO_LessLess:
      case OO_GreaterGreater:
      case OO_AmpAmp:
      case OO_PipePipe:
      case OO_Comma:
      case OO_ArrowStar:
        Order = EvaluationOrder::ForceLeftToRight;
        break;
      default:
        break;
      }
    }
  }
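
  // Illustrative sketch of the orders forced above (assumed examples):
  //
  //   s << f() << g();   // overloaded <<: f() must be evaluated before g()
  //   a = f();           // overloaded =: the RHS f() is evaluated before a
  //
  // C++17 fixed these orders for overloaded operators, so we cannot leave
  // them to the default argument-evaluation order.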

  EmitCallArgs(Args, dyn_cast<FunctionProtoType>(FnType), E->arguments(),
               E->getDirectCallee(), /*ParamsToSkip*/ 0, Order);

  const CGFunctionInfo &FnInfo = CGM.getTypes().arrangeFreeFunctionCall(
      Args, FnType, /*ChainCall=*/Chain);

  // C99 6.5.2.2p6:
  //   If the expression that denotes the called function has a type
  //   that does not include a prototype, [the default argument
  //   promotions are performed]. If the number of arguments does not
  //   equal the number of parameters, the behavior is undefined. If
  //   the function is defined with a type that includes a prototype,
  //   and either the prototype ends with an ellipsis (, ...) or the
  //   types of the arguments after promotion are not compatible with
  //   the types of the parameters, the behavior is undefined. If the
  //   function is defined with a type that does not include a
  //   prototype, and the types of the arguments after promotion are
  //   not compatible with those of the parameters after promotion,
  //   the behavior is undefined [except in some trivial cases].
  // That is, in the general case, we should assume that a call
  // through an unprototyped function type works like a *non-variadic*
  // call. The way we make this work is to cast to the exact type
  // of the promoted arguments.
  //
  // Chain calls use this same code path to add the invisible chain parameter
  // to the function type.
  if (isa<FunctionNoProtoType>(FnType) || Chain) {
    llvm::Type *CalleeTy = getTypes().GetFunctionType(FnInfo);
    int AS = Callee.getFunctionPointer()->getType()->getPointerAddressSpace();
    CalleeTy = CalleeTy->getPointerTo(AS);

    llvm::Value *CalleePtr = Callee.getFunctionPointer();
    CalleePtr = Builder.CreateBitCast(CalleePtr, CalleeTy, "callee.knr.cast");
    Callee.setFunctionPointer(CalleePtr);
  }
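
  // Illustrative sketch: the K&R cast above handles calls like (assumed C
  // example, prior to C23):
  //
  //   int f();          // no prototype
  //   ... f(1, 2.0);    // arguments receive the default promotions
  //
  // FnInfo was arranged from the *promoted* argument types, so the callee is
  // bitcast to the matching "i32 (i32, double)" function type for the call.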

  // In HIP, a function pointer to a kernel holds the kernel handle when used
  // in a triple-chevron launch; the kernel stub must be loaded from the
  // handle and used as the callee.
  if (CGM.getLangOpts().HIP && !CGM.getLangOpts().CUDAIsDevice &&
      isa<CUDAKernelCallExpr>(E) &&
      (!TargetDecl || !isa<FunctionDecl>(TargetDecl))) {
    llvm::Value *Handle = Callee.getFunctionPointer();
    auto *Cast =
        Builder.CreateBitCast(Handle, Handle->getType()->getPointerTo());
    auto *Stub = Builder.CreateLoad(
        Address(Cast, Handle->getType(), CGM.getPointerAlign()));
    Callee.setFunctionPointer(Stub);
  }
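
  // Illustrative sketch (assumed HIP host code): a kernel launched through a
  // function pointer takes the path above:
  //
  //   __global__ void k(int *p);
  //   auto *kp = k;
  //   kp<<<1, 1>>>(ptr);   // kp holds the kernel handle, not the stub
  //
  // The load replaces the handle with the stub so the host-side launch
  // machinery receives a callable function.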

  llvm::CallBase *CallOrInvoke = nullptr;
  RValue Call = EmitCall(FnInfo, Callee, ReturnValue, Args, &CallOrInvoke,
                         E == MustTailCall, E->getExprLoc());

  // Generate a function declaration DISubprogram so it can be referenced by
  // debug info for the call site.
  if (CGDebugInfo *DI = getDebugInfo()) {
    if (auto *CalleeDecl = dyn_cast_or_null<FunctionDecl>(TargetDecl)) {
      FunctionArgList Args;
      QualType ResTy = BuildFunctionArgList(CalleeDecl, Args);
      DI->EmitFuncDeclForCallSite(CallOrInvoke,
                                  DI->getFunctionType(CalleeDecl, ResTy, Args),
                                  CalleeDecl);
    }
  }

  return Call;
}

LValue CodeGenFunction::
EmitPointerToDataMemberBinaryExpr(const BinaryOperator *E) {
  Address BaseAddr = Address::invalid();
  if (E->getOpcode() == BO_PtrMemI) {
    BaseAddr = EmitPointerWithAlignment(E->getLHS());
  } else {
    BaseAddr = EmitLValue(E->getLHS()).getAddress(*this);
  }

  llvm::Value *OffsetV = EmitScalarExpr(E->getRHS());
  const auto *MPT = E->getRHS()->getType()->castAs<MemberPointerType>();

  LValueBaseInfo BaseInfo;
  TBAAAccessInfo TBAAInfo;
  Address MemberAddr =
      EmitCXXMemberDataPointerAddress(E, BaseAddr, OffsetV, MPT, &BaseInfo,
                                      &TBAAInfo);

  return MakeAddrLValue(MemberAddr, MPT->getPointeeType(), BaseInfo, TBAAInfo);
}
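
// Illustrative sketch: EmitPointerToDataMemberBinaryExpr above lowers .* and
// ->* applied to data members (assumed example):
//
//   struct S { int x; };
//   int load(S *s, int S::*mp) { return s->*mp; }
//
// On the Itanium C++ ABI a data member pointer is essentially a byte offset,
// so the result is the base address adjusted by EmitScalarExpr(mp), with base
// info and TBAA derived for the pointee type.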

/// Given the address of a temporary variable, produce an r-value of
/// its type.
RValue CodeGenFunction::convertTempToRValue(Address addr,
                                            QualType type,
                                            SourceLocation loc) {
  LValue lvalue = MakeAddrLValue(addr, type, AlignmentSource::Decl);
  switch (getEvaluationKind(type)) {
  case TEK_Complex:
    return RValue::getComplex(EmitLoadOfComplex(lvalue, loc));
  case TEK_Aggregate:
    return lvalue.asAggregateRValue(*this);
  case TEK_Scalar:
    return RValue::get(EmitLoadOfScalar(lvalue, loc));
  }
  llvm_unreachable("bad evaluation kind");
}

void CodeGenFunction::SetFPAccuracy(llvm::Value *Val, float Accuracy) {
  assert(Val->getType()->isFPOrFPVectorTy());
  if (Accuracy == 0.0 || !isa<llvm::Instruction>(Val))
    return;

  llvm::MDBuilder MDHelper(getLLVMContext());
  llvm::MDNode *Node = MDHelper.createFPMath(Accuracy);
  cast<llvm::Instruction>(Val)->setMetadata(llvm::LLVMContext::MD_fpmath, Node);
}
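
// Illustrative sketch: SetFPAccuracy above attaches !fpmath metadata, used
// for example for OpenCL's relaxed single-precision division. Shape of the
// resulting IR (names assumed):
//
//   %q = fdiv float %a, %b, !fpmath !0
//   !0 = !{float 2.500000e+00}   ; result may be up to 2.5 ULPs off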

namespace {
struct LValueOrRValue {
  LValue LV;
  RValue RV;
};
}

static LValueOrRValue emitPseudoObjectExpr(CodeGenFunction &CGF,
                                           const PseudoObjectExpr *E,
                                           bool forLValue,
                                           AggValueSlot slot) {
  SmallVector<CodeGenFunction::OpaqueValueMappingData, 4> opaques;

  // Find the result expression, if any.
  const Expr *resultExpr = E->getResultExpr();
  LValueOrRValue result;

  for (PseudoObjectExpr::const_semantics_iterator
         i = E->semantics_begin(), e = E->semantics_end(); i != e; ++i) {
    const Expr *semantic = *i;

    // If this semantic expression is an opaque value, bind it
    // to the result of its source expression.
    if (const auto *ov = dyn_cast<OpaqueValueExpr>(semantic)) {
      // Skip unique OVEs.
      if (ov->isUnique()) {
        assert(ov != resultExpr &&
               "A unique OVE cannot be used as the result expression");
        continue;
      }

      // If this is the result expression, we may need to evaluate
      // directly into the slot.
      typedef CodeGenFunction::OpaqueValueMappingData OVMA;
      OVMA opaqueData;
      if (ov == resultExpr && ov->isPRValue() && !forLValue &&
          CodeGenFunction::hasAggregateEvaluationKind(ov->getType())) {
        CGF.EmitAggExpr(ov->getSourceExpr(), slot);
        LValue LV = CGF.MakeAddrLValue(slot.getAddress(), ov->getType(),
                                       AlignmentSource::Decl);
        opaqueData = OVMA::bind(CGF, ov, LV);
        result.RV = slot.asRValue();

      // Otherwise, emit as normal.
      } else {
        opaqueData = OVMA::bind(CGF, ov, ov->getSourceExpr());

        // If this is the result, also evaluate the result now.
        if (ov == resultExpr) {
          if (forLValue)
            result.LV = CGF.EmitLValue(ov);
          else
            result.RV = CGF.EmitAnyExpr(ov, slot);
        }
      }

      opaques.push_back(opaqueData);

    // Otherwise, if the expression is the result, evaluate it
    // and remember the result.
    } else if (semantic == resultExpr) {
      if (forLValue)
        result.LV = CGF.EmitLValue(semantic);
      else
        result.RV = CGF.EmitAnyExpr(semantic, slot);

    // Otherwise, evaluate the expression in an ignored context.
    } else {
      CGF.EmitIgnoredExpr(semantic);
    }
  }

  // Unbind all the opaques now.
  for (unsigned i = 0, e = opaques.size(); i != e; ++i)
    opaques[i].unbind(CGF);

  return result;
}
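
// Illustrative sketch: a pseudo-object expression such as an Objective-C
// property compound assignment (assumed example):
//
//   obj.count += 1;
//
// is rewritten by Sema into semantic expressions roughly like
//
//   ov = obj; tmp = [ov count]; [ov setCount:tmp + 1]
//
// where ov is an OpaqueValueExpr. emitPseudoObjectExpr above binds each OVE
// to its source value once, evaluates the result expression into the slot
// when possible, and unbinds everything at the end.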

RValue CodeGenFunction::EmitPseudoObjectRValue(const PseudoObjectExpr *E,
                                               AggValueSlot slot) {
  return emitPseudoObjectExpr(*this, E, false, slot).RV;
}

LValue CodeGenFunction::EmitPseudoObjectLValue(const PseudoObjectExpr *E) {
  return emitPseudoObjectExpr(*this, E, true, AggValueSlot::ignored()).LV;
}