// ItaniumCXXABI.cpp — excerpt of a ~190 KB file; this capture covers only the beginning.

// [web-capture artifact removed: concatenated line-number gutter (1..4869), not source text]
  1. //===------- ItaniumCXXABI.cpp - Emit LLVM Code from ASTs for a Module ----===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This provides C++ code generation targeting the Itanium C++ ABI. The class
  10. // in this file generates structures that follow the Itanium C++ ABI, which is
  11. // documented at:
  12. // https://itanium-cxx-abi.github.io/cxx-abi/abi.html
  13. // https://itanium-cxx-abi.github.io/cxx-abi/abi-eh.html
  14. //
  15. // It also supports the closely-related ARM ABI, documented at:
  16. // https://developer.arm.com/documentation/ihi0041/g/
  17. //
  18. //===----------------------------------------------------------------------===//
  19. #include "CGCXXABI.h"
  20. #include "CGCleanup.h"
  21. #include "CGRecordLayout.h"
  22. #include "CGVTables.h"
  23. #include "CodeGenFunction.h"
  24. #include "CodeGenModule.h"
  25. #include "TargetInfo.h"
  26. #include "clang/AST/Attr.h"
  27. #include "clang/AST/Mangle.h"
  28. #include "clang/AST/StmtCXX.h"
  29. #include "clang/AST/Type.h"
  30. #include "clang/CodeGen/ConstantInitBuilder.h"
  31. #include "llvm/IR/DataLayout.h"
  32. #include "llvm/IR/GlobalValue.h"
  33. #include "llvm/IR/Instructions.h"
  34. #include "llvm/IR/Intrinsics.h"
  35. #include "llvm/IR/Value.h"
  36. #include "llvm/Support/ScopedPrinter.h"
  37. using namespace clang;
  38. using namespace CodeGen;
  39. namespace {
/// C++ code generation for the Itanium C++ ABI (and, via the flags below,
/// the closely related ARM variant).
class ItaniumCXXABI : public CodeGen::CGCXXABI {
  /// VTables - All the vtables which have been defined.
  llvm::DenseMap<const CXXRecordDecl *, llvm::GlobalVariable *> VTables;

  /// All the thread wrapper functions that have been used.
  llvm::SmallVector<std::pair<const VarDecl *, llvm::Function *>, 8>
      ThreadWrappers;

protected:
  /// Use the ARM C++ ABI's member-function-pointer representation where it
  /// differs from generic Itanium.
  bool UseARMMethodPtrABI;
  /// Use the ARM C++ ABI's flavor of static-local guard variables.
  bool UseARMGuardVarABI;
  /// Whether vtable offsets in member pointers are 32-bit; defaults to
  /// false here and is presumably enabled by derived ABIs — confirm in
  /// their constructors (not visible in this excerpt).
  bool Use32BitVTableOffsetABI;
  50. ItaniumMangleContext &getMangleContext() {
  51. return cast<ItaniumMangleContext>(CodeGen::CGCXXABI::getMangleContext());
  52. }
public:
  /// \param CGM                the module being emitted.
  /// \param UseARMMethodPtrABI use the ARM variant of the member function
  ///                           pointer representation.
  /// \param UseARMGuardVarABI  use the ARM variant of static-local guard
  ///                           variables.
  /// Use32BitVTableOffsetABI always starts false; derived ABIs may set it.
  ItaniumCXXABI(CodeGen::CodeGenModule &CGM,
                bool UseARMMethodPtrABI = false,
                bool UseARMGuardVarABI = false) :
    CGCXXABI(CGM), UseARMMethodPtrABI(UseARMMethodPtrABI),
    UseARMGuardVarABI(UseARMGuardVarABI),
    Use32BitVTableOffsetABI(false) { }

  bool classifyReturnType(CGFunctionInfo &FI) const override;
  61. RecordArgABI getRecordArgABI(const CXXRecordDecl *RD) const override {
  62. // If C++ prohibits us from making a copy, pass by address.
  63. if (!RD->canPassInRegisters())
  64. return RAA_Indirect;
  65. return RAA_Default;
  66. }
  67. bool isThisCompleteObject(GlobalDecl GD) const override {
  68. // The Itanium ABI has separate complete-object vs. base-object
  69. // variants of both constructors and destructors.
  70. if (isa<CXXDestructorDecl>(GD.getDecl())) {
  71. switch (GD.getDtorType()) {
  72. case Dtor_Complete:
  73. case Dtor_Deleting:
  74. return true;
  75. case Dtor_Base:
  76. return false;
  77. case Dtor_Comdat:
  78. llvm_unreachable("emitting dtor comdat as function?");
  79. }
  80. llvm_unreachable("bad dtor kind");
  81. }
  82. if (isa<CXXConstructorDecl>(GD.getDecl())) {
  83. switch (GD.getCtorType()) {
  84. case Ctor_Complete:
  85. return true;
  86. case Ctor_Base:
  87. return false;
  88. case Ctor_CopyingClosure:
  89. case Ctor_DefaultClosure:
  90. llvm_unreachable("closure ctors in Itanium ABI?");
  91. case Ctor_Comdat:
  92. llvm_unreachable("emitting ctor comdat as function?");
  93. }
  94. llvm_unreachable("bad dtor kind");
  95. }
  96. // No other kinds.
  97. return false;
  98. }
  //===--- Member pointers --------------------------------------------------//
  // Representation, conversion, comparison and dereference of pointers to
  // members (data and function).  Definitions live later in this file.

  bool isZeroInitializable(const MemberPointerType *MPT) override;

  llvm::Type *ConvertMemberPointerType(const MemberPointerType *MPT) override;

  CGCallee
  EmitLoadOfMemberFunctionPointer(CodeGenFunction &CGF,
                                  const Expr *E,
                                  Address This,
                                  llvm::Value *&ThisPtrForCall,
                                  llvm::Value *MemFnPtr,
                                  const MemberPointerType *MPT) override;

  llvm::Value *
  EmitMemberDataPointerAddress(CodeGenFunction &CGF, const Expr *E,
                               Address Base,
                               llvm::Value *MemPtr,
                               const MemberPointerType *MPT) override;

  // Two overloads: the first converts a runtime value, the second folds a
  // constant member pointer at compile time.
  llvm::Value *EmitMemberPointerConversion(CodeGenFunction &CGF,
                                           const CastExpr *E,
                                           llvm::Value *Src) override;
  llvm::Constant *EmitMemberPointerConversion(const CastExpr *E,
                                              llvm::Constant *Src) override;

  llvm::Constant *EmitNullMemberPointer(const MemberPointerType *MPT) override;

  llvm::Constant *EmitMemberFunctionPointer(const CXXMethodDecl *MD) override;
  llvm::Constant *EmitMemberDataPointer(const MemberPointerType *MPT,
                                        CharUnits offset) override;
  llvm::Constant *EmitMemberPointer(const APValue &MP, QualType MPT) override;
  // Non-override helper: build the constant for a method pointer with an
  // explicit this-adjustment.
  llvm::Constant *BuildMemberPointer(const CXXMethodDecl *MD,
                                     CharUnits ThisAdjustment);

  llvm::Value *EmitMemberPointerComparison(CodeGenFunction &CGF,
                                           llvm::Value *L, llvm::Value *R,
                                           const MemberPointerType *MPT,
                                           bool Inequality) override;

  llvm::Value *EmitMemberPointerIsNotNull(CodeGenFunction &CGF,
                                          llvm::Value *Addr,
                                          const MemberPointerType *MPT) override;

  //===--- Exception handling ----------------------------------------------//

  void emitVirtualObjectDelete(CodeGenFunction &CGF, const CXXDeleteExpr *DE,
                               Address Ptr, QualType ElementType,
                               const CXXDestructorDecl *Dtor) override;

  void emitRethrow(CodeGenFunction &CGF, bool isNoReturn) override;
  void emitThrow(CodeGenFunction &CGF, const CXXThrowExpr *E) override;

  void emitBeginCatch(CodeGenFunction &CGF, const CXXCatchStmt *C) override;

  llvm::CallInst *
  emitTerminateForUnexpectedException(CodeGenFunction &CGF,
                                      llvm::Value *Exn) override;

  //===--- RTTI ------------------------------------------------------------//

  void EmitFundamentalRTTIDescriptors(const CXXRecordDecl *RD);
  llvm::Constant *getAddrOfRTTIDescriptor(QualType Ty) override;
  /// For Itanium, a catch clause matches using the type's ordinary RTTI
  /// descriptor; no separate handler-type object is needed and the flags
  /// field is always 0.
  CatchTypeInfo
  getAddrOfCXXCatchHandlerType(QualType Ty,
                               QualType CatchHandlerType) override {
    return CatchTypeInfo{getAddrOfRTTIDescriptor(Ty), 0};
  }
  //===--- typeid / dynamic_cast -------------------------------------------//

  bool shouldTypeidBeNullChecked(bool IsDeref, QualType SrcRecordTy) override;
  void EmitBadTypeidCall(CodeGenFunction &CGF) override;
  llvm::Value *EmitTypeid(CodeGenFunction &CGF, QualType SrcRecordTy,
                          Address ThisPtr,
                          llvm::Type *StdTypeInfoPtrTy) override;

  bool shouldDynamicCastCallBeNullChecked(bool SrcIsPtr,
                                          QualType SrcRecordTy) override;

  llvm::Value *EmitDynamicCastCall(CodeGenFunction &CGF, Address Value,
                                   QualType SrcRecordTy, QualType DestTy,
                                   QualType DestRecordTy,
                                   llvm::BasicBlock *CastEnd) override;

  llvm::Value *EmitDynamicCastToVoid(CodeGenFunction &CGF, Address Value,
                                     QualType SrcRecordTy,
                                     QualType DestTy) override;

  bool EmitBadCastCall(CodeGenFunction &CGF) override;

  llvm::Value *
  GetVirtualBaseClassOffset(CodeGenFunction &CGF, Address This,
                            const CXXRecordDecl *ClassDecl,
                            const CXXRecordDecl *BaseClassDecl) override;

  //===--- Constructors ----------------------------------------------------//

  void EmitCXXConstructors(const CXXConstructorDecl *D) override;

  AddedStructorArgCounts
  buildStructorSignature(GlobalDecl GD,
                         SmallVectorImpl<CanQualType> &ArgTys) override;
  171. bool useThunkForDtorVariant(const CXXDestructorDecl *Dtor,
  172. CXXDtorType DT) const override {
  173. // Itanium does not emit any destructor variant as an inline thunk.
  174. // Delegating may occur as an optimization, but all variants are either
  175. // emitted with external linkage or as linkonce if they are inline and used.
  176. return false;
  177. }
  //===--- Destructors and structor emission -------------------------------//

  void EmitCXXDestructors(const CXXDestructorDecl *D) override;

  // Adds the ABI's implicit structor parameters (e.g. a VTT) to the
  // signature being built.
  void addImplicitStructorParams(CodeGenFunction &CGF, QualType &ResTy,
                                 FunctionArgList &Params) override;

  void EmitInstanceFunctionProlog(CodeGenFunction &CGF) override;

  AddedStructorArgs getImplicitConstructorArgs(CodeGenFunction &CGF,
                                               const CXXConstructorDecl *D,
                                               CXXCtorType Type,
                                               bool ForVirtualBase,
                                               bool Delegating) override;

  llvm::Value *getCXXDestructorImplicitParam(CodeGenFunction &CGF,
                                             const CXXDestructorDecl *DD,
                                             CXXDtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating) override;

  void EmitDestructorCall(CodeGenFunction &CGF, const CXXDestructorDecl *DD,
                          CXXDtorType Type, bool ForVirtualBase,
                          bool Delegating, Address This,
                          QualType ThisTy) override;

  //===--- VTables ---------------------------------------------------------//

  void emitVTableDefinitions(CodeGenVTables &CGVT,
                             const CXXRecordDecl *RD) override;

  bool isVirtualOffsetNeededForVTableField(CodeGenFunction &CGF,
                                           CodeGenFunction::VPtr Vptr) override;
  /// In this ABI, constructors and destructors themselves initialize the
  /// object's vptrs, so always answer true.
  bool doStructorsInitializeVPtrs(const CXXRecordDecl *VTableClass) override {
    return true;
  }
  // Address points: the offset into a vtable that a vptr for a particular
  // base subobject actually points at.

  llvm::Constant *
  getVTableAddressPoint(BaseSubobject Base,
                        const CXXRecordDecl *VTableClass) override;

  llvm::Value *getVTableAddressPointInStructor(
      CodeGenFunction &CGF, const CXXRecordDecl *VTableClass,
      BaseSubobject Base, const CXXRecordDecl *NearestVBase) override;

  // Non-override helper used when a VTT parameter supplies the address
  // point inside a base-subobject structor.
  llvm::Value *getVTableAddressPointInStructorWithVTT(
      CodeGenFunction &CGF, const CXXRecordDecl *VTableClass,
      BaseSubobject Base, const CXXRecordDecl *NearestVBase);

  llvm::Constant *
  getVTableAddressPointForConstExpr(BaseSubobject Base,
                                    const CXXRecordDecl *VTableClass) override;

  llvm::GlobalVariable *getAddrOfVTable(const CXXRecordDecl *RD,
                                        CharUnits VPtrOffset) override;

  CGCallee getVirtualFunctionPointer(CodeGenFunction &CGF, GlobalDecl GD,
                                     Address This, llvm::Type *Ty,
                                     SourceLocation Loc) override;

  llvm::Value *EmitVirtualDestructorCall(CodeGenFunction &CGF,
                                         const CXXDestructorDecl *Dtor,
                                         CXXDtorType DtorType, Address This,
                                         DeleteOrMemberCallExpr E) override;

  void emitVirtualInheritanceTables(const CXXRecordDecl *RD) override;

  // Speculative vtable emission: see hasAnyUnusedVirtualInlineFunction
  // below for the condition that can block it.
  bool canSpeculativelyEmitVTable(const CXXRecordDecl *RD) const override;
  bool canSpeculativelyEmitVTableAsBaseClass(const CXXRecordDecl *RD) const;
  /// Set the linkage and GV properties of an emitted thunk.
  // Allow inlining of thunks by emitting them with available_externally
  // linkage together with vtables when needed.
  void setThunkLinkage(llvm::Function *Thunk, bool ForVTable, GlobalDecl GD,
                       bool ReturnAdjustment) override {
    if (ForVTable && !Thunk->hasLocalLinkage())
      Thunk->setLinkage(llvm::GlobalValue::AvailableExternallyLinkage);
    // Propagate visibility/DSO-locality from the decl onto the thunk.
    CGM.setGVProperties(Thunk, GD);
  }
  /// Thunks are part of the ABI surface here, so they may be exported.
  bool exportThunk() override { return true; }

  llvm::Value *performThisAdjustment(CodeGenFunction &CGF, Address This,
                                     const ThisAdjustment &TA) override;

  llvm::Value *performReturnAdjustment(CodeGenFunction &CGF, Address Ret,
                                       const ReturnAdjustment &RA) override;

  /// The source object of a copy constructor is the final entry of the
  /// argument list in this ABI, so return the last index.
  size_t getSrcArgforCopyCtor(const CXXConstructorDecl *,
                              FunctionArgList &Args) const override {
    assert(!Args.empty() && "expected the arglist to not be empty!");
    return Args.size() - 1;
  }

  // Itanium EH runtime entry points called for pure/deleted virtual calls.
  StringRef GetPureVirtualCallName() override { return "__cxa_pure_virtual"; }
  StringRef GetDeletedVirtualCallName() override
    { return "__cxa_deleted_virtual"; }
  //===--- Array cookies ----------------------------------------------------//
  // The cookie stores the element count for operator delete[].

  CharUnits getArrayCookieSizeImpl(QualType elementType) override;
  Address InitializeArrayCookie(CodeGenFunction &CGF,
                                Address NewPtr,
                                llvm::Value *NumElements,
                                const CXXNewExpr *expr,
                                QualType ElementType) override;
  llvm::Value *readArrayCookieImpl(CodeGenFunction &CGF,
                                   Address allocPtr,
                                   CharUnits cookieSize) override;

  //===--- Static-local and global initialization ---------------------------//

  void EmitGuardedInit(CodeGenFunction &CGF, const VarDecl &D,
                       llvm::GlobalVariable *DeclPtr,
                       bool PerformInit) override;
  void registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
                          llvm::FunctionCallee dtor,
                          llvm::Constant *addr) override;

  //===--- Thread-local variables -------------------------------------------//

  // Returns (creating on first use) the wrapper function that lazily
  // initializes a C++ thread_local variable; used entries are recorded in
  // ThreadWrappers.
  llvm::Function *getOrCreateThreadLocalWrapper(const VarDecl *VD,
                                                llvm::Value *Val);
  void EmitThreadLocalInitFuncs(
      CodeGenModule &CGM,
      ArrayRef<const VarDecl *> CXXThreadLocals,
      ArrayRef<llvm::Function *> CXXThreadLocalInits,
      ArrayRef<const VarDecl *> CXXThreadLocalInitVars) override;
  /// A thread_local variable needs the wrapper-function indirection unless
  /// it was emitted with a constant initializer AND needs no destruction —
  /// in that case the access can go straight to the variable.
  bool usesThreadWrapperFunction(const VarDecl *VD) const override {
    return !isEmittedWithConstantInitializer(VD) ||
           mayNeedDestruction(VD);
  }
  274. LValue EmitThreadLocalVarDeclLValue(CodeGenFunction &CGF, const VarDecl *VD,
  275. QualType LValType) override;
  276. bool NeedsVTTParameter(GlobalDecl GD) override;
  /**************************** RTTI Uniqueness ******************************/
protected:
  /// Returns true if the ABI requires RTTI type_info objects to be unique
  /// across a program.
  virtual bool shouldRTTIBeUnique() const { return true; }

public:
  /// What sort of unique-RTTI behavior should we use?
  enum RTTIUniquenessKind {
    /// We are guaranteeing, or need to guarantee, that the RTTI string
    /// is unique.
    RUK_Unique,

    /// We are not guaranteeing uniqueness for the RTTI string, so we
    /// can demote to hidden visibility but must use string comparisons.
    RUK_NonUniqueHidden,

    /// We are not guaranteeing uniqueness for the RTTI string, so we
    /// have to use string comparisons, but we also have to emit it with
    /// non-hidden visibility.
    RUK_NonUniqueVisible
  };

  /// Return the required visibility status for the given type and linkage in
  /// the current ABI.
  RTTIUniquenessKind
  classifyRTTIUniqueness(QualType CanTy,
                         llvm::GlobalValue::LinkageTypes Linkage) const;
  friend class ItaniumRTTIBuilder;

  void emitCXXStructor(GlobalDecl GD) override;

  std::pair<llvm::Value *, const CXXRecordDecl *>
  LoadVTablePtr(CodeGenFunction &CGF, Address This,
                const CXXRecordDecl *RD) override;
private:
  /// Returns true if \p RD's vtable references at least one inline virtual
  /// function that has not yet been emitted (or exists only as a
  /// declaration) in this module.
  bool hasAnyUnusedVirtualInlineFunction(const CXXRecordDecl *RD) const {
    const auto &VtableLayout =
        CGM.getItaniumVTableContext().getVTableLayout(RD);

    for (const auto &VtableComponent : VtableLayout.vtable_components()) {
      // Skip empty slot.
      if (!VtableComponent.isUsedFunctionPointerKind())
        continue;

      const CXXMethodDecl *Method = VtableComponent.getFunctionDecl();
      // Only inline virtual functions matter here; out-of-line definitions
      // are handled by normal emission.
      if (!Method->getCanonicalDecl()->isInlined())
        continue;

      StringRef Name = CGM.getMangledName(VtableComponent.getGlobalDecl());
      auto *Entry = CGM.GetGlobalValue(Name);

      // This checks if virtual inline function has already been emitted.
      // Note that it is possible that this inline function would be emitted
      // after trying to emit vtable speculatively. Because of this we do
      // an extra pass after emitting all deferred vtables to find and emit
      // these vtables opportunistically.
      if (!Entry || Entry->isDeclaration())
        return true;
    }
    return false;
  }
  /// Returns true if any component of \p RD's vtable is hidden: either the
  /// RTTI object has hidden visibility, or a used virtual method is hidden
  /// and not defined (so no local definition could be referenced).
  bool isVTableHidden(const CXXRecordDecl *RD) const {
    const auto &VtableLayout =
        CGM.getItaniumVTableContext().getVTableLayout(RD);

    for (const auto &VtableComponent : VtableLayout.vtable_components()) {
      if (VtableComponent.isRTTIKind()) {
        const CXXRecordDecl *RTTIDecl = VtableComponent.getRTTIDecl();
        if (RTTIDecl->getVisibility() == Visibility::HiddenVisibility)
          return true;
      } else if (VtableComponent.isUsedFunctionPointerKind()) {
        const CXXMethodDecl *Method = VtableComponent.getFunctionDecl();
        if (Method->getVisibility() == Visibility::HiddenVisibility &&
            !Method->isDefined())
          return true;
      }
    }
    return false;
  }
};
/// ARM flavor of the Itanium ABI: enables the ARM member-pointer and
/// guard-variable encodings, constructors/destructors return 'this', and
/// array cookies differ (see the overrides below).
class ARMCXXABI : public ItaniumCXXABI {
public:
  ARMCXXABI(CodeGen::CodeGenModule &CGM) :
    ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
                  /*UseARMGuardVarABI=*/true) {}

  bool constructorsAndDestructorsReturnThis() const override { return true; }

  void EmitReturnFromThunk(CodeGenFunction &CGF, RValue RV,
                           QualType ResTy) override;

  // ARM-specific array cookie layout.
  CharUnits getArrayCookieSizeImpl(QualType elementType) override;
  Address InitializeArrayCookie(CodeGenFunction &CGF,
                                Address NewPtr,
                                llvm::Value *NumElements,
                                const CXXNewExpr *expr,
                                QualType ElementType) override;
  llvm::Value *readArrayCookieImpl(CodeGenFunction &CGF, Address allocPtr,
                                   CharUnits cookieSize) override;
};
/// Apple ARM64: the ARM variant, plus 32-bit vtable offsets in member
/// function pointers and non-unique RTTI.
class AppleARM64CXXABI : public ARMCXXABI {
public:
  AppleARM64CXXABI(CodeGen::CodeGenModule &CGM) : ARMCXXABI(CGM) {
    Use32BitVTableOffsetABI = true;
  }

  // ARM64 libraries are prepared for non-unique RTTI.
  bool shouldRTTIBeUnique() const override { return false; }
};
/// Fuchsia: generic Itanium, except constructors and destructors return
/// 'this' (as in the ARM variant).
class FuchsiaCXXABI final : public ItaniumCXXABI {
public:
  explicit FuchsiaCXXABI(CodeGen::CodeGenModule &CGM)
      : ItaniumCXXABI(CGM) {}

private:
  bool constructorsAndDestructorsReturnThis() const override { return true; }
};
/// WebAssembly: uses the ARM member-pointer/guard encodings, ctors/dtors
/// return 'this', and calls through mismatched function types are not
/// permitted (wasm traps on signature mismatch).
class WebAssemblyCXXABI final : public ItaniumCXXABI {
public:
  explicit WebAssemblyCXXABI(CodeGen::CodeGenModule &CGM)
      : ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
                      /*UseARMGuardVarABI=*/true) {}
  void emitBeginCatch(CodeGenFunction &CGF, const CXXCatchStmt *C) override;
  llvm::CallInst *
  emitTerminateForUnexpectedException(CodeGenFunction &CGF,
                                      llvm::Value *Exn) override;

private:
  bool constructorsAndDestructorsReturnThis() const override { return true; }
  bool canCallMismatchedFunctionType() const override { return false; }
};
/// XL: generic Itanium, but static initialization/finalization uses the
/// sinit/sterm function scheme.
class XLCXXABI final : public ItaniumCXXABI {
public:
  explicit XLCXXABI(CodeGen::CodeGenModule &CGM)
      : ItaniumCXXABI(CGM) {}

  void registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
                          llvm::FunctionCallee dtor,
                          llvm::Constant *addr) override;

  bool useSinitAndSterm() const override { return true; }

private:
  // Emits the sterm finalizer that runs \p dtorStub for \p D at program
  // termination.
  void emitCXXStermFinalizer(const VarDecl &D, llvm::Function *dtorStub,
                             llvm::Constant *addr);
};

} // anonymous namespace
  405. CodeGen::CGCXXABI *CodeGen::CreateItaniumCXXABI(CodeGenModule &CGM) {
  406. switch (CGM.getContext().getCXXABIKind()) {
  407. // For IR-generation purposes, there's no significant difference
  408. // between the ARM and iOS ABIs.
  409. case TargetCXXABI::GenericARM:
  410. case TargetCXXABI::iOS:
  411. case TargetCXXABI::WatchOS:
  412. return new ARMCXXABI(CGM);
  413. case TargetCXXABI::AppleARM64:
  414. return new AppleARM64CXXABI(CGM);
  415. case TargetCXXABI::Fuchsia:
  416. return new FuchsiaCXXABI(CGM);
  417. // Note that AArch64 uses the generic ItaniumCXXABI class since it doesn't
  418. // include the other 32-bit ARM oddities: constructor/destructor return values
  419. // and array cookies.
  420. case TargetCXXABI::GenericAArch64:
  421. return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
  422. /*UseARMGuardVarABI=*/true);
  423. case TargetCXXABI::GenericMIPS:
  424. return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true);
  425. case TargetCXXABI::WebAssembly:
  426. return new WebAssemblyCXXABI(CGM);
  427. case TargetCXXABI::XL:
  428. return new XLCXXABI(CGM);
  429. case TargetCXXABI::GenericItanium:
  430. if (CGM.getContext().getTargetInfo().getTriple().getArch()
  431. == llvm::Triple::le32) {
  432. // For PNaCl, use ARM-style method pointers so that PNaCl code
  433. // does not assume anything about the alignment of function
  434. // pointers.
  435. return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true);
  436. }
  437. return new ItaniumCXXABI(CGM);
  438. case TargetCXXABI::Microsoft:
  439. llvm_unreachable("Microsoft ABI is not Itanium-based");
  440. }
  441. llvm_unreachable("bad ABI kind");
  442. }
  443. llvm::Type *
  444. ItaniumCXXABI::ConvertMemberPointerType(const MemberPointerType *MPT) {
  445. if (MPT->isMemberDataPointer())
  446. return CGM.PtrDiffTy;
  447. return llvm::StructType::get(CGM.PtrDiffTy, CGM.PtrDiffTy);
  448. }
  449. /// In the Itanium and ARM ABIs, method pointers have the form:
  450. /// struct { ptrdiff_t ptr; ptrdiff_t adj; } memptr;
  451. ///
  452. /// In the Itanium ABI:
  453. /// - method pointers are virtual if (memptr.ptr & 1) is nonzero
  454. /// - the this-adjustment is (memptr.adj)
  455. /// - the virtual offset is (memptr.ptr - 1)
  456. ///
  457. /// In the ARM ABI:
  458. /// - method pointers are virtual if (memptr.adj & 1) is nonzero
  459. /// - the this-adjustment is (memptr.adj >> 1)
  460. /// - the virtual offset is (memptr.ptr)
  461. /// ARM uses 'adj' for the virtual flag because Thumb functions
  462. /// may be only single-byte aligned.
  463. ///
  464. /// If the member is virtual, the adjusted 'this' pointer points
  465. /// to a vtable pointer from which the virtual offset is applied.
  466. ///
  467. /// If the member is non-virtual, memptr.ptr is the address of
  468. /// the function to call.
  469. CGCallee ItaniumCXXABI::EmitLoadOfMemberFunctionPointer(
  470. CodeGenFunction &CGF, const Expr *E, Address ThisAddr,
  471. llvm::Value *&ThisPtrForCall,
  472. llvm::Value *MemFnPtr, const MemberPointerType *MPT) {
  473. CGBuilderTy &Builder = CGF.Builder;
  474. const FunctionProtoType *FPT =
  475. MPT->getPointeeType()->getAs<FunctionProtoType>();
  476. auto *RD =
  477. cast<CXXRecordDecl>(MPT->getClass()->castAs<RecordType>()->getDecl());
  478. llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(
  479. CGM.getTypes().arrangeCXXMethodType(RD, FPT, /*FD=*/nullptr));
  480. llvm::Constant *ptrdiff_1 = llvm::ConstantInt::get(CGM.PtrDiffTy, 1);
  481. llvm::BasicBlock *FnVirtual = CGF.createBasicBlock("memptr.virtual");
  482. llvm::BasicBlock *FnNonVirtual = CGF.createBasicBlock("memptr.nonvirtual");
  483. llvm::BasicBlock *FnEnd = CGF.createBasicBlock("memptr.end");
  484. // Extract memptr.adj, which is in the second field.
  485. llvm::Value *RawAdj = Builder.CreateExtractValue(MemFnPtr, 1, "memptr.adj");
  486. // Compute the true adjustment.
  487. llvm::Value *Adj = RawAdj;
  488. if (UseARMMethodPtrABI)
  489. Adj = Builder.CreateAShr(Adj, ptrdiff_1, "memptr.adj.shifted");
  490. // Apply the adjustment and cast back to the original struct type
  491. // for consistency.
  492. llvm::Value *This = ThisAddr.getPointer();
  493. llvm::Value *Ptr = Builder.CreateBitCast(This, Builder.getInt8PtrTy());
  494. Ptr = Builder.CreateInBoundsGEP(Builder.getInt8Ty(), Ptr, Adj);
  495. This = Builder.CreateBitCast(Ptr, This->getType(), "this.adjusted");
  496. ThisPtrForCall = This;
  497. // Load the function pointer.
  498. llvm::Value *FnAsInt = Builder.CreateExtractValue(MemFnPtr, 0, "memptr.ptr");
  499. // If the LSB in the function pointer is 1, the function pointer points to
  500. // a virtual function.
  501. llvm::Value *IsVirtual;
  502. if (UseARMMethodPtrABI)
  503. IsVirtual = Builder.CreateAnd(RawAdj, ptrdiff_1);
  504. else
  505. IsVirtual = Builder.CreateAnd(FnAsInt, ptrdiff_1);
  506. IsVirtual = Builder.CreateIsNotNull(IsVirtual, "memptr.isvirtual");
  507. Builder.CreateCondBr(IsVirtual, FnVirtual, FnNonVirtual);
  508. // In the virtual path, the adjustment left 'This' pointing to the
  509. // vtable of the correct base subobject. The "function pointer" is an
  510. // offset within the vtable (+1 for the virtual flag on non-ARM).
  511. CGF.EmitBlock(FnVirtual);
  512. // Cast the adjusted this to a pointer to vtable pointer and load.
  513. llvm::Type *VTableTy = Builder.getInt8PtrTy();
  514. CharUnits VTablePtrAlign =
  515. CGF.CGM.getDynamicOffsetAlignment(ThisAddr.getAlignment(), RD,
  516. CGF.getPointerAlign());
  517. llvm::Value *VTable = CGF.GetVTablePtr(
  518. Address(This, ThisAddr.getElementType(), VTablePtrAlign), VTableTy, RD);
  519. // Apply the offset.
  520. // On ARM64, to reserve extra space in virtual member function pointers,
  521. // we only pay attention to the low 32 bits of the offset.
  522. llvm::Value *VTableOffset = FnAsInt;
  523. if (!UseARMMethodPtrABI)
  524. VTableOffset = Builder.CreateSub(VTableOffset, ptrdiff_1);
  525. if (Use32BitVTableOffsetABI) {
  526. VTableOffset = Builder.CreateTrunc(VTableOffset, CGF.Int32Ty);
  527. VTableOffset = Builder.CreateZExt(VTableOffset, CGM.PtrDiffTy);
  528. }
  529. // Check the address of the function pointer if CFI on member function
  530. // pointers is enabled.
  531. llvm::Constant *CheckSourceLocation;
  532. llvm::Constant *CheckTypeDesc;
  533. bool ShouldEmitCFICheck = CGF.SanOpts.has(SanitizerKind::CFIMFCall) &&
  534. CGM.HasHiddenLTOVisibility(RD);
  535. bool ShouldEmitVFEInfo = CGM.getCodeGenOpts().VirtualFunctionElimination &&
  536. CGM.HasHiddenLTOVisibility(RD);
  537. bool ShouldEmitWPDInfo =
  538. CGM.getCodeGenOpts().WholeProgramVTables &&
  539. // Don't insert type tests if we are forcing public visibility.
  540. !CGM.AlwaysHasLTOVisibilityPublic(RD);
  541. llvm::Value *VirtualFn = nullptr;
  542. {
  543. CodeGenFunction::SanitizerScope SanScope(&CGF);
  544. llvm::Value *TypeId = nullptr;
  545. llvm::Value *CheckResult = nullptr;
  546. if (ShouldEmitCFICheck || ShouldEmitVFEInfo || ShouldEmitWPDInfo) {
  547. // If doing CFI, VFE or WPD, we will need the metadata node to check
  548. // against.
  549. llvm::Metadata *MD =
  550. CGM.CreateMetadataIdentifierForVirtualMemPtrType(QualType(MPT, 0));
  551. TypeId = llvm::MetadataAsValue::get(CGF.getLLVMContext(), MD);
  552. }
  553. if (ShouldEmitVFEInfo) {
  554. llvm::Value *VFPAddr =
  555. Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
  556. // If doing VFE, load from the vtable with a type.checked.load intrinsic
  557. // call. Note that we use the GEP to calculate the address to load from
  558. // and pass 0 as the offset to the intrinsic. This is because every
  559. // vtable slot of the correct type is marked with matching metadata, and
  560. // we know that the load must be from one of these slots.
  561. llvm::Value *CheckedLoad = Builder.CreateCall(
  562. CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
  563. {VFPAddr, llvm::ConstantInt::get(CGM.Int32Ty, 0), TypeId});
  564. CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);
  565. VirtualFn = Builder.CreateExtractValue(CheckedLoad, 0);
  566. VirtualFn = Builder.CreateBitCast(VirtualFn, FTy->getPointerTo(),
  567. "memptr.virtualfn");
  568. } else {
  569. // When not doing VFE, emit a normal load, as it allows more
  570. // optimisations than type.checked.load.
  571. if (ShouldEmitCFICheck || ShouldEmitWPDInfo) {
  572. llvm::Value *VFPAddr =
  573. Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
  574. llvm::Intrinsic::ID IID = CGM.HasHiddenLTOVisibility(RD)
  575. ? llvm::Intrinsic::type_test
  576. : llvm::Intrinsic::public_type_test;
  577. CheckResult = Builder.CreateCall(
  578. CGM.getIntrinsic(IID),
  579. {Builder.CreateBitCast(VFPAddr, CGF.Int8PtrTy), TypeId});
  580. }
  581. if (CGM.getItaniumVTableContext().isRelativeLayout()) {
  582. VirtualFn = CGF.Builder.CreateCall(
  583. CGM.getIntrinsic(llvm::Intrinsic::load_relative,
  584. {VTableOffset->getType()}),
  585. {VTable, VTableOffset});
  586. VirtualFn = CGF.Builder.CreateBitCast(VirtualFn, FTy->getPointerTo());
  587. } else {
  588. llvm::Value *VFPAddr =
  589. CGF.Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
  590. VFPAddr = CGF.Builder.CreateBitCast(
  591. VFPAddr, FTy->getPointerTo()->getPointerTo());
  592. VirtualFn = CGF.Builder.CreateAlignedLoad(
  593. FTy->getPointerTo(), VFPAddr, CGF.getPointerAlign(),
  594. "memptr.virtualfn");
  595. }
  596. }
  597. assert(VirtualFn && "Virtual fuction pointer not created!");
  598. assert((!ShouldEmitCFICheck || !ShouldEmitVFEInfo || !ShouldEmitWPDInfo ||
  599. CheckResult) &&
  600. "Check result required but not created!");
  601. if (ShouldEmitCFICheck) {
  602. // If doing CFI, emit the check.
  603. CheckSourceLocation = CGF.EmitCheckSourceLocation(E->getBeginLoc());
  604. CheckTypeDesc = CGF.EmitCheckTypeDescriptor(QualType(MPT, 0));
  605. llvm::Constant *StaticData[] = {
  606. llvm::ConstantInt::get(CGF.Int8Ty, CodeGenFunction::CFITCK_VMFCall),
  607. CheckSourceLocation,
  608. CheckTypeDesc,
  609. };
  610. if (CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIMFCall)) {
  611. CGF.EmitTrapCheck(CheckResult, SanitizerHandler::CFICheckFail);
  612. } else {
  613. llvm::Value *AllVtables = llvm::MetadataAsValue::get(
  614. CGM.getLLVMContext(),
  615. llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
  616. llvm::Value *ValidVtable = Builder.CreateCall(
  617. CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, AllVtables});
  618. CGF.EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIMFCall),
  619. SanitizerHandler::CFICheckFail, StaticData,
  620. {VTable, ValidVtable});
  621. }
  622. FnVirtual = Builder.GetInsertBlock();
  623. }
  624. } // End of sanitizer scope
  625. CGF.EmitBranch(FnEnd);
  626. // In the non-virtual path, the function pointer is actually a
  627. // function pointer.
  628. CGF.EmitBlock(FnNonVirtual);
  629. llvm::Value *NonVirtualFn =
  630. Builder.CreateIntToPtr(FnAsInt, FTy->getPointerTo(), "memptr.nonvirtualfn");
  631. // Check the function pointer if CFI on member function pointers is enabled.
  632. if (ShouldEmitCFICheck) {
  633. CXXRecordDecl *RD = MPT->getClass()->getAsCXXRecordDecl();
  634. if (RD->hasDefinition()) {
  635. CodeGenFunction::SanitizerScope SanScope(&CGF);
  636. llvm::Constant *StaticData[] = {
  637. llvm::ConstantInt::get(CGF.Int8Ty, CodeGenFunction::CFITCK_NVMFCall),
  638. CheckSourceLocation,
  639. CheckTypeDesc,
  640. };
  641. llvm::Value *Bit = Builder.getFalse();
  642. llvm::Value *CastedNonVirtualFn =
  643. Builder.CreateBitCast(NonVirtualFn, CGF.Int8PtrTy);
  644. for (const CXXRecordDecl *Base : CGM.getMostBaseClasses(RD)) {
  645. llvm::Metadata *MD = CGM.CreateMetadataIdentifierForType(
  646. getContext().getMemberPointerType(
  647. MPT->getPointeeType(),
  648. getContext().getRecordType(Base).getTypePtr()));
  649. llvm::Value *TypeId =
  650. llvm::MetadataAsValue::get(CGF.getLLVMContext(), MD);
  651. llvm::Value *TypeTest =
  652. Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
  653. {CastedNonVirtualFn, TypeId});
  654. Bit = Builder.CreateOr(Bit, TypeTest);
  655. }
  656. CGF.EmitCheck(std::make_pair(Bit, SanitizerKind::CFIMFCall),
  657. SanitizerHandler::CFICheckFail, StaticData,
  658. {CastedNonVirtualFn, llvm::UndefValue::get(CGF.IntPtrTy)});
  659. FnNonVirtual = Builder.GetInsertBlock();
  660. }
  661. }
  662. // We're done.
  663. CGF.EmitBlock(FnEnd);
  664. llvm::PHINode *CalleePtr = Builder.CreatePHI(FTy->getPointerTo(), 2);
  665. CalleePtr->addIncoming(VirtualFn, FnVirtual);
  666. CalleePtr->addIncoming(NonVirtualFn, FnNonVirtual);
  667. CGCallee Callee(FPT, CalleePtr);
  668. return Callee;
  669. }
  670. /// Compute an l-value by applying the given pointer-to-member to a
  671. /// base object.
  672. llvm::Value *ItaniumCXXABI::EmitMemberDataPointerAddress(
  673. CodeGenFunction &CGF, const Expr *E, Address Base, llvm::Value *MemPtr,
  674. const MemberPointerType *MPT) {
  675. assert(MemPtr->getType() == CGM.PtrDiffTy);
  676. CGBuilderTy &Builder = CGF.Builder;
  677. // Cast to char*.
  678. Base = Builder.CreateElementBitCast(Base, CGF.Int8Ty);
  679. // Apply the offset, which we assume is non-null.
  680. llvm::Value *Addr = Builder.CreateInBoundsGEP(
  681. Base.getElementType(), Base.getPointer(), MemPtr, "memptr.offset");
  682. // Cast the address to the appropriate pointer type, adopting the
  683. // address space of the base pointer.
  684. llvm::Type *PType = CGF.ConvertTypeForMem(MPT->getPointeeType())
  685. ->getPointerTo(Base.getAddressSpace());
  686. return Builder.CreateBitCast(Addr, PType);
  687. }
/// Perform a bitcast, derived-to-base, or base-to-derived member pointer
/// conversion.
///
/// Bitcast conversions are always a no-op under Itanium.
///
/// Obligatory offset/adjustment diagram:
///         <-- offset -->          <-- adjustment -->
///   |--------------------------|----------------------|--------------------|
///   ^Derived address point     ^Base address point    ^Member address point
///
/// So when converting a base member pointer to a derived member pointer,
/// we add the offset to the adjustment because the address point has
/// decreased;  and conversely, when converting a derived MP to a base MP
/// we subtract the offset from the adjustment because the address point
/// has increased.
///
/// The standard forbids (at compile time) conversion to and from
/// virtual bases, which is why we don't have to consider them here.
///
/// The standard forbids (at run time) casting a derived MP to a base
/// MP when the derived MP does not point to a member of the base.
/// This is why -1 is a reasonable choice for null data member
/// pointers.
llvm::Value *
ItaniumCXXABI::EmitMemberPointerConversion(CodeGenFunction &CGF,
                                           const CastExpr *E,
                                           llvm::Value *src) {
  assert(E->getCastKind() == CK_DerivedToBaseMemberPointer ||
         E->getCastKind() == CK_BaseToDerivedMemberPointer ||
         E->getCastKind() == CK_ReinterpretMemberPointer);

  // Under Itanium, reinterprets don't require any additional processing.
  if (E->getCastKind() == CK_ReinterpretMemberPointer) return src;

  // Use constant emission if we can.
  if (isa<llvm::Constant>(src))
    return EmitMemberPointerConversion(E, cast<llvm::Constant>(src));

  // A null adjustment means the conversion changes nothing at runtime.
  llvm::Constant *adj = getMemberPointerAdjustment(E);
  if (!adj) return src;

  CGBuilderTy &Builder = CGF.Builder;
  bool isDerivedToBase = (E->getCastKind() == CK_DerivedToBaseMemberPointer);

  const MemberPointerType *destTy =
    E->getType()->castAs<MemberPointerType>();

  // For member data pointers, this is just a matter of adding the
  // offset if the source is non-null.
  if (destTy->isMemberDataPointer()) {
    llvm::Value *dst;
    if (isDerivedToBase)
      dst = Builder.CreateNSWSub(src, adj, "adj");
    else
      dst = Builder.CreateNSWAdd(src, adj, "adj");

    // Null check: the null data member pointer (-1) must stay null.
    llvm::Value *null = llvm::Constant::getAllOnesValue(src->getType());
    llvm::Value *isNull = Builder.CreateICmpEQ(src, null, "memptr.isnull");
    return Builder.CreateSelect(isNull, src, dst);
  }

  // The this-adjustment is left-shifted by 1 on ARM (low bit is the
  // virtual flag there).
  if (UseARMMethodPtrABI) {
    uint64_t offset = cast<llvm::ConstantInt>(adj)->getZExtValue();
    offset <<= 1;
    adj = llvm::ConstantInt::get(adj->getType(), offset);
  }

  // For member function pointers the adjustment lives in the second field.
  llvm::Value *srcAdj = Builder.CreateExtractValue(src, 1, "src.adj");
  llvm::Value *dstAdj;
  if (isDerivedToBase)
    dstAdj = Builder.CreateNSWSub(srcAdj, adj, "adj");
  else
    dstAdj = Builder.CreateNSWAdd(srcAdj, adj, "adj");

  return Builder.CreateInsertValue(src, dstAdj, 1);
}
  756. llvm::Constant *
  757. ItaniumCXXABI::EmitMemberPointerConversion(const CastExpr *E,
  758. llvm::Constant *src) {
  759. assert(E->getCastKind() == CK_DerivedToBaseMemberPointer ||
  760. E->getCastKind() == CK_BaseToDerivedMemberPointer ||
  761. E->getCastKind() == CK_ReinterpretMemberPointer);
  762. // Under Itanium, reinterprets don't require any additional processing.
  763. if (E->getCastKind() == CK_ReinterpretMemberPointer) return src;
  764. // If the adjustment is trivial, we don't need to do anything.
  765. llvm::Constant *adj = getMemberPointerAdjustment(E);
  766. if (!adj) return src;
  767. bool isDerivedToBase = (E->getCastKind() == CK_DerivedToBaseMemberPointer);
  768. const MemberPointerType *destTy =
  769. E->getType()->castAs<MemberPointerType>();
  770. // For member data pointers, this is just a matter of adding the
  771. // offset if the source is non-null.
  772. if (destTy->isMemberDataPointer()) {
  773. // null maps to null.
  774. if (src->isAllOnesValue()) return src;
  775. if (isDerivedToBase)
  776. return llvm::ConstantExpr::getNSWSub(src, adj);
  777. else
  778. return llvm::ConstantExpr::getNSWAdd(src, adj);
  779. }
  780. // The this-adjustment is left-shifted by 1 on ARM.
  781. if (UseARMMethodPtrABI) {
  782. uint64_t offset = cast<llvm::ConstantInt>(adj)->getZExtValue();
  783. offset <<= 1;
  784. adj = llvm::ConstantInt::get(adj->getType(), offset);
  785. }
  786. llvm::Constant *srcAdj = src->getAggregateElement(1);
  787. llvm::Constant *dstAdj;
  788. if (isDerivedToBase)
  789. dstAdj = llvm::ConstantExpr::getNSWSub(srcAdj, adj);
  790. else
  791. dstAdj = llvm::ConstantExpr::getNSWAdd(srcAdj, adj);
  792. llvm::Constant *res = ConstantFoldInsertValueInstruction(src, dstAdj, 1);
  793. assert(res != nullptr && "Folding must succeed");
  794. return res;
  795. }
  796. llvm::Constant *
  797. ItaniumCXXABI::EmitNullMemberPointer(const MemberPointerType *MPT) {
  798. // Itanium C++ ABI 2.3:
  799. // A NULL pointer is represented as -1.
  800. if (MPT->isMemberDataPointer())
  801. return llvm::ConstantInt::get(CGM.PtrDiffTy, -1ULL, /*isSigned=*/true);
  802. llvm::Constant *Zero = llvm::ConstantInt::get(CGM.PtrDiffTy, 0);
  803. llvm::Constant *Values[2] = { Zero, Zero };
  804. return llvm::ConstantStruct::getAnon(Values);
  805. }
/// Build the constant representation of a pointer to data member at the
/// given byte \p offset within its class.
llvm::Constant *
ItaniumCXXABI::EmitMemberDataPointer(const MemberPointerType *MPT,
                                     CharUnits offset) {
  // Itanium C++ ABI 2.3:
  //   A pointer to data member is an offset from the base address of
  //   the class object containing it, represented as a ptrdiff_t
  return llvm::ConstantInt::get(CGM.PtrDiffTy, offset.getQuantity());
}
/// Build a member function pointer constant for \p MD with no
/// this-adjustment (the common, direct case).
llvm::Constant *
ItaniumCXXABI::EmitMemberFunctionPointer(const CXXMethodDecl *MD) {
  return BuildMemberPointer(MD, CharUnits::Zero());
}
/// Build the { ptr, adj } constant pair encoding a pointer to member
/// function \p MD with the given \p ThisAdjustment, following the Itanium
/// or ARM encoding depending on UseARMMethodPtrABI.
llvm::Constant *ItaniumCXXABI::BuildMemberPointer(const CXXMethodDecl *MD,
                                                  CharUnits ThisAdjustment) {
  assert(MD->isInstance() && "Member function must not be static!");

  CodeGenTypes &Types = CGM.getTypes();

  // Get the function pointer (or index if this is a virtual function).
  llvm::Constant *MemPtr[2];
  if (MD->isVirtual()) {
    uint64_t Index = CGM.getItaniumVTableContext().getMethodVTableIndex(MD);

    uint64_t VTableOffset;
    if (CGM.getItaniumVTableContext().isRelativeLayout()) {
      // Multiply by 4-byte relative offsets.
      VTableOffset = Index * 4;
    } else {
      // Classic layout: vtable slots are pointer-sized.
      const ASTContext &Context = getContext();
      CharUnits PointerWidth = Context.toCharUnitsFromBits(
          Context.getTargetInfo().getPointerWidth(LangAS::Default));
      VTableOffset = Index * PointerWidth.getQuantity();
    }

    if (UseARMMethodPtrABI) {
      // ARM C++ ABI 3.2.1:
      //   This ABI specifies that adj contains twice the this
      //   adjustment, plus 1 if the member function is virtual. The
      //   least significant bit of adj then makes exactly the same
      //   discrimination as the least significant bit of ptr does for
      //   Itanium.
      MemPtr[0] = llvm::ConstantInt::get(CGM.PtrDiffTy, VTableOffset);
      MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy,
                                         2 * ThisAdjustment.getQuantity() + 1);
    } else {
      // Itanium C++ ABI 2.3:
      //   For a virtual function, [the pointer field] is 1 plus the
      //   virtual table offset (in bytes) of the function,
      //   represented as a ptrdiff_t.
      MemPtr[0] = llvm::ConstantInt::get(CGM.PtrDiffTy, VTableOffset + 1);
      MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy,
                                         ThisAdjustment.getQuantity());
    }
  } else {
    const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
    llvm::Type *Ty;
    // Check whether the function has a computable LLVM signature.
    if (Types.isFuncTypeConvertible(FPT)) {
      // The function has a computable LLVM signature; use the correct type.
      Ty = Types.GetFunctionType(Types.arrangeCXXMethodDeclaration(MD));
    } else {
      // Use an arbitrary non-function type to tell GetAddrOfFunction that the
      // function type is incomplete.
      Ty = CGM.PtrDiffTy;
    }
    llvm::Constant *addr = CGM.GetAddrOfFunction(MD, Ty);

    MemPtr[0] = llvm::ConstantExpr::getPtrToInt(addr, CGM.PtrDiffTy);
    // On ARM the adjustment field holds twice the adjustment (low bit is the
    // virtual flag, zero here).
    MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy,
                                       (UseARMMethodPtrABI ? 2 : 1) *
                                       ThisAdjustment.getQuantity());
  }

  return llvm::ConstantStruct::getAnon(MemPtr);
}
  875. llvm::Constant *ItaniumCXXABI::EmitMemberPointer(const APValue &MP,
  876. QualType MPType) {
  877. const MemberPointerType *MPT = MPType->castAs<MemberPointerType>();
  878. const ValueDecl *MPD = MP.getMemberPointerDecl();
  879. if (!MPD)
  880. return EmitNullMemberPointer(MPT);
  881. CharUnits ThisAdjustment = getContext().getMemberPointerPathAdjustment(MP);
  882. if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MPD))
  883. return BuildMemberPointer(MD, ThisAdjustment);
  884. CharUnits FieldOffset =
  885. getContext().toCharUnitsFromBits(getContext().getFieldOffset(MPD));
  886. return EmitMemberDataPointer(MPT, ThisAdjustment + FieldOffset);
  887. }
/// The comparison algorithm is pretty easy: the member pointers are
/// the same if they're either bitwise identical *or* both null.
///
/// ARM is different here only because null-ness is more complicated.
llvm::Value *
ItaniumCXXABI::EmitMemberPointerComparison(CodeGenFunction &CGF,
                                           llvm::Value *L,
                                           llvm::Value *R,
                                           const MemberPointerType *MPT,
                                           bool Inequality) {
  CGBuilderTy &Builder = CGF.Builder;

  // For inequality we emit the De Morgan dual of the equality formula:
  // swap EQ<->NE and And<->Or and the same structure works.
  llvm::ICmpInst::Predicate Eq;
  llvm::Instruction::BinaryOps And, Or;
  if (Inequality) {
    Eq = llvm::ICmpInst::ICMP_NE;
    And = llvm::Instruction::Or;
    Or = llvm::Instruction::And;
  } else {
    Eq = llvm::ICmpInst::ICMP_EQ;
    And = llvm::Instruction::And;
    Or = llvm::Instruction::Or;
  }

  // Member data pointers are easy because there's a unique null
  // value, so it just comes down to bitwise equality.
  if (MPT->isMemberDataPointer())
    return Builder.CreateICmp(Eq, L, R);

  // For member function pointers, the tautologies are more complex.
  // The Itanium tautology is:
  //   (L == R) <==> (L.ptr == R.ptr && (L.ptr == 0 || L.adj == R.adj))
  // The ARM tautology is:
  //   (L == R) <==> (L.ptr == R.ptr &&
  //                  (L.adj == R.adj ||
  //                   (L.ptr == 0 && ((L.adj|R.adj) & 1) == 0)))
  // The inequality tautologies have exactly the same structure, except
  // applying De Morgan's laws.

  llvm::Value *LPtr = Builder.CreateExtractValue(L, 0, "lhs.memptr.ptr");
  llvm::Value *RPtr = Builder.CreateExtractValue(R, 0, "rhs.memptr.ptr");

  // This condition tests whether L.ptr == R.ptr.  This must always be
  // true for equality to hold.
  llvm::Value *PtrEq = Builder.CreateICmp(Eq, LPtr, RPtr, "cmp.ptr");

  // This condition, together with the assumption that L.ptr == R.ptr,
  // tests whether the pointers are both null.  ARM imposes an extra
  // condition.
  llvm::Value *Zero = llvm::Constant::getNullValue(LPtr->getType());
  llvm::Value *EqZero = Builder.CreateICmp(Eq, LPtr, Zero, "cmp.ptr.null");

  // This condition tests whether L.adj == R.adj.  If this isn't
  // true, the pointers are unequal unless they're both null.
  llvm::Value *LAdj = Builder.CreateExtractValue(L, 1, "lhs.memptr.adj");
  llvm::Value *RAdj = Builder.CreateExtractValue(R, 1, "rhs.memptr.adj");
  llvm::Value *AdjEq = Builder.CreateICmp(Eq, LAdj, RAdj, "cmp.adj");

  // Null member function pointers on ARM clear the low bit of Adj,
  // so the zero condition has to check that neither low bit is set.
  if (UseARMMethodPtrABI) {
    llvm::Value *One = llvm::ConstantInt::get(LPtr->getType(), 1);

    // Compute (l.adj | r.adj) & 1 and test it against zero.
    llvm::Value *OrAdj = Builder.CreateOr(LAdj, RAdj, "or.adj");
    llvm::Value *OrAdjAnd1 = Builder.CreateAnd(OrAdj, One);
    llvm::Value *OrAdjAnd1EqZero = Builder.CreateICmp(Eq, OrAdjAnd1, Zero,
                                                      "cmp.or.adj");
    EqZero = Builder.CreateBinOp(And, EqZero, OrAdjAnd1EqZero);
  }

  // Tie together all our conditions.
  llvm::Value *Result = Builder.CreateBinOp(Or, EqZero, AdjEq);
  Result = Builder.CreateBinOp(And, PtrEq, Result,
                               Inequality ? "memptr.ne" : "memptr.eq");
  return Result;
}
/// Emit the boolean conversion of a member pointer: true iff the pointer
/// is not the null member pointer.
llvm::Value *
ItaniumCXXABI::EmitMemberPointerIsNotNull(CodeGenFunction &CGF,
                                          llvm::Value *MemPtr,
                                          const MemberPointerType *MPT) {
  CGBuilderTy &Builder = CGF.Builder;

  /// For member data pointers, this is just a check against -1.
  if (MPT->isMemberDataPointer()) {
    assert(MemPtr->getType() == CGM.PtrDiffTy);
    llvm::Value *NegativeOne =
      llvm::Constant::getAllOnesValue(MemPtr->getType());
    return Builder.CreateICmpNE(MemPtr, NegativeOne, "memptr.tobool");
  }

  // In Itanium, a member function pointer is not null if 'ptr' is not null.
  llvm::Value *Ptr = Builder.CreateExtractValue(MemPtr, 0, "memptr.ptr");
  llvm::Constant *Zero = llvm::ConstantInt::get(Ptr->getType(), 0);
  llvm::Value *Result = Builder.CreateICmpNE(Ptr, Zero, "memptr.tobool");

  // On ARM, a member function pointer is also non-null if the low bit of
  // 'adj' (the virtual bit) is set: virtual functions store their vtable
  // offset in 'ptr', which may legitimately be zero.
  if (UseARMMethodPtrABI) {
    llvm::Constant *One = llvm::ConstantInt::get(Ptr->getType(), 1);
    llvm::Value *Adj = Builder.CreateExtractValue(MemPtr, 1, "memptr.adj");
    llvm::Value *VirtualBit = Builder.CreateAnd(Adj, One, "memptr.virtualbit");
    llvm::Value *IsVirtual = Builder.CreateICmpNE(VirtualBit, Zero,
                                                  "memptr.isvirtual");
    Result = Builder.CreateOr(Result, IsVirtual);
  }
  return Result;
}
  983. bool ItaniumCXXABI::classifyReturnType(CGFunctionInfo &FI) const {
  984. const CXXRecordDecl *RD = FI.getReturnType()->getAsCXXRecordDecl();
  985. if (!RD)
  986. return false;
  987. // If C++ prohibits us from making a copy, return by address.
  988. if (!RD->canPassInRegisters()) {
  989. auto Align = CGM.getContext().getTypeAlignInChars(FI.getReturnType());
  990. FI.getReturnInfo() = ABIArgInfo::getIndirect(Align, /*ByVal=*/false);
  991. return true;
  992. }
  993. return false;
  994. }
/// The Itanium ABI requires non-zero initialization only for data
/// member pointers, for which '0' is a valid offset.
bool ItaniumCXXABI::isZeroInitializable(const MemberPointerType *MPT) {
  // Null data member pointers are encoded as -1, so an all-zero pattern
  // would be a live pointer (offset 0). Function pointers use ptr == 0 as
  // null, so those are safely zero-initializable.
  return MPT->isMemberFunctionPointer();
}
/// The Itanium ABI always places an offset to the complete object
/// at entry -2 in the vtable.
void ItaniumCXXABI::emitVirtualObjectDelete(CodeGenFunction &CGF,
                                            const CXXDeleteExpr *DE,
                                            Address Ptr,
                                            QualType ElementType,
                                            const CXXDestructorDecl *Dtor) {
  bool UseGlobalDelete = DE->isGlobalDelete();
  if (UseGlobalDelete) {
    // Derive the complete-object pointer, which is what we need
    // to pass to the deallocation function.

    // Grab the vtable pointer as an intptr_t*.
    auto *ClassDecl =
        cast<CXXRecordDecl>(ElementType->castAs<RecordType>()->getDecl());
    llvm::Value *VTable =
        CGF.GetVTablePtr(Ptr, CGF.IntPtrTy->getPointerTo(), ClassDecl);

    // Track back to entry -2 and pull out the offset there.
    llvm::Value *OffsetPtr = CGF.Builder.CreateConstInBoundsGEP1_64(
        CGF.IntPtrTy, VTable, -2, "complete-offset.ptr");
    llvm::Value *Offset = CGF.Builder.CreateAlignedLoad(CGF.IntPtrTy, OffsetPtr,
                                                        CGF.getPointerAlign());

    // Apply the offset: this adjusts 'this' down to the most-derived object.
    llvm::Value *CompletePtr =
        CGF.Builder.CreateBitCast(Ptr.getPointer(), CGF.Int8PtrTy);
    CompletePtr =
        CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, CompletePtr, Offset);

    // If we're supposed to call the global delete, make sure we do so
    // even if the destructor throws. The cleanup is popped below after
    // the destructor call.
    CGF.pushCallObjectDeleteCleanup(DE->getOperatorDelete(), CompletePtr,
                                    ElementType);
  }

  // FIXME: Provide a source location here even though there's no
  // CXXMemberCallExpr for dtor call.
  // With a global delete, call the complete dtor (deallocation is handled by
  // the cleanup pushed above); otherwise the deleting dtor both destroys and
  // frees the object.
  CXXDtorType DtorType = UseGlobalDelete ? Dtor_Complete : Dtor_Deleting;
  EmitVirtualDestructorCall(CGF, Dtor, DtorType, Ptr, DE);

  if (UseGlobalDelete)
    CGF.PopCleanupBlock();
}
  1037. void ItaniumCXXABI::emitRethrow(CodeGenFunction &CGF, bool isNoReturn) {
  1038. // void __cxa_rethrow();
  1039. llvm::FunctionType *FTy =
  1040. llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  1041. llvm::FunctionCallee Fn = CGM.CreateRuntimeFunction(FTy, "__cxa_rethrow");
  1042. if (isNoReturn)
  1043. CGF.EmitNoreturnRuntimeCallOrInvoke(Fn, std::nullopt);
  1044. else
  1045. CGF.EmitRuntimeCallOrInvoke(Fn);
  1046. }
  1047. static llvm::FunctionCallee getAllocateExceptionFn(CodeGenModule &CGM) {
  1048. // void *__cxa_allocate_exception(size_t thrown_size);
  1049. llvm::FunctionType *FTy =
  1050. llvm::FunctionType::get(CGM.Int8PtrTy, CGM.SizeTy, /*isVarArg=*/false);
  1051. return CGM.CreateRuntimeFunction(FTy, "__cxa_allocate_exception");
  1052. }
  1053. static llvm::FunctionCallee getThrowFn(CodeGenModule &CGM) {
  1054. // void __cxa_throw(void *thrown_exception, std::type_info *tinfo,
  1055. // void (*dest) (void *));
  1056. llvm::Type *Args[3] = { CGM.Int8PtrTy, CGM.Int8PtrTy, CGM.Int8PtrTy };
  1057. llvm::FunctionType *FTy =
  1058. llvm::FunctionType::get(CGM.VoidTy, Args, /*isVarArg=*/false);
  1059. return CGM.CreateRuntimeFunction(FTy, "__cxa_throw");
  1060. }
/// Emit a 'throw <expr>': allocate the exception object with
/// __cxa_allocate_exception, construct the thrown value into it, then call
/// __cxa_throw with the object, its RTTI, and (if non-trivial) its dtor.
void ItaniumCXXABI::emitThrow(CodeGenFunction &CGF, const CXXThrowExpr *E) {
  QualType ThrowType = E->getSubExpr()->getType();
  // Now allocate the exception object.
  llvm::Type *SizeTy = CGF.ConvertType(getContext().getSizeType());
  uint64_t TypeSize = getContext().getTypeSizeInChars(ThrowType).getQuantity();

  llvm::FunctionCallee AllocExceptionFn = getAllocateExceptionFn(CGM);
  llvm::CallInst *ExceptionPtr = CGF.EmitNounwindRuntimeCall(
      AllocExceptionFn, llvm::ConstantInt::get(SizeTy, TypeSize), "exception");

  // Construct the thrown value directly into the runtime-allocated buffer.
  CharUnits ExnAlign = CGF.getContext().getExnObjectAlignment();
  CGF.EmitAnyExprToExn(
      E->getSubExpr(), Address(ExceptionPtr, CGM.Int8Ty, ExnAlign));

  // Now throw the exception.
  llvm::Constant *TypeInfo = CGM.GetAddrOfRTTIDescriptor(ThrowType,
                                                         /*ForEH=*/true);

  // The address of the destructor. If the exception type has a
  // trivial destructor (or isn't a record), we just pass null.
  llvm::Constant *Dtor = nullptr;
  if (const RecordType *RecordTy = ThrowType->getAs<RecordType>()) {
    CXXRecordDecl *Record = cast<CXXRecordDecl>(RecordTy->getDecl());
    if (!Record->hasTrivialDestructor()) {
      // __cxa_throw runs the complete-object destructor on cleanup.
      CXXDestructorDecl *DtorD = Record->getDestructor();
      Dtor = CGM.getAddrOfCXXStructor(GlobalDecl(DtorD, Dtor_Complete));
      Dtor = llvm::ConstantExpr::getBitCast(Dtor, CGM.Int8PtrTy);
    }
  }
  if (!Dtor) Dtor = llvm::Constant::getNullValue(CGM.Int8PtrTy);

  // __cxa_throw never returns.
  llvm::Value *args[] = { ExceptionPtr, TypeInfo, Dtor };
  CGF.EmitNoreturnRuntimeCallOrInvoke(getThrowFn(CGM), args);
}
  1090. static llvm::FunctionCallee getItaniumDynamicCastFn(CodeGenFunction &CGF) {
  1091. // void *__dynamic_cast(const void *sub,
  1092. // const abi::__class_type_info *src,
  1093. // const abi::__class_type_info *dst,
  1094. // std::ptrdiff_t src2dst_offset);
  1095. llvm::Type *Int8PtrTy = CGF.Int8PtrTy;
  1096. llvm::Type *PtrDiffTy =
  1097. CGF.ConvertType(CGF.getContext().getPointerDiffType());
  1098. llvm::Type *Args[4] = { Int8PtrTy, Int8PtrTy, Int8PtrTy, PtrDiffTy };
  1099. llvm::FunctionType *FTy = llvm::FunctionType::get(Int8PtrTy, Args, false);
  1100. // Mark the function as nounwind readonly.
  1101. llvm::AttrBuilder FuncAttrs(CGF.getLLVMContext());
  1102. FuncAttrs.addAttribute(llvm::Attribute::NoUnwind);
  1103. FuncAttrs.addMemoryAttr(llvm::MemoryEffects::readOnly());
  1104. llvm::AttributeList Attrs = llvm::AttributeList::get(
  1105. CGF.getLLVMContext(), llvm::AttributeList::FunctionIndex, FuncAttrs);
  1106. return CGF.CGM.CreateRuntimeFunction(FTy, "__dynamic_cast", Attrs);
  1107. }
  1108. static llvm::FunctionCallee getBadCastFn(CodeGenFunction &CGF) {
  1109. // void __cxa_bad_cast();
  1110. llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false);
  1111. return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_cast");
  1112. }
  1113. /// Compute the src2dst_offset hint as described in the
  1114. /// Itanium C++ ABI [2.9.7]
  1115. static CharUnits computeOffsetHint(ASTContext &Context,
  1116. const CXXRecordDecl *Src,
  1117. const CXXRecordDecl *Dst) {
  1118. CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
  1119. /*DetectVirtual=*/false);
  1120. // If Dst is not derived from Src we can skip the whole computation below and
  1121. // return that Src is not a public base of Dst. Record all inheritance paths.
  1122. if (!Dst->isDerivedFrom(Src, Paths))
  1123. return CharUnits::fromQuantity(-2ULL);
  1124. unsigned NumPublicPaths = 0;
  1125. CharUnits Offset;
  1126. // Now walk all possible inheritance paths.
  1127. for (const CXXBasePath &Path : Paths) {
  1128. if (Path.Access != AS_public) // Ignore non-public inheritance.
  1129. continue;
  1130. ++NumPublicPaths;
  1131. for (const CXXBasePathElement &PathElement : Path) {
  1132. // If the path contains a virtual base class we can't give any hint.
  1133. // -1: no hint.
  1134. if (PathElement.Base->isVirtual())
  1135. return CharUnits::fromQuantity(-1ULL);
  1136. if (NumPublicPaths > 1) // Won't use offsets, skip computation.
  1137. continue;
  1138. // Accumulate the base class offsets.
  1139. const ASTRecordLayout &L = Context.getASTRecordLayout(PathElement.Class);
  1140. Offset += L.getBaseClassOffset(
  1141. PathElement.Base->getType()->getAsCXXRecordDecl());
  1142. }
  1143. }
  1144. // -2: Src is not a public base of Dst.
  1145. if (NumPublicPaths == 0)
  1146. return CharUnits::fromQuantity(-2ULL);
  1147. // -3: Src is a multiple public base type but never a virtual base type.
  1148. if (NumPublicPaths > 1)
  1149. return CharUnits::fromQuantity(-3ULL);
  1150. // Otherwise, the Src type is a unique public nonvirtual base type of Dst.
  1151. // Return the offset of Src from the origin of Dst.
  1152. return Offset;
  1153. }
  1154. static llvm::FunctionCallee getBadTypeidFn(CodeGenFunction &CGF) {
  1155. // void __cxa_bad_typeid();
  1156. llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false);
  1157. return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_typeid");
  1158. }
bool ItaniumCXXABI::shouldTypeidBeNullChecked(bool IsDeref,
                                              QualType SrcRecordTy) {
  // typeid(*p) on a null pointer must raise std::bad_typeid, so a null check
  // is required exactly when the operand is a dereference.
  return IsDeref;
}
  1163. void ItaniumCXXABI::EmitBadTypeidCall(CodeGenFunction &CGF) {
  1164. llvm::FunctionCallee Fn = getBadTypeidFn(CGF);
  1165. llvm::CallBase *Call = CGF.EmitRuntimeCallOrInvoke(Fn);
  1166. Call->setDoesNotReturn();
  1167. CGF.Builder.CreateUnreachable();
  1168. }
/// Emit a polymorphic typeid(): the std::type_info pointer lives in the
/// vtable one slot before the address point.
llvm::Value *ItaniumCXXABI::EmitTypeid(CodeGenFunction &CGF,
                                       QualType SrcRecordTy,
                                       Address ThisPtr,
                                       llvm::Type *StdTypeInfoPtrTy) {
  auto *ClassDecl =
      cast<CXXRecordDecl>(SrcRecordTy->castAs<RecordType>()->getDecl());
  llvm::Value *Value =
      CGF.GetVTablePtr(ThisPtr, StdTypeInfoPtrTy->getPointerTo(), ClassDecl);

  if (CGM.getItaniumVTableContext().isRelativeLayout()) {
    // Load the type info. In the relative layout the slot holds a 32-bit
    // self-relative offset 4 bytes before the address point, resolved with
    // the load_relative intrinsic.
    Value = CGF.Builder.CreateBitCast(Value, CGM.Int8PtrTy);
    Value = CGF.Builder.CreateCall(
        CGM.getIntrinsic(llvm::Intrinsic::load_relative, {CGM.Int32Ty}),
        {Value, llvm::ConstantInt::get(CGM.Int32Ty, -4)});

    // Setup to dereference again since this is a proxy we accessed.
    Value = CGF.Builder.CreateBitCast(Value, StdTypeInfoPtrTy->getPointerTo());
  } else {
    // Load the type info from vtable entry -1 (relative to address point).
    Value =
        CGF.Builder.CreateConstInBoundsGEP1_64(StdTypeInfoPtrTy, Value, -1ULL);
  }
  return CGF.Builder.CreateAlignedLoad(StdTypeInfoPtrTy, Value,
                                       CGF.getPointerAlign());
}
bool ItaniumCXXABI::shouldDynamicCastCallBeNullChecked(bool SrcIsPtr,
                                                       QualType SrcRecordTy) {
  // dynamic_cast of a null pointer yields null without calling the runtime,
  // so only pointer casts (not reference casts) need the null check.
  return SrcIsPtr;
}
/// Emit a call to the __dynamic_cast runtime function with the source and
/// destination RTTI plus a statically computed src2dst offset hint. For
/// reference casts, a null result branches to a __cxa_bad_cast block.
llvm::Value *ItaniumCXXABI::EmitDynamicCastCall(
    CodeGenFunction &CGF, Address ThisAddr, QualType SrcRecordTy,
    QualType DestTy, QualType DestRecordTy, llvm::BasicBlock *CastEnd) {
  llvm::Type *PtrDiffLTy =
      CGF.ConvertType(CGF.getContext().getPointerDiffType());
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);

  llvm::Value *SrcRTTI =
      CGF.CGM.GetAddrOfRTTIDescriptor(SrcRecordTy.getUnqualifiedType());
  llvm::Value *DestRTTI =
      CGF.CGM.GetAddrOfRTTIDescriptor(DestRecordTy.getUnqualifiedType());

  // Compute the offset hint (see computeOffsetHint for the encoding).
  const CXXRecordDecl *SrcDecl = SrcRecordTy->getAsCXXRecordDecl();
  const CXXRecordDecl *DestDecl = DestRecordTy->getAsCXXRecordDecl();
  llvm::Value *OffsetHint = llvm::ConstantInt::get(
      PtrDiffLTy,
      computeOffsetHint(CGF.getContext(), SrcDecl, DestDecl).getQuantity());

  // Emit the call to __dynamic_cast.
  llvm::Value *Value = ThisAddr.getPointer();
  Value = CGF.EmitCastToVoidPtr(Value);

  llvm::Value *args[] = {Value, SrcRTTI, DestRTTI, OffsetHint};
  Value = CGF.EmitNounwindRuntimeCall(getItaniumDynamicCastFn(CGF), args);
  Value = CGF.Builder.CreateBitCast(Value, DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  if (DestTy->isReferenceType()) {
    // A null runtime result means failure: branch to a block calling
    // __cxa_bad_cast, otherwise continue at CastEnd.
    llvm::BasicBlock *BadCastBlock =
        CGF.createBasicBlock("dynamic_cast.bad_cast");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(Value);
    CGF.Builder.CreateCondBr(IsNull, BadCastBlock, CastEnd);

    CGF.EmitBlock(BadCastBlock);
    EmitBadCastCall(CGF);
  }

  return Value;
}
/// Emit dynamic_cast<void*>: adjust the pointer to the most-derived object
/// using the offset-to-top stored at vtable entry -2. No runtime call needed.
llvm::Value *ItaniumCXXABI::EmitDynamicCastToVoid(CodeGenFunction &CGF,
                                                  Address ThisAddr,
                                                  QualType SrcRecordTy,
                                                  QualType DestTy) {
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  auto *ClassDecl =
      cast<CXXRecordDecl>(SrcRecordTy->castAs<RecordType>()->getDecl());
  llvm::Value *OffsetToTop;
  if (CGM.getItaniumVTableContext().isRelativeLayout()) {
    // Get the vtable pointer.
    llvm::Value *VTable =
        CGF.GetVTablePtr(ThisAddr, CGM.Int32Ty->getPointerTo(), ClassDecl);

    // Get the offset-to-top from the vtable; in the relative layout the
    // entries are 32-bit and 4-byte aligned.
    OffsetToTop =
        CGF.Builder.CreateConstInBoundsGEP1_32(CGM.Int32Ty, VTable, -2U);
    OffsetToTop = CGF.Builder.CreateAlignedLoad(
        CGM.Int32Ty, OffsetToTop, CharUnits::fromQuantity(4), "offset.to.top");
  } else {
    llvm::Type *PtrDiffLTy =
        CGF.ConvertType(CGF.getContext().getPointerDiffType());

    // Get the vtable pointer.
    llvm::Value *VTable =
        CGF.GetVTablePtr(ThisAddr, PtrDiffLTy->getPointerTo(), ClassDecl);

    // Get the offset-to-top from the vtable (a ptrdiff_t at entry -2).
    OffsetToTop =
        CGF.Builder.CreateConstInBoundsGEP1_64(PtrDiffLTy, VTable, -2ULL);
    OffsetToTop = CGF.Builder.CreateAlignedLoad(
        PtrDiffLTy, OffsetToTop, CGF.getPointerAlign(), "offset.to.top");
  }
  // Finally, add the offset to the pointer.
  llvm::Value *Value = ThisAddr.getPointer();
  Value = CGF.EmitCastToVoidPtr(Value);
  Value = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, Value, OffsetToTop);
  return CGF.Builder.CreateBitCast(Value, DestLTy);
}
  1266. bool ItaniumCXXABI::EmitBadCastCall(CodeGenFunction &CGF) {
  1267. llvm::FunctionCallee Fn = getBadCastFn(CGF);
  1268. llvm::CallBase *Call = CGF.EmitRuntimeCallOrInvoke(Fn);
  1269. Call->setDoesNotReturn();
  1270. CGF.Builder.CreateUnreachable();
  1271. return true;
  1272. }
/// Load the offset of a virtual base from the vbase-offset slot in the
/// object's vtable. The slot's position is a statically known (negative)
/// offset from the address point.
llvm::Value *
ItaniumCXXABI::GetVirtualBaseClassOffset(CodeGenFunction &CGF,
                                         Address This,
                                         const CXXRecordDecl *ClassDecl,
                                         const CXXRecordDecl *BaseClassDecl) {
  llvm::Value *VTablePtr = CGF.GetVTablePtr(This, CGM.Int8PtrTy, ClassDecl);
  // Where in the vtable (in bytes, relative to the address point) the
  // vbase offset for this base is stored.
  CharUnits VBaseOffsetOffset =
      CGM.getItaniumVTableContext().getVirtualBaseOffsetOffset(ClassDecl,
                                                               BaseClassDecl);
  llvm::Value *VBaseOffsetPtr =
    CGF.Builder.CreateConstGEP1_64(
        CGF.Int8Ty, VTablePtr, VBaseOffsetOffset.getQuantity(),
        "vbase.offset.ptr");

  llvm::Value *VBaseOffset;
  if (CGM.getItaniumVTableContext().isRelativeLayout()) {
    // Relative layout stores the offset as a 32-bit, 4-byte-aligned entry.
    VBaseOffsetPtr =
        CGF.Builder.CreateBitCast(VBaseOffsetPtr, CGF.Int32Ty->getPointerTo());
    VBaseOffset = CGF.Builder.CreateAlignedLoad(
        CGF.Int32Ty, VBaseOffsetPtr, CharUnits::fromQuantity(4),
        "vbase.offset");
  } else {
    // Classic layout stores a ptrdiff_t at pointer alignment.
    VBaseOffsetPtr = CGF.Builder.CreateBitCast(VBaseOffsetPtr,
                                               CGM.PtrDiffTy->getPointerTo());
    VBaseOffset = CGF.Builder.CreateAlignedLoad(
        CGM.PtrDiffTy, VBaseOffsetPtr, CGF.getPointerAlign(), "vbase.offset");
  }
  return VBaseOffset;
}
  1301. void ItaniumCXXABI::EmitCXXConstructors(const CXXConstructorDecl *D) {
  1302. // Just make sure we're in sync with TargetCXXABI.
  1303. assert(CGM.getTarget().getCXXABI().hasConstructorVariants());
  1304. // The constructor used for constructing this as a base class;
  1305. // ignores virtual bases.
  1306. CGM.EmitGlobal(GlobalDecl(D, Ctor_Base));
  1307. // The constructor used for constructing this as a complete class;
  1308. // constructs the virtual bases, then calls the base constructor.
  1309. if (!D->getParent()->isAbstract()) {
  1310. // We don't need to emit the complete ctor if the class is abstract.
  1311. CGM.EmitGlobal(GlobalDecl(D, Ctor_Complete));
  1312. }
  1313. }
  1314. CGCXXABI::AddedStructorArgCounts
  1315. ItaniumCXXABI::buildStructorSignature(GlobalDecl GD,
  1316. SmallVectorImpl<CanQualType> &ArgTys) {
  1317. ASTContext &Context = getContext();
  1318. // All parameters are already in place except VTT, which goes after 'this'.
  1319. // These are Clang types, so we don't need to worry about sret yet.
  1320. // Check if we need to add a VTT parameter (which has type void **).
  1321. if ((isa<CXXConstructorDecl>(GD.getDecl()) ? GD.getCtorType() == Ctor_Base
  1322. : GD.getDtorType() == Dtor_Base) &&
  1323. cast<CXXMethodDecl>(GD.getDecl())->getParent()->getNumVBases() != 0) {
  1324. ArgTys.insert(ArgTys.begin() + 1,
  1325. Context.getPointerType(Context.VoidPtrTy));
  1326. return AddedStructorArgCounts::prefix(1);
  1327. }
  1328. return AddedStructorArgCounts{};
  1329. }
  1330. void ItaniumCXXABI::EmitCXXDestructors(const CXXDestructorDecl *D) {
  1331. // The destructor used for destructing this as a base class; ignores
  1332. // virtual bases.
  1333. CGM.EmitGlobal(GlobalDecl(D, Dtor_Base));
  1334. // The destructor used for destructing this as a most-derived class;
  1335. // call the base destructor and then destructs any virtual bases.
  1336. CGM.EmitGlobal(GlobalDecl(D, Dtor_Complete));
  1337. // The destructor in a virtual table is always a 'deleting'
  1338. // destructor, which calls the complete destructor and then uses the
  1339. // appropriate operator delete.
  1340. if (D->isVirtual())
  1341. CGM.EmitGlobal(GlobalDecl(D, Dtor_Deleting));
  1342. }
  1343. void ItaniumCXXABI::addImplicitStructorParams(CodeGenFunction &CGF,
  1344. QualType &ResTy,
  1345. FunctionArgList &Params) {
  1346. const CXXMethodDecl *MD = cast<CXXMethodDecl>(CGF.CurGD.getDecl());
  1347. assert(isa<CXXConstructorDecl>(MD) || isa<CXXDestructorDecl>(MD));
  1348. // Check if we need a VTT parameter as well.
  1349. if (NeedsVTTParameter(CGF.CurGD)) {
  1350. ASTContext &Context = getContext();
  1351. // FIXME: avoid the fake decl
  1352. QualType T = Context.getPointerType(Context.VoidPtrTy);
  1353. auto *VTTDecl = ImplicitParamDecl::Create(
  1354. Context, /*DC=*/nullptr, MD->getLocation(), &Context.Idents.get("vtt"),
  1355. T, ImplicitParamDecl::CXXVTT);
  1356. Params.insert(Params.begin() + 1, VTTDecl);
  1357. getStructorImplicitParamDecl(CGF) = VTTDecl;
  1358. }
  1359. }
/// Set up the implicit state ('this', VTT, this-return slot) at the entry of
/// an instance method or structor.
void ItaniumCXXABI::EmitInstanceFunctionProlog(CodeGenFunction &CGF) {
  // Naked functions have no prolog.
  if (CGF.CurFuncDecl && CGF.CurFuncDecl->hasAttr<NakedAttr>())
    return;

  /// Initialize the 'this' slot. In the Itanium C++ ABI, no prologue
  /// adjustments are required, because they are all handled by thunks.
  setCXXABIThisValue(CGF, loadIncomingCXXThis(CGF));

  /// Initialize the 'vtt' slot if needed (set up by
  /// addImplicitStructorParams for base-object structors).
  if (getStructorImplicitParamDecl(CGF)) {
    getStructorImplicitParamValue(CGF) = CGF.Builder.CreateLoad(
        CGF.GetAddrOfLocalVar(getStructorImplicitParamDecl(CGF)), "vtt");
  }

  /// If this is a function that the ABI specifies returns 'this', initialize
  /// the return slot to 'this' at the start of the function.
  ///
  /// Unlike the setting of return types, this is done within the ABI
  /// implementation instead of by clients of CGCXXABI because:
  /// 1) getThisValue is currently protected
  /// 2) in theory, an ABI could implement 'this' returns some other way;
  ///    HasThisReturn only specifies a contract, not the implementation
  if (HasThisReturn(CGF.CurGD))
    CGF.Builder.CreateStore(getThisValue(CGF), CGF.ReturnValue);
}
  1383. CGCXXABI::AddedStructorArgs ItaniumCXXABI::getImplicitConstructorArgs(
  1384. CodeGenFunction &CGF, const CXXConstructorDecl *D, CXXCtorType Type,
  1385. bool ForVirtualBase, bool Delegating) {
  1386. if (!NeedsVTTParameter(GlobalDecl(D, Type)))
  1387. return AddedStructorArgs{};
  1388. // Insert the implicit 'vtt' argument as the second argument.
  1389. llvm::Value *VTT =
  1390. CGF.GetVTTParameter(GlobalDecl(D, Type), ForVirtualBase, Delegating);
  1391. QualType VTTTy = getContext().getPointerType(getContext().VoidPtrTy);
  1392. return AddedStructorArgs::prefix({{VTT, VTTTy}});
  1393. }
  1394. llvm::Value *ItaniumCXXABI::getCXXDestructorImplicitParam(
  1395. CodeGenFunction &CGF, const CXXDestructorDecl *DD, CXXDtorType Type,
  1396. bool ForVirtualBase, bool Delegating) {
  1397. GlobalDecl GD(DD, Type);
  1398. return CGF.GetVTTParameter(GD, ForVirtualBase, Delegating);
  1399. }
/// Emit a direct (non-virtual-dispatch) call to the requested destructor
/// variant, passing the VTT when the variant requires one.
void ItaniumCXXABI::EmitDestructorCall(CodeGenFunction &CGF,
                                       const CXXDestructorDecl *DD,
                                       CXXDtorType Type, bool ForVirtualBase,
                                       bool Delegating, Address This,
                                       QualType ThisTy) {
  GlobalDecl GD(DD, Type);
  // VTT is null when this destructor variant doesn't take one.
  llvm::Value *VTT =
      getCXXDestructorImplicitParam(CGF, DD, Type, ForVirtualBase, Delegating);
  QualType VTTTy = getContext().getPointerType(getContext().VoidPtrTy);

  CGCallee Callee;
  // Apple kexts route virtual destructor calls through the vtable even for
  // direct calls (except the base variant).
  if (getContext().getLangOpts().AppleKext &&
      Type != Dtor_Base && DD->isVirtual())
    Callee = CGF.BuildAppleKextVirtualDestructorCall(DD, Type, DD->getParent());
  else
    Callee = CGCallee::forDirect(CGM.getAddrOfCXXStructor(GD), GD);

  CGF.EmitCXXDestructorCall(GD, Callee, This.getPointer(), ThisTy, VTT, VTTTy,
                            nullptr);
}
/// Emit the vtable definition (initializer, linkage, comdat, visibility,
/// type metadata) for a class whose key function / deferred emission has
/// been resolved.
void ItaniumCXXABI::emitVTableDefinitions(CodeGenVTables &CGVT,
                                          const CXXRecordDecl *RD) {
  llvm::GlobalVariable *VTable = getAddrOfVTable(RD, CharUnits());
  // Already defined (e.g. emitted earlier in this TU) — nothing to do.
  if (VTable->hasInitializer())
    return;

  ItaniumVTableContext &VTContext = CGM.getItaniumVTableContext();
  const VTableLayout &VTLayout = VTContext.getVTableLayout(RD);
  llvm::GlobalVariable::LinkageTypes Linkage = CGM.getVTableLinkage(RD);
  llvm::Constant *RTTI =
      CGM.GetAddrOfRTTIDescriptor(CGM.getContext().getTagDeclType(RD));

  // Create and set the initializer.
  ConstantInitBuilder builder(CGM);
  auto components = builder.beginStruct();
  CGVT.createVTableInitializer(components, VTLayout, RTTI,
                               llvm::GlobalValue::isLocalLinkage(Linkage));
  components.finishAndSetAsInitializer(VTable);

  // Set the correct linkage.
  VTable->setLinkage(Linkage);

  if (CGM.supportsCOMDAT() && VTable->isWeakForLinker())
    VTable->setComdat(CGM.getModule().getOrInsertComdat(VTable->getName()));

  // Set the right visibility.
  CGM.setGVProperties(VTable, RD);

  // If this is the magic class __cxxabiv1::__fundamental_type_info,
  // we will emit the typeinfo for the fundamental types. This is the
  // same behaviour as GCC.
  const DeclContext *DC = RD->getDeclContext();
  if (RD->getIdentifier() &&
      RD->getIdentifier()->isStr("__fundamental_type_info") &&
      isa<NamespaceDecl>(DC) && cast<NamespaceDecl>(DC)->getIdentifier() &&
      cast<NamespaceDecl>(DC)->getIdentifier()->isStr("__cxxabiv1") &&
      DC->getParent()->isTranslationUnit())
    EmitFundamentalRTTIDescriptors(RD);

  // Always emit type metadata on non-available_externally definitions, and on
  // available_externally definitions if we are performing whole program
  // devirtualization. For WPD we need the type metadata on all vtable
  // definitions to ensure we associate derived classes with base classes
  // defined in headers but with a strong definition only in a shared library.
  if (!VTable->isDeclarationForLinker() ||
      CGM.getCodeGenOpts().WholeProgramVTables) {
    CGM.EmitVTableTypeMetadata(RD, VTable, VTLayout);
    // For available_externally definitions, add the vtable to
    // @llvm.compiler.used so that it isn't deleted before whole program
    // analysis.
    if (VTable->isDeclarationForLinker()) {
      assert(CGM.getCodeGenOpts().WholeProgramVTables);
      CGM.addCompilerUsedGlobal(VTable);
    }
  }

  if (VTContext.isRelativeLayout()) {
    CGVT.RemoveHwasanMetadata(VTable);
    if (!VTable->isDSOLocal())
      CGVT.GenerateRelativeVTableAlias(VTable, VTable->getName());
  }
}
  1472. bool ItaniumCXXABI::isVirtualOffsetNeededForVTableField(
  1473. CodeGenFunction &CGF, CodeGenFunction::VPtr Vptr) {
  1474. if (Vptr.NearestVBase == nullptr)
  1475. return false;
  1476. return NeedsVTTParameter(CGF.CurGD);
  1477. }
  1478. llvm::Value *ItaniumCXXABI::getVTableAddressPointInStructor(
  1479. CodeGenFunction &CGF, const CXXRecordDecl *VTableClass, BaseSubobject Base,
  1480. const CXXRecordDecl *NearestVBase) {
  1481. if ((Base.getBase()->getNumVBases() || NearestVBase != nullptr) &&
  1482. NeedsVTTParameter(CGF.CurGD)) {
  1483. return getVTableAddressPointInStructorWithVTT(CGF, VTableClass, Base,
  1484. NearestVBase);
  1485. }
  1486. return getVTableAddressPoint(Base, VTableClass);
  1487. }
/// Return the constant address point for a base subobject's vtable: a GEP
/// into the vtable group global selecting the right vtable and the right
/// address point within it.
llvm::Constant *
ItaniumCXXABI::getVTableAddressPoint(BaseSubobject Base,
                                     const CXXRecordDecl *VTableClass) {
  llvm::GlobalValue *VTable = getAddrOfVTable(VTableClass, CharUnits());

  // Find the appropriate vtable within the vtable group, and the address
  // point within that vtable.
  VTableLayout::AddressPointLocation AddressPoint =
      CGM.getItaniumVTableContext()
          .getVTableLayout(VTableClass)
          .getAddressPoint(Base);
  // GEP indices: {0, vtable-within-group, entry-within-vtable}.
  llvm::Value *Indices[] = {
    llvm::ConstantInt::get(CGM.Int32Ty, 0),
    llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.VTableIndex),
    llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.AddressPointIndex),
  };

  return llvm::ConstantExpr::getGetElementPtr(VTable->getValueType(), VTable,
                                              Indices, /*InBounds=*/true,
                                              /*InRangeIndex=*/1);
}
  1507. // Check whether all the non-inline virtual methods for the class have the
  1508. // specified attribute.
  1509. template <typename T>
  1510. static bool CXXRecordAllNonInlineVirtualsHaveAttr(const CXXRecordDecl *RD) {
  1511. bool FoundNonInlineVirtualMethodWithAttr = false;
  1512. for (const auto *D : RD->noload_decls()) {
  1513. if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
  1514. if (!FD->isVirtualAsWritten() || FD->isInlineSpecified() ||
  1515. FD->doesThisDeclarationHaveABody())
  1516. continue;
  1517. if (!D->hasAttr<T>())
  1518. return false;
  1519. FoundNonInlineVirtualMethodWithAttr = true;
  1520. }
  1521. }
  1522. // We didn't find any non-inline virtual methods missing the attribute. We
  1523. // will return true when we found at least one non-inline virtual with the
  1524. // attribute. (This lets our caller know that the attribute needs to be
  1525. // propagated up to the vtable.)
  1526. return FoundNonInlineVirtualMethodWithAttr;
  1527. }
/// Load a base subobject's vtable address point out of the VTT passed to the
/// current structor, instead of using the final vtable's constant address
/// point.
llvm::Value *ItaniumCXXABI::getVTableAddressPointInStructorWithVTT(
    CodeGenFunction &CGF, const CXXRecordDecl *VTableClass, BaseSubobject Base,
    const CXXRecordDecl *NearestVBase) {
  assert((Base.getBase()->getNumVBases() || NearestVBase != nullptr) &&
         NeedsVTTParameter(CGF.CurGD) && "This class doesn't have VTT");

  // Get the secondary vpointer index for this subobject within the VTT.
  uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

  /// Load the VTT.
  llvm::Value *VTT = CGF.LoadCXXVTT();
  // Step to the relevant VTT slot (index 0 needs no GEP).
  if (VirtualPointerIndex)
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(
        CGF.VoidPtrTy, VTT, VirtualPointerIndex);

  // And load the address point from the VTT.
  return CGF.Builder.CreateAlignedLoad(CGF.VoidPtrTy, VTT,
                                       CGF.getPointerAlign());
}
llvm::Constant *ItaniumCXXABI::getVTableAddressPointForConstExpr(
    BaseSubobject Base, const CXXRecordDecl *VTableClass) {
  // In the Itanium ABI the address point is a constant GEP into the vtable
  // global, so the generic helper already yields an llvm::Constant.
  return getVTableAddressPoint(Base, VTableClass);
}
/// Get (creating if necessary) the global variable for RD's vtable group.
/// The returned global may still be a declaration; emitVTableDefinitions
/// fills in the initializer later.
llvm::GlobalVariable *ItaniumCXXABI::getAddrOfVTable(const CXXRecordDecl *RD,
                                                     CharUnits VPtrOffset) {
  assert(VPtrOffset.isZero() && "Itanium ABI only supports zero vptr offsets");

  // Return the cached global if we've already created it.
  llvm::GlobalVariable *&VTable = VTables[RD];
  if (VTable)
    return VTable;

  // Queue up this vtable for possible deferred emission.
  CGM.addDeferredVTable(RD);

  SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  getMangleContext().mangleCXXVTable(RD, Out);

  const VTableLayout &VTLayout =
      CGM.getItaniumVTableContext().getVTableLayout(RD);
  llvm::Type *VTableType = CGM.getVTables().getVTableType(VTLayout);

  // Use pointer alignment for the vtable. Otherwise we would align them based
  // on the size of the initializer which doesn't make sense as only single
  // values are read.
  unsigned PAlign = CGM.getItaniumVTableContext().isRelativeLayout()
                        ? 32
                        : CGM.getTarget().getPointerAlign(LangAS::Default);

  VTable = CGM.CreateOrReplaceCXXRuntimeVariable(
      Name, VTableType, llvm::GlobalValue::ExternalLinkage,
      getContext().toCharUnitsFromBits(PAlign).getAsAlign());
  VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);

  // In MS C++ if you have a class with virtual functions in which you are using
  // selective member import/export, then all virtual functions must be exported
  // unless they are inline, otherwise a link error will result. To match this
  // behavior, for such classes, we dllimport the vtable if it is defined
  // externally and all the non-inline virtual methods are marked dllimport, and
  // we dllexport the vtable if it is defined in this TU and all the non-inline
  // virtual methods are marked dllexport.
  if (CGM.getTarget().hasPS4DLLImportExport()) {
    if ((!RD->hasAttr<DLLImportAttr>()) && (!RD->hasAttr<DLLExportAttr>())) {
      if (CGM.getVTables().isVTableExternal(RD)) {
        if (CXXRecordAllNonInlineVirtualsHaveAttr<DLLImportAttr>(RD))
          VTable->setDLLStorageClass(llvm::GlobalValue::DLLImportStorageClass);
      } else {
        if (CXXRecordAllNonInlineVirtualsHaveAttr<DLLExportAttr>(RD))
          VTable->setDLLStorageClass(llvm::GlobalValue::DLLExportStorageClass);
      }
    }
  }
  CGM.setGVProperties(VTable, RD);

  return VTable;
}
/// Load the function pointer for virtual call GD out of the object's vtable.
///
/// \param This    address of the object whose vtable is consulted.
/// \param Ty      the LLVM function type of the callee.
/// \param Loc     source location, used for type-metadata (WPD/CFI) emission.
/// \return a CGCallee wrapping the loaded virtual function pointer.
CGCallee ItaniumCXXABI::getVirtualFunctionPointer(CodeGenFunction &CGF,
                                                  GlobalDecl GD,
                                                  Address This,
                                                  llvm::Type *Ty,
                                                  SourceLocation Loc) {
  llvm::Type *TyPtr = Ty->getPointerTo();
  auto *MethodDecl = cast<CXXMethodDecl>(GD.getDecl());
  llvm::Value *VTable = CGF.GetVTablePtr(
      This, TyPtr->getPointerTo(), MethodDecl->getParent());

  uint64_t VTableIndex = CGM.getItaniumVTableContext().getMethodVTableIndex(GD);
  llvm::Value *VFunc;
  if (CGF.ShouldEmitVTableTypeCheckedLoad(MethodDecl->getParent())) {
    // CFI: fuse the slot load with a type check. The byte offset is the slot
    // index scaled by the target's pointer width.
    VFunc = CGF.EmitVTableTypeCheckedLoad(
        MethodDecl->getParent(), VTable, TyPtr,
        VTableIndex *
            CGM.getContext().getTargetInfo().getPointerWidth(LangAS::Default) /
            8);
  } else {
    CGF.EmitTypeMetadataCodeForVCall(MethodDecl->getParent(), VTable, Loc);

    llvm::Value *VFuncLoad;
    if (CGM.getItaniumVTableContext().isRelativeLayout()) {
      // Relative vtable layout stores 32-bit offsets; llvm.load.relative
      // resolves slot VTableIndex (at byte offset 4 * index) to a pointer.
      VTable = CGF.Builder.CreateBitCast(VTable, CGM.Int8PtrTy);
      llvm::Value *Load = CGF.Builder.CreateCall(
          CGM.getIntrinsic(llvm::Intrinsic::load_relative, {CGM.Int32Ty}),
          {VTable, llvm::ConstantInt::get(CGM.Int32Ty, 4 * VTableIndex)});
      VFuncLoad = CGF.Builder.CreateBitCast(Load, TyPtr);
    } else {
      // Classic layout: index into the vtable as an array of function
      // pointers and load the slot.
      VTable =
          CGF.Builder.CreateBitCast(VTable, TyPtr->getPointerTo());
      llvm::Value *VTableSlotPtr = CGF.Builder.CreateConstInBoundsGEP1_64(
          TyPtr, VTable, VTableIndex, "vfn");
      VFuncLoad =
          CGF.Builder.CreateAlignedLoad(TyPtr, VTableSlotPtr,
                                        CGF.getPointerAlign());
    }

    // Add !invariant.load md to virtual function load to indicate that
    // function didn't change inside vtable.
    // It's safe to add it without -fstrict-vtable-pointers, but it would not
    // help in devirtualization because it will only matter if we will have 2
    // the same virtual function loads from the same vtable load, which won't
    // happen without enabled devirtualization with -fstrict-vtable-pointers.
    if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        CGM.getCodeGenOpts().StrictVTablePointers) {
      if (auto *VFuncLoadInstr = dyn_cast<llvm::Instruction>(VFuncLoad)) {
        VFuncLoadInstr->setMetadata(
            llvm::LLVMContext::MD_invariant_load,
            llvm::MDNode::get(CGM.getLLVMContext(),
                              llvm::ArrayRef<llvm::Metadata *>()));
      }
    }
    VFunc = VFuncLoad;
  }

  CGCallee Callee(GD, VFunc);
  return Callee;
}
  1649. llvm::Value *ItaniumCXXABI::EmitVirtualDestructorCall(
  1650. CodeGenFunction &CGF, const CXXDestructorDecl *Dtor, CXXDtorType DtorType,
  1651. Address This, DeleteOrMemberCallExpr E) {
  1652. auto *CE = E.dyn_cast<const CXXMemberCallExpr *>();
  1653. auto *D = E.dyn_cast<const CXXDeleteExpr *>();
  1654. assert((CE != nullptr) ^ (D != nullptr));
  1655. assert(CE == nullptr || CE->arg_begin() == CE->arg_end());
  1656. assert(DtorType == Dtor_Deleting || DtorType == Dtor_Complete);
  1657. GlobalDecl GD(Dtor, DtorType);
  1658. const CGFunctionInfo *FInfo =
  1659. &CGM.getTypes().arrangeCXXStructorDeclaration(GD);
  1660. llvm::FunctionType *Ty = CGF.CGM.getTypes().GetFunctionType(*FInfo);
  1661. CGCallee Callee = CGCallee::forVirtual(CE, GD, This, Ty);
  1662. QualType ThisTy;
  1663. if (CE) {
  1664. ThisTy = CE->getObjectType();
  1665. } else {
  1666. ThisTy = D->getDestroyedType();
  1667. }
  1668. CGF.EmitCXXDestructorCall(GD, Callee, This.getPointer(), ThisTy, nullptr,
  1669. QualType(), nullptr);
  1670. return nullptr;
  1671. }
  1672. void ItaniumCXXABI::emitVirtualInheritanceTables(const CXXRecordDecl *RD) {
  1673. CodeGenVTables &VTables = CGM.getVTables();
  1674. llvm::GlobalVariable *VTT = VTables.GetAddrOfVTT(RD);
  1675. VTables.EmitVTTDefinition(VTT, CGM.getVTableLinkage(RD), RD);
  1676. }
  1677. bool ItaniumCXXABI::canSpeculativelyEmitVTableAsBaseClass(
  1678. const CXXRecordDecl *RD) const {
  1679. // We don't emit available_externally vtables if we are in -fapple-kext mode
  1680. // because kext mode does not permit devirtualization.
  1681. if (CGM.getLangOpts().AppleKext)
  1682. return false;
  1683. // If the vtable is hidden then it is not safe to emit an available_externally
  1684. // copy of vtable.
  1685. if (isVTableHidden(RD))
  1686. return false;
  1687. if (CGM.getCodeGenOpts().ForceEmitVTables)
  1688. return true;
  1689. // If we don't have any not emitted inline virtual function then we are safe
  1690. // to emit an available_externally copy of vtable.
  1691. // FIXME we can still emit a copy of the vtable if we
  1692. // can emit definition of the inline functions.
  1693. if (hasAnyUnusedVirtualInlineFunction(RD))
  1694. return false;
  1695. // For a class with virtual bases, we must also be able to speculatively
  1696. // emit the VTT, because CodeGen doesn't have separate notions of "can emit
  1697. // the vtable" and "can emit the VTT". For a base subobject, this means we
  1698. // need to be able to emit non-virtual base vtables.
  1699. if (RD->getNumVBases()) {
  1700. for (const auto &B : RD->bases()) {
  1701. auto *BRD = B.getType()->getAsCXXRecordDecl();
  1702. assert(BRD && "no class for base specifier");
  1703. if (B.isVirtual() || !BRD->isDynamicClass())
  1704. continue;
  1705. if (!canSpeculativelyEmitVTableAsBaseClass(BRD))
  1706. return false;
  1707. }
  1708. }
  1709. return true;
  1710. }
  1711. bool ItaniumCXXABI::canSpeculativelyEmitVTable(const CXXRecordDecl *RD) const {
  1712. if (!canSpeculativelyEmitVTableAsBaseClass(RD))
  1713. return false;
  1714. // For a complete-object vtable (or more specifically, for the VTT), we need
  1715. // to be able to speculatively emit the vtables of all dynamic virtual bases.
  1716. for (const auto &B : RD->vbases()) {
  1717. auto *BRD = B.getType()->getAsCXXRecordDecl();
  1718. assert(BRD && "no class for base specifier");
  1719. if (!BRD->isDynamicClass())
  1720. continue;
  1721. if (!canSpeculativelyEmitVTableAsBaseClass(BRD))
  1722. return false;
  1723. }
  1724. return true;
  1725. }
/// Apply a this/return pointer adjustment for a thunk.
///
/// \param InitialPtr            the unadjusted pointer.
/// \param NonVirtualAdjustment  fixed byte offset to add.
/// \param VirtualAdjustment     byte offset, within the vtable, of the slot
///                              holding the dynamic byte offset to add; 0 if
///                              no virtual adjustment is needed.
/// \param IsReturnAdjustment    selects ordering: for a return (derived-to-
///                              base) adjustment the non-virtual offset is
///                              applied after the virtual one, otherwise
///                              before.
static llvm::Value *performTypeAdjustment(CodeGenFunction &CGF,
                                          Address InitialPtr,
                                          int64_t NonVirtualAdjustment,
                                          int64_t VirtualAdjustment,
                                          bool IsReturnAdjustment) {
  if (!NonVirtualAdjustment && !VirtualAdjustment)
    return InitialPtr.getPointer();

  Address V = CGF.Builder.CreateElementBitCast(InitialPtr, CGF.Int8Ty);

  // In a base-to-derived cast, the non-virtual adjustment is applied first.
  if (NonVirtualAdjustment && !IsReturnAdjustment) {
    V = CGF.Builder.CreateConstInBoundsByteGEP(V,
                              CharUnits::fromQuantity(NonVirtualAdjustment));
  }

  // Perform the virtual adjustment if we have one.
  llvm::Value *ResultPtr;
  if (VirtualAdjustment) {
    // Load the vtable pointer from the object.
    Address VTablePtrPtr = CGF.Builder.CreateElementBitCast(V, CGF.Int8PtrTy);
    llvm::Value *VTablePtr = CGF.Builder.CreateLoad(VTablePtrPtr);

    llvm::Value *Offset;
    llvm::Value *OffsetPtr = CGF.Builder.CreateConstInBoundsGEP1_64(
        CGF.Int8Ty, VTablePtr, VirtualAdjustment);
    if (CGF.CGM.getItaniumVTableContext().isRelativeLayout()) {
      // Load the adjustment offset from the vtable as a 32-bit int.
      OffsetPtr =
          CGF.Builder.CreateBitCast(OffsetPtr, CGF.Int32Ty->getPointerTo());
      Offset =
          CGF.Builder.CreateAlignedLoad(CGF.Int32Ty, OffsetPtr,
                                        CharUnits::fromQuantity(4));
    } else {
      llvm::Type *PtrDiffTy =
          CGF.ConvertType(CGF.getContext().getPointerDiffType());

      OffsetPtr =
          CGF.Builder.CreateBitCast(OffsetPtr, PtrDiffTy->getPointerTo());

      // Load the adjustment offset from the vtable.
      Offset = CGF.Builder.CreateAlignedLoad(PtrDiffTy, OffsetPtr,
                                             CGF.getPointerAlign());
    }
    // Adjust our pointer.
    ResultPtr = CGF.Builder.CreateInBoundsGEP(
        V.getElementType(), V.getPointer(), Offset);
  } else {
    ResultPtr = V.getPointer();
  }

  // In a derived-to-base conversion, the non-virtual adjustment is
  // applied second.
  if (NonVirtualAdjustment && IsReturnAdjustment) {
    ResultPtr = CGF.Builder.CreateConstInBoundsGEP1_64(CGF.Int8Ty, ResultPtr,
                                                       NonVirtualAdjustment);
  }

  // Cast back to the original type.
  return CGF.Builder.CreateBitCast(ResultPtr, InitialPtr.getType());
}
  1778. llvm::Value *ItaniumCXXABI::performThisAdjustment(CodeGenFunction &CGF,
  1779. Address This,
  1780. const ThisAdjustment &TA) {
  1781. return performTypeAdjustment(CGF, This, TA.NonVirtual,
  1782. TA.Virtual.Itanium.VCallOffsetOffset,
  1783. /*IsReturnAdjustment=*/false);
  1784. }
  1785. llvm::Value *
  1786. ItaniumCXXABI::performReturnAdjustment(CodeGenFunction &CGF, Address Ret,
  1787. const ReturnAdjustment &RA) {
  1788. return performTypeAdjustment(CGF, Ret, RA.NonVirtual,
  1789. RA.Virtual.Itanium.VBaseOffsetOffset,
  1790. /*IsReturnAdjustment=*/true);
  1791. }
  1792. void ARMCXXABI::EmitReturnFromThunk(CodeGenFunction &CGF,
  1793. RValue RV, QualType ResultType) {
  1794. if (!isa<CXXDestructorDecl>(CGF.CurGD.getDecl()))
  1795. return ItaniumCXXABI::EmitReturnFromThunk(CGF, RV, ResultType);
  1796. // Destructor thunks in the ARM ABI have indeterminate results.
  1797. llvm::Type *T = CGF.ReturnValue.getElementType();
  1798. RValue Undef = RValue::get(llvm::UndefValue::get(T));
  1799. return ItaniumCXXABI::EmitReturnFromThunk(CGF, Undef, ResultType);
  1800. }
  1801. /************************** Array allocation cookies **************************/
  1802. CharUnits ItaniumCXXABI::getArrayCookieSizeImpl(QualType elementType) {
  1803. // The array cookie is a size_t; pad that up to the element alignment.
  1804. // The cookie is actually right-justified in that space.
  1805. return std::max(CharUnits::fromQuantity(CGM.SizeSizeInBytes),
  1806. CGM.getContext().getPreferredTypeAlignInChars(elementType));
  1807. }
/// Write the Itanium array cookie (the size_t element count, right-justified
/// in max(sizeof(size_t), preferred element alignment) bytes) at the start of
/// the allocation, and return the adjusted pointer to the element storage.
Address ItaniumCXXABI::InitializeArrayCookie(CodeGenFunction &CGF,
                                             Address NewPtr,
                                             llvm::Value *NumElements,
                                             const CXXNewExpr *expr,
                                             QualType ElementType) {
  assert(requiresArrayCookie(expr));

  unsigned AS = NewPtr.getAddressSpace();

  ASTContext &Ctx = getContext();
  CharUnits SizeSize = CGF.getSizeSize();

  // The size of the cookie.
  CharUnits CookieSize =
      std::max(SizeSize, Ctx.getPreferredTypeAlignInChars(ElementType));
  assert(CookieSize == getArrayCookieSizeImpl(ElementType));

  // Compute an offset to the cookie: the count is stored in the last
  // sizeof(size_t) bytes of the cookie slot.
  Address CookiePtr = NewPtr;
  CharUnits CookieOffset = CookieSize - SizeSize;
  if (!CookieOffset.isZero())
    CookiePtr = CGF.Builder.CreateConstInBoundsByteGEP(CookiePtr, CookieOffset);

  // Write the number of elements into the appropriate slot.
  Address NumElementsPtr =
      CGF.Builder.CreateElementBitCast(CookiePtr, CGF.SizeTy);
  llvm::Instruction *SI = CGF.Builder.CreateStore(NumElements, NumElementsPtr);

  // Handle the array cookie specially in ASan: poison the cookie so the
  // runtime can detect container-overflow-style accesses. Only done in
  // address space 0, and only for replaceable global operator new[] (or when
  // poisoning custom allocators was explicitly requested).
  if (CGM.getLangOpts().Sanitize.has(SanitizerKind::Address) && AS == 0 &&
      (expr->getOperatorNew()->isReplaceableGlobalAllocationFunction() ||
       CGM.getCodeGenOpts().SanitizeAddressPoisonCustomArrayCookie)) {
    // The store to the CookiePtr does not need to be instrumented.
    CGM.getSanitizerMetadata()->disableSanitizerForInstruction(SI);
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(CGM.VoidTy, NumElementsPtr.getType(), false);
    llvm::FunctionCallee F =
        CGM.CreateRuntimeFunction(FTy, "__asan_poison_cxx_array_cookie");
    CGF.Builder.CreateCall(F, NumElementsPtr.getPointer());
  }

  // Finally, compute a pointer to the actual data buffer by skipping
  // over the cookie completely.
  return CGF.Builder.CreateConstInBoundsByteGEP(NewPtr, CookieSize);
}
/// Read the element count back out of an Itanium array cookie located at
/// allocPtr; in ASan builds (address space 0) the load goes through a runtime
/// helper so poisoned cookies are handled safely.
llvm::Value *ItaniumCXXABI::readArrayCookieImpl(CodeGenFunction &CGF,
                                                Address allocPtr,
                                                CharUnits cookieSize) {
  // The element size is right-justified in the cookie.
  Address numElementsPtr = allocPtr;
  CharUnits numElementsOffset = cookieSize - CGF.getSizeSize();
  if (!numElementsOffset.isZero())
    numElementsPtr =
      CGF.Builder.CreateConstInBoundsByteGEP(numElementsPtr,
                                             numElementsOffset);

  unsigned AS = allocPtr.getAddressSpace();
  numElementsPtr = CGF.Builder.CreateElementBitCast(numElementsPtr, CGF.SizeTy);
  if (!CGM.getLangOpts().Sanitize.has(SanitizerKind::Address) || AS != 0)
    return CGF.Builder.CreateLoad(numElementsPtr);
  // In asan mode emit a function call instead of a regular load and let the
  // run-time deal with it: if the shadow is properly poisoned return the
  // cookie, otherwise return 0 to avoid an infinite loop calling DTORs.
  // We can't simply ignore this load using nosanitize metadata because
  // the metadata may be lost.
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGF.SizeTy, CGF.SizeTy->getPointerTo(0), false);
  llvm::FunctionCallee F =
      CGM.CreateRuntimeFunction(FTy, "__asan_load_cxx_array_cookie");
  return CGF.Builder.CreateCall(F, numElementsPtr.getPointer());
}
  1870. CharUnits ARMCXXABI::getArrayCookieSizeImpl(QualType elementType) {
  1871. // ARM says that the cookie is always:
  1872. // struct array_cookie {
  1873. // std::size_t element_size; // element_size != 0
  1874. // std::size_t element_count;
  1875. // };
  1876. // But the base ABI doesn't give anything an alignment greater than
  1877. // 8, so we can dismiss this as typical ABI-author blindness to
  1878. // actual language complexity and round up to the element alignment.
  1879. return std::max(CharUnits::fromQuantity(2 * CGM.SizeSizeInBytes),
  1880. CGM.getContext().getTypeAlignInChars(elementType));
  1881. }
  1882. Address ARMCXXABI::InitializeArrayCookie(CodeGenFunction &CGF,
  1883. Address newPtr,
  1884. llvm::Value *numElements,
  1885. const CXXNewExpr *expr,
  1886. QualType elementType) {
  1887. assert(requiresArrayCookie(expr));
  1888. // The cookie is always at the start of the buffer.
  1889. Address cookie = newPtr;
  1890. // The first element is the element size.
  1891. cookie = CGF.Builder.CreateElementBitCast(cookie, CGF.SizeTy);
  1892. llvm::Value *elementSize = llvm::ConstantInt::get(CGF.SizeTy,
  1893. getContext().getTypeSizeInChars(elementType).getQuantity());
  1894. CGF.Builder.CreateStore(elementSize, cookie);
  1895. // The second element is the element count.
  1896. cookie = CGF.Builder.CreateConstInBoundsGEP(cookie, 1);
  1897. CGF.Builder.CreateStore(numElements, cookie);
  1898. // Finally, compute a pointer to the actual data buffer by skipping
  1899. // over the cookie completely.
  1900. CharUnits cookieSize = ARMCXXABI::getArrayCookieSizeImpl(elementType);
  1901. return CGF.Builder.CreateConstInBoundsByteGEP(newPtr, cookieSize);
  1902. }
  1903. llvm::Value *ARMCXXABI::readArrayCookieImpl(CodeGenFunction &CGF,
  1904. Address allocPtr,
  1905. CharUnits cookieSize) {
  1906. // The number of elements is at offset sizeof(size_t) relative to
  1907. // the allocated pointer.
  1908. Address numElementsPtr
  1909. = CGF.Builder.CreateConstInBoundsByteGEP(allocPtr, CGF.getSizeSize());
  1910. numElementsPtr = CGF.Builder.CreateElementBitCast(numElementsPtr, CGF.SizeTy);
  1911. return CGF.Builder.CreateLoad(numElementsPtr);
  1912. }
  1913. /*********************** Static local initialization **************************/
  1914. static llvm::FunctionCallee getGuardAcquireFn(CodeGenModule &CGM,
  1915. llvm::PointerType *GuardPtrTy) {
  1916. // int __cxa_guard_acquire(__guard *guard_object);
  1917. llvm::FunctionType *FTy =
  1918. llvm::FunctionType::get(CGM.getTypes().ConvertType(CGM.getContext().IntTy),
  1919. GuardPtrTy, /*isVarArg=*/false);
  1920. return CGM.CreateRuntimeFunction(
  1921. FTy, "__cxa_guard_acquire",
  1922. llvm::AttributeList::get(CGM.getLLVMContext(),
  1923. llvm::AttributeList::FunctionIndex,
  1924. llvm::Attribute::NoUnwind));
  1925. }
  1926. static llvm::FunctionCallee getGuardReleaseFn(CodeGenModule &CGM,
  1927. llvm::PointerType *GuardPtrTy) {
  1928. // void __cxa_guard_release(__guard *guard_object);
  1929. llvm::FunctionType *FTy =
  1930. llvm::FunctionType::get(CGM.VoidTy, GuardPtrTy, /*isVarArg=*/false);
  1931. return CGM.CreateRuntimeFunction(
  1932. FTy, "__cxa_guard_release",
  1933. llvm::AttributeList::get(CGM.getLLVMContext(),
  1934. llvm::AttributeList::FunctionIndex,
  1935. llvm::Attribute::NoUnwind));
  1936. }
  1937. static llvm::FunctionCallee getGuardAbortFn(CodeGenModule &CGM,
  1938. llvm::PointerType *GuardPtrTy) {
  1939. // void __cxa_guard_abort(__guard *guard_object);
  1940. llvm::FunctionType *FTy =
  1941. llvm::FunctionType::get(CGM.VoidTy, GuardPtrTy, /*isVarArg=*/false);
  1942. return CGM.CreateRuntimeFunction(
  1943. FTy, "__cxa_guard_abort",
  1944. llvm::AttributeList::get(CGM.getLLVMContext(),
  1945. llvm::AttributeList::FunctionIndex,
  1946. llvm::Attribute::NoUnwind));
  1947. }
namespace {
  /// EH cleanup that calls __cxa_guard_abort on the exceptional path out of
  /// a guarded static-local initializer, releasing the guard so that
  /// initialization can be retried later.
  struct CallGuardAbort final : EHScopeStack::Cleanup {
    llvm::GlobalVariable *Guard;  // the guard variable to abort
    CallGuardAbort(llvm::GlobalVariable *Guard) : Guard(Guard) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitNounwindRuntimeCall(getGuardAbortFn(CGF.CGM, Guard->getType()),
                                  Guard);
    }
  };
}
/// The ARM code here follows the Itanium code closely enough that we
/// just special-case it at particular places.
///
/// Emits the guarded (run-once) dynamic initialization of a static or
/// thread-local variable, per Itanium C++ ABI 3.3.2: test the guard, call
/// __cxa_guard_acquire/release around the initializer when thread-safe
/// statics are required, and register __cxa_guard_abort on the EH path.
void ItaniumCXXABI::EmitGuardedInit(CodeGenFunction &CGF,
                                    const VarDecl &D,
                                    llvm::GlobalVariable *var,
                                    bool shouldPerformInit) {
  CGBuilderTy &Builder = CGF.Builder;

  // Inline variables that weren't instantiated from variable templates have
  // partially-ordered initialization within their translation unit.
  bool NonTemplateInline =
      D.isInline() &&
      !isTemplateInstantiation(D.getTemplateSpecializationKind());

  // We only need to use thread-safe statics for local non-TLS variables and
  // inline variables; other global initialization is always single-threaded
  // or (through lazy dynamic loading in multiple threads) unsequenced.
  bool threadsafe = getContext().getLangOpts().ThreadsafeStatics &&
                    (D.isLocalVarDecl() || NonTemplateInline) &&
                    !D.getTLSKind();

  // If we have a global variable with internal linkage and thread-safe statics
  // are disabled, we can just let the guard variable be of type i8.
  bool useInt8GuardVariable = !threadsafe && var->hasInternalLinkage();

  llvm::IntegerType *guardTy;
  CharUnits guardAlignment;
  if (useInt8GuardVariable) {
    guardTy = CGF.Int8Ty;
    guardAlignment = CharUnits::One();
  } else {
    // Guard variables are 64 bits in the generic ABI and size width on ARM
    // (i.e. 32-bit on AArch32, 64-bit on AArch64).
    if (UseARMGuardVarABI) {
      guardTy = CGF.SizeTy;
      guardAlignment = CGF.getSizeAlign();
    } else {
      guardTy = CGF.Int64Ty;
      guardAlignment =
          CharUnits::fromQuantity(CGM.getDataLayout().getABITypeAlign(guardTy));
    }
  }
  llvm::PointerType *guardPtrTy = guardTy->getPointerTo(
      CGF.CGM.getDataLayout().getDefaultGlobalsAddressSpace());

  // Create the guard variable if we don't already have it (as we
  // might if we're double-emitting this function body).
  llvm::GlobalVariable *guard = CGM.getStaticLocalDeclGuardAddress(&D);
  if (!guard) {
    // Mangle the name for the guard.
    SmallString<256> guardName;
    {
      llvm::raw_svector_ostream out(guardName);
      getMangleContext().mangleStaticGuardVariable(&D, out);
    }

    // Create the guard variable with a zero-initializer.
    // Just absorb linkage, visibility and dll storage class from the guarded
    // variable.
    guard = new llvm::GlobalVariable(CGM.getModule(), guardTy,
                                     false, var->getLinkage(),
                                     llvm::ConstantInt::get(guardTy, 0),
                                     guardName.str());
    guard->setDSOLocal(var->isDSOLocal());
    guard->setVisibility(var->getVisibility());
    guard->setDLLStorageClass(var->getDLLStorageClass());
    // If the variable is thread-local, so is its guard variable.
    guard->setThreadLocalMode(var->getThreadLocalMode());
    guard->setAlignment(guardAlignment.getAsAlign());

    // The ABI says: "It is suggested that it be emitted in the same COMDAT
    // group as the associated data object." In practice, this doesn't work for
    // non-ELF and non-Wasm object formats, so only do it for ELF and Wasm.
    llvm::Comdat *C = var->getComdat();
    if (!D.isLocalVarDecl() && C &&
        (CGM.getTarget().getTriple().isOSBinFormatELF() ||
         CGM.getTarget().getTriple().isOSBinFormatWasm())) {
      guard->setComdat(C);
    } else if (CGM.supportsCOMDAT() && guard->isWeakForLinker()) {
      guard->setComdat(CGM.getModule().getOrInsertComdat(guard->getName()));
    }

    CGM.setStaticLocalDeclGuardAddress(&D, guard);
  }

  Address guardAddr = Address(guard, guard->getValueType(), guardAlignment);

  // Test whether the variable has completed initialization.
  //
  // Itanium C++ ABI 3.3.2:
  //   The following is pseudo-code showing how these functions can be used:
  //     if (obj_guard.first_byte == 0) {
  //       if ( __cxa_guard_acquire (&obj_guard) ) {
  //         try {
  //           ... initialize the object ...;
  //         } catch (...) {
  //            __cxa_guard_abort (&obj_guard);
  //            throw;
  //         }
  //         ... queue object destructor with __cxa_atexit() ...;
  //         __cxa_guard_release (&obj_guard);
  //       }
  //     }
  //
  // If threadsafe statics are enabled, but we don't have inline atomics, just
  // call __cxa_guard_acquire unconditionally.  The "inline" check isn't
  // actually inline, and the user might not expect calls to __atomic libcalls.

  unsigned MaxInlineWidthInBits = CGF.getTarget().getMaxAtomicInlineWidth();
  llvm::BasicBlock *EndBlock = CGF.createBasicBlock("init.end");
  if (!threadsafe || MaxInlineWidthInBits) {
    // Load the first byte of the guard variable.
    llvm::LoadInst *LI =
        Builder.CreateLoad(Builder.CreateElementBitCast(guardAddr, CGM.Int8Ty));

    // Itanium ABI:
    //   An implementation supporting thread-safety on multiprocessor
    //   systems must also guarantee that references to the initialized
    //   object do not occur before the load of the initialization flag.
    //
    // In LLVM, we do this by marking the load Acquire.
    if (threadsafe)
      LI->setAtomic(llvm::AtomicOrdering::Acquire);

    // For ARM, we should only check the first bit, rather than the entire byte:
    //
    // ARM C++ ABI 3.2.3.1:
    //   To support the potential use of initialization guard variables
    //   as semaphores that are the target of ARM SWP and LDREX/STREX
    //   synchronizing instructions we define a static initialization
    //   guard variable to be a 4-byte aligned, 4-byte word with the
    //   following inline access protocol.
    //     #define INITIALIZED 1
    //     if ((obj_guard & INITIALIZED) != INITIALIZED) {
    //       if (__cxa_guard_acquire(&obj_guard))
    //         ...
    //     }
    //
    // and similarly for ARM64:
    //
    // ARM64 C++ ABI 3.2.2:
    //   This ABI instead only specifies the value bit 0 of the static guard
    //   variable; all other bits are platform defined. Bit 0 shall be 0 when
    //   the variable is not initialized and 1 when it is.
    llvm::Value *V =
        (UseARMGuardVarABI && !useInt8GuardVariable)
            ? Builder.CreateAnd(LI, llvm::ConstantInt::get(CGM.Int8Ty, 1))
            : LI;
    llvm::Value *NeedsInit = Builder.CreateIsNull(V, "guard.uninitialized");

    llvm::BasicBlock *InitCheckBlock = CGF.createBasicBlock("init.check");

    // Check if the first byte of the guard variable is zero.
    CGF.EmitCXXGuardedInitBranch(NeedsInit, InitCheckBlock, EndBlock,
                                 CodeGenFunction::GuardKind::VariableGuard, &D);

    CGF.EmitBlock(InitCheckBlock);
  }

  // The semantics of dynamic initialization of variables with static or thread
  // storage duration depends on whether they are declared at block-scope. The
  // initialization of such variables at block-scope can be aborted with an
  // exception and later retried (per C++20 [stmt.dcl]p4), and recursive entry
  // to their initialization has undefined behavior (also per C++20
  // [stmt.dcl]p4). For such variables declared at non-block scope, exceptions
  // lead to termination (per C++20 [except.terminate]p1), and recursive
  // references to the variables are governed only by the lifetime rules (per
  // C++20 [class.cdtor]p2), which means such references are perfectly fine as
  // long as they avoid touching memory. As a result, block-scope variables must
  // not be marked as initialized until after initialization completes (unless
  // the mark is reverted following an exception), but non-block-scope variables
  // must be marked prior to initialization so that recursive accesses during
  // initialization do not restart initialization.

  // Variables used when coping with thread-safe statics and exceptions.
  if (threadsafe) {
    // Call __cxa_guard_acquire.
    llvm::Value *V
      = CGF.EmitNounwindRuntimeCall(getGuardAcquireFn(CGM, guardPtrTy), guard);

    llvm::BasicBlock *InitBlock = CGF.createBasicBlock("init");

    // Only run the initializer if acquire returned nonzero (we won the race).
    Builder.CreateCondBr(Builder.CreateIsNotNull(V, "tobool"),
                         InitBlock, EndBlock);

    // Call __cxa_guard_abort along the exceptional edge.
    CGF.EHStack.pushCleanup<CallGuardAbort>(EHCleanup, guard);

    CGF.EmitBlock(InitBlock);
  } else if (!D.isLocalVarDecl()) {
    // For non-local variables, store 1 into the first byte of the guard
    // variable before the object initialization begins so that references
    // to the variable during initialization don't restart initialization.
    Builder.CreateStore(llvm::ConstantInt::get(CGM.Int8Ty, 1),
                        Builder.CreateElementBitCast(guardAddr, CGM.Int8Ty));
  }

  // Emit the initializer and add a global destructor if appropriate.
  CGF.EmitCXXGlobalVarDeclInit(D, var, shouldPerformInit);

  if (threadsafe) {
    // Pop the guard-abort cleanup if we pushed one.
    CGF.PopCleanupBlock();

    // Call __cxa_guard_release.  This cannot throw.
    CGF.EmitNounwindRuntimeCall(getGuardReleaseFn(CGM, guardPtrTy),
                                guardAddr.getPointer());
  } else if (D.isLocalVarDecl()) {
    // For local variables, store 1 into the first byte of the guard variable
    // after the object initialization completes so that initialization is
    // retried if initialization is interrupted by an exception.
    Builder.CreateStore(llvm::ConstantInt::get(CGM.Int8Ty, 1),
                        Builder.CreateElementBitCast(guardAddr, CGM.Int8Ty));
  }

  CGF.EmitBlock(EndBlock);
}
/// Register a global destructor using __cxa_atexit.
///
/// \param dtor  the destructor function to register (cast to void(void*)).
/// \param addr  the object to pass to the destructor; may be null for
///              __attribute__((destructor)) registrations (see below).
/// \param TLS   if true, register via the thread-local atexit entry point
///              (_tlv_atexit on Darwin, __cxa_thread_atexit elsewhere).
static void emitGlobalDtorWithCXAAtExit(CodeGenFunction &CGF,
                                        llvm::FunctionCallee dtor,
                                        llvm::Constant *addr, bool TLS) {
  assert(!CGF.getTarget().getTriple().isOSAIX() &&
         "unexpected call to emitGlobalDtorWithCXAAtExit");
  assert((TLS || CGF.getTypes().getCodeGenOpts().CXAAtExit) &&
         "__cxa_atexit is disabled");
  const char *Name = "__cxa_atexit";
  if (TLS) {
    const llvm::Triple &T = CGF.getTarget().getTriple();
    Name = T.isOSDarwin() ? "_tlv_atexit" : "__cxa_thread_atexit";
  }

  // We're assuming that the destructor function is something we can
  // reasonably call with the default CC.  Go ahead and cast it to the
  // right prototype.
  llvm::Type *dtorTy =
      llvm::FunctionType::get(CGF.VoidTy, CGF.Int8PtrTy, false)->getPointerTo();

  // Preserve address space of addr.
  auto AddrAS = addr ? addr->getType()->getPointerAddressSpace() : 0;
  auto AddrInt8PtrTy =
      AddrAS ? CGF.Int8Ty->getPointerTo(AddrAS) : CGF.Int8PtrTy;

  // Create a variable that binds the atexit to this shared object.
  llvm::Constant *handle =
      CGF.CGM.CreateRuntimeVariable(CGF.Int8Ty, "__dso_handle");
  auto *GV = cast<llvm::GlobalValue>(handle->stripPointerCasts());
  GV->setVisibility(llvm::GlobalValue::HiddenVisibility);

  // extern "C" int __cxa_atexit(void (*f)(void *), void *p, void *d);
  llvm::Type *paramTys[] = {dtorTy, AddrInt8PtrTy, handle->getType()};
  llvm::FunctionType *atexitTy =
      llvm::FunctionType::get(CGF.IntTy, paramTys, false);

  // Fetch the actual function.
  llvm::FunctionCallee atexit = CGF.CGM.CreateRuntimeFunction(atexitTy, Name);
  if (llvm::Function *fn = dyn_cast<llvm::Function>(atexit.getCallee()))
    fn->setDoesNotThrow();

  if (!addr)
    // addr is null when we are trying to register a dtor annotated with
    // __attribute__((destructor)) in a constructor function. Using null here is
    // okay because this argument is just passed back to the destructor
    // function.
    addr = llvm::Constant::getNullValue(CGF.Int8PtrTy);

  llvm::Value *args[] = {llvm::ConstantExpr::getBitCast(
                             cast<llvm::Constant>(dtor.getCallee()), dtorTy),
                         llvm::ConstantExpr::getBitCast(addr, AddrInt8PtrTy),
                         handle};
  CGF.EmitNounwindRuntimeCall(atexit, args);
}
  2196. static llvm::Function *createGlobalInitOrCleanupFn(CodeGen::CodeGenModule &CGM,
  2197. StringRef FnName) {
  2198. // Create a function that registers/unregisters destructors that have the same
  2199. // priority.
  2200. llvm::FunctionType *FTy = llvm::FunctionType::get(CGM.VoidTy, false);
  2201. llvm::Function *GlobalInitOrCleanupFn = CGM.CreateGlobalInitOrCleanUpFunction(
  2202. FTy, FnName, CGM.getTypes().arrangeNullaryFunction(), SourceLocation());
  2203. return GlobalInitOrCleanupFn;
  2204. }
/// For each destructor priority, build a __GLOBAL_cleanup_<prio> function
/// that calls unatexit on every registered destructor (in reverse order of
/// registration) and directly invokes any destructor that unatexit reports
/// as still pending (returns 0).  The cleanup function is then itself
/// registered as a global destructor at that priority.
void CodeGenModule::unregisterGlobalDtorsWithUnAtExit() {
  for (const auto &I : DtorsUsingAtExit) {
    int Priority = I.first;
    std::string GlobalCleanupFnName =
        std::string("__GLOBAL_cleanup_") + llvm::to_string(Priority);

    llvm::Function *GlobalCleanupFn =
        createGlobalInitOrCleanupFn(*this, GlobalCleanupFnName);

    CodeGenFunction CGF(*this);
    CGF.StartFunction(GlobalDecl(), getContext().VoidTy, GlobalCleanupFn,
                      getTypes().arrangeNullaryFunction(), FunctionArgList(),
                      SourceLocation(), SourceLocation());
    auto AL = ApplyDebugLocation::CreateArtificial(CGF);

    // Get the destructor function type, void(*)(void).
    llvm::FunctionType *dtorFuncTy = llvm::FunctionType::get(CGF.VoidTy, false);
    llvm::Type *dtorTy = dtorFuncTy->getPointerTo();

    // Destructor functions are run/unregistered in non-ascending
    // order of their priorities.
    const llvm::TinyPtrVector<llvm::Function *> &Dtors = I.second;
    auto itv = Dtors.rbegin();
    while (itv != Dtors.rend()) {
      llvm::Function *Dtor = *itv;

      // We're assuming that the destructor function is something we can
      // reasonably call with the correct CC.  Go ahead and cast it to the
      // right prototype.
      llvm::Constant *dtor = llvm::ConstantExpr::getBitCast(Dtor, dtorTy);
      llvm::Value *V = CGF.unregisterGlobalDtorWithUnAtExit(dtor);
      llvm::Value *NeedsDestruct =
          CGF.Builder.CreateIsNull(V, "needs_destruct");

      llvm::BasicBlock *DestructCallBlock =
          CGF.createBasicBlock("destruct.call");
      // The fall-through block of this destructor is the unatexit call for
      // the next one, except for the last, which falls through to the end.
      llvm::BasicBlock *EndBlock = CGF.createBasicBlock(
          (itv + 1) != Dtors.rend() ? "unatexit.call" : "destruct.end");
      // Check if unatexit returns a value of 0. If it does, jump to
      // DestructCallBlock, otherwise jump to EndBlock directly.
      CGF.Builder.CreateCondBr(NeedsDestruct, DestructCallBlock, EndBlock);

      CGF.EmitBlock(DestructCallBlock);

      // Emit the call to casted Dtor.
      llvm::CallInst *CI = CGF.Builder.CreateCall(dtorFuncTy, dtor);
      // Make sure the call and the callee agree on calling convention.
      CI->setCallingConv(Dtor->getCallingConv());

      CGF.EmitBlock(EndBlock);

      itv++;
    }

    CGF.FinishFunction();
    AddGlobalDtor(GlobalCleanupFn, Priority);
  }
}
  2252. void CodeGenModule::registerGlobalDtorsWithAtExit() {
  2253. for (const auto &I : DtorsUsingAtExit) {
  2254. int Priority = I.first;
  2255. std::string GlobalInitFnName =
  2256. std::string("__GLOBAL_init_") + llvm::to_string(Priority);
  2257. llvm::Function *GlobalInitFn =
  2258. createGlobalInitOrCleanupFn(*this, GlobalInitFnName);
  2259. CodeGenFunction CGF(*this);
  2260. CGF.StartFunction(GlobalDecl(), getContext().VoidTy, GlobalInitFn,
  2261. getTypes().arrangeNullaryFunction(), FunctionArgList(),
  2262. SourceLocation(), SourceLocation());
  2263. auto AL = ApplyDebugLocation::CreateArtificial(CGF);
  2264. // Since constructor functions are run in non-descending order of their
  2265. // priorities, destructors are registered in non-descending order of their
  2266. // priorities, and since destructor functions are run in the reverse order
  2267. // of their registration, destructor functions are run in non-ascending
  2268. // order of their priorities.
  2269. const llvm::TinyPtrVector<llvm::Function *> &Dtors = I.second;
  2270. for (auto *Dtor : Dtors) {
  2271. // Register the destructor function calling __cxa_atexit if it is
  2272. // available. Otherwise fall back on calling atexit.
  2273. if (getCodeGenOpts().CXAAtExit) {
  2274. emitGlobalDtorWithCXAAtExit(CGF, Dtor, nullptr, false);
  2275. } else {
  2276. // Get the destructor function type, void(*)(void).
  2277. llvm::Type *dtorTy =
  2278. llvm::FunctionType::get(CGF.VoidTy, false)->getPointerTo();
  2279. // We're assuming that the destructor function is something we can
  2280. // reasonably call with the correct CC. Go ahead and cast it to the
  2281. // right prototype.
  2282. CGF.registerGlobalDtorWithAtExit(
  2283. llvm::ConstantExpr::getBitCast(Dtor, dtorTy));
  2284. }
  2285. }
  2286. CGF.FinishFunction();
  2287. AddGlobalCtor(GlobalInitFn, Priority);
  2288. }
  2289. if (getCXXABI().useSinitAndSterm())
  2290. unregisterGlobalDtorsWithUnAtExit();
  2291. }
  2292. /// Register a global destructor as best as we know how.
  2293. void ItaniumCXXABI::registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
  2294. llvm::FunctionCallee dtor,
  2295. llvm::Constant *addr) {
  2296. if (D.isNoDestroy(CGM.getContext()))
  2297. return;
  2298. // emitGlobalDtorWithCXAAtExit will emit a call to either __cxa_thread_atexit
  2299. // or __cxa_atexit depending on whether this VarDecl is a thread-local storage
  2300. // or not. CXAAtExit controls only __cxa_atexit, so use it if it is enabled.
  2301. // We can always use __cxa_thread_atexit.
  2302. if (CGM.getCodeGenOpts().CXAAtExit || D.getTLSKind())
  2303. return emitGlobalDtorWithCXAAtExit(CGF, dtor, addr, D.getTLSKind());
  2304. // In Apple kexts, we want to add a global destructor entry.
  2305. // FIXME: shouldn't this be guarded by some variable?
  2306. if (CGM.getLangOpts().AppleKext) {
  2307. // Generate a global destructor entry.
  2308. return CGM.AddCXXDtorEntry(dtor, addr);
  2309. }
  2310. CGF.registerGlobalDtorWithAtExit(D, dtor, addr);
  2311. }
  2312. static bool isThreadWrapperReplaceable(const VarDecl *VD,
  2313. CodeGen::CodeGenModule &CGM) {
  2314. assert(!VD->isStaticLocal() && "static local VarDecls don't need wrappers!");
  2315. // Darwin prefers to have references to thread local variables to go through
  2316. // the thread wrapper instead of directly referencing the backing variable.
  2317. return VD->getTLSKind() == VarDecl::TLS_Dynamic &&
  2318. CGM.getTarget().getTriple().isOSDarwin();
  2319. }
  2320. /// Get the appropriate linkage for the wrapper function. This is essentially
  2321. /// the weak form of the variable's linkage; every translation unit which needs
  2322. /// the wrapper emits a copy, and we want the linker to merge them.
  2323. static llvm::GlobalValue::LinkageTypes
  2324. getThreadLocalWrapperLinkage(const VarDecl *VD, CodeGen::CodeGenModule &CGM) {
  2325. llvm::GlobalValue::LinkageTypes VarLinkage =
  2326. CGM.getLLVMLinkageVarDefinition(VD, /*IsConstant=*/false);
  2327. // For internal linkage variables, we don't need an external or weak wrapper.
  2328. if (llvm::GlobalValue::isLocalLinkage(VarLinkage))
  2329. return VarLinkage;
  2330. // If the thread wrapper is replaceable, give it appropriate linkage.
  2331. if (isThreadWrapperReplaceable(VD, CGM))
  2332. if (!llvm::GlobalVariable::isLinkOnceLinkage(VarLinkage) &&
  2333. !llvm::GlobalVariable::isWeakODRLinkage(VarLinkage))
  2334. return VarLinkage;
  2335. return llvm::GlobalValue::WeakODRLinkage;
  2336. }
// Return (creating on first use) the thread_local wrapper function for VD.
// The wrapper returns a pointer to the variable (for references, a pointer to
// the referenced object); its body is emitted later by
// EmitThreadLocalInitFuncs, which consumes the ThreadWrappers list populated
// here.
llvm::Function *
ItaniumCXXABI::getOrCreateThreadLocalWrapper(const VarDecl *VD,
                                             llvm::Value *Val) {
  // Mangle the name for the thread_local wrapper function.
  SmallString<256> WrapperName;
  {
    llvm::raw_svector_ostream Out(WrapperName);
    getMangleContext().mangleItaniumThreadLocalWrapper(VD, Out);
  }

  // FIXME: If VD is a definition, we should regenerate the function attributes
  // before returning.
  if (llvm::Value *V = CGM.getModule().getNamedValue(WrapperName))
    return cast<llvm::Function>(V);

  // The wrapper's return type is a pointer to the (non-reference) value type.
  QualType RetQT = VD->getType();
  if (RetQT->isReferenceType())
    RetQT = RetQT.getNonReferenceType();

  const CGFunctionInfo &FI = CGM.getTypes().arrangeBuiltinFunctionDeclaration(
      getContext().getPointerType(RetQT), FunctionArgList());

  llvm::FunctionType *FnTy = CGM.getTypes().GetFunctionType(FI);
  llvm::Function *Wrapper =
      llvm::Function::Create(FnTy, getThreadLocalWrapperLinkage(VD, CGM),
                             WrapperName.str(), &CGM.getModule());

  // Weak definitions are merged via COMDAT where the target supports it.
  if (CGM.supportsCOMDAT() && Wrapper->isWeakForLinker())
    Wrapper->setComdat(CGM.getModule().getOrInsertComdat(Wrapper->getName()));

  CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI, Wrapper, /*IsThunk=*/false);

  // Always resolve references to the wrapper at link time.
  if (!Wrapper->hasLocalLinkage())
    if (!isThreadWrapperReplaceable(VD, CGM) ||
        llvm::GlobalVariable::isLinkOnceLinkage(Wrapper->getLinkage()) ||
        llvm::GlobalVariable::isWeakODRLinkage(Wrapper->getLinkage()) ||
        VD->getVisibility() == HiddenVisibility)
      Wrapper->setVisibility(llvm::GlobalValue::HiddenVisibility);

  // Replaceable wrappers use the CXX_FAST_TLS convention and cannot unwind.
  if (isThreadWrapperReplaceable(VD, CGM)) {
    Wrapper->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
    Wrapper->addFnAttr(llvm::Attribute::NoUnwind);
  }

  // Record the wrapper so its body can be emitted in EmitThreadLocalInitFuncs.
  ThreadWrappers.push_back({VD, Wrapper});
  return Wrapper;
}
// Emit the per-TU thread_local machinery: the guarded __tls_init function for
// ordered initializers, per-variable init aliases/declarations, and the body
// of every thread wrapper recorded in ThreadWrappers.
void ItaniumCXXABI::EmitThreadLocalInitFuncs(
    CodeGenModule &CGM, ArrayRef<const VarDecl *> CXXThreadLocals,
    ArrayRef<llvm::Function *> CXXThreadLocalInits,
    ArrayRef<const VarDecl *> CXXThreadLocalInitVars) {
  llvm::Function *InitFunc = nullptr;

  // Separate initializers into those with ordered (or partially-ordered)
  // initialization and those with unordered initialization.
  llvm::SmallVector<llvm::Function *, 8> OrderedInits;
  llvm::SmallDenseMap<const VarDecl *, llvm::Function *> UnorderedInits;
  for (unsigned I = 0; I != CXXThreadLocalInits.size(); ++I) {
    // Template instantiations have unordered initialization; everything else
    // runs in declaration order through __tls_init.
    if (isTemplateInstantiation(
            CXXThreadLocalInitVars[I]->getTemplateSpecializationKind()))
      UnorderedInits[CXXThreadLocalInitVars[I]->getCanonicalDecl()] =
          CXXThreadLocalInits[I];
    else
      OrderedInits.push_back(CXXThreadLocalInits[I]);
  }

  if (!OrderedInits.empty()) {
    // Generate a guarded initialization function.
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
    const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
    InitFunc = CGM.CreateGlobalInitOrCleanUpFunction(FTy, "__tls_init", FI,
                                                     SourceLocation(),
                                                     /*TLS=*/true);
    // __tls_guard is a thread-local byte recording whether __tls_init has
    // already run on the current thread.
    llvm::GlobalVariable *Guard = new llvm::GlobalVariable(
        CGM.getModule(), CGM.Int8Ty, /*isConstant=*/false,
        llvm::GlobalVariable::InternalLinkage,
        llvm::ConstantInt::get(CGM.Int8Ty, 0), "__tls_guard");
    Guard->setThreadLocal(true);
    Guard->setThreadLocalMode(CGM.GetDefaultLLVMTLSModel());

    CharUnits GuardAlign = CharUnits::One();
    Guard->setAlignment(GuardAlign.getAsAlign());

    CodeGenFunction(CGM).GenerateCXXGlobalInitFunc(
        InitFunc, OrderedInits, ConstantAddress(Guard, CGM.Int8Ty, GuardAlign));
    // On Darwin platforms, use CXX_FAST_TLS calling convention.
    if (CGM.getTarget().getTriple().isOSDarwin()) {
      InitFunc->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
      InitFunc->addFnAttr(llvm::Attribute::NoUnwind);
    }
  }

  // Create declarations for thread wrappers for all thread-local variables
  // with non-discardable definitions in this translation unit.
  for (const VarDecl *VD : CXXThreadLocals) {
    if (VD->hasDefinition() &&
        !isDiscardableGVALinkage(getContext().GetGVALinkageForVariable(VD))) {
      llvm::GlobalValue *GV = CGM.GetGlobalValue(CGM.getMangledName(VD));
      getOrCreateThreadLocalWrapper(VD, GV);
    }
  }

  // Emit all referenced thread wrappers.
  for (auto VDAndWrapper : ThreadWrappers) {
    const VarDecl *VD = VDAndWrapper.first;
    llvm::GlobalVariable *Var =
        cast<llvm::GlobalVariable>(CGM.GetGlobalValue(CGM.getMangledName(VD)));
    llvm::Function *Wrapper = VDAndWrapper.second;

    // Some targets require that all access to thread local variables go through
    // the thread wrapper. This means that we cannot attempt to create a thread
    // wrapper or a thread helper.
    if (!VD->hasDefinition()) {
      if (isThreadWrapperReplaceable(VD, CGM)) {
        Wrapper->setLinkage(llvm::Function::ExternalLinkage);
        continue;
      }

      // If this isn't a TU in which this variable is defined, the thread
      // wrapper is discardable.
      if (Wrapper->getLinkage() == llvm::Function::WeakODRLinkage)
        Wrapper->setLinkage(llvm::Function::LinkOnceODRLinkage);
    }

    CGM.SetLLVMFunctionAttributesForDefinition(nullptr, Wrapper);

    // Mangle the name for the thread_local initialization function.
    SmallString<256> InitFnName;
    {
      llvm::raw_svector_ostream Out(InitFnName);
      getMangleContext().mangleItaniumThreadLocalInit(VD, Out);
    }

    llvm::FunctionType *InitFnTy = llvm::FunctionType::get(CGM.VoidTy, false);

    // If we have a definition for the variable, emit the initialization
    // function as an alias to the global Init function (if any). Otherwise,
    // produce a declaration of the initialization function.
    llvm::GlobalValue *Init = nullptr;
    bool InitIsInitFunc = false;
    bool HasConstantInitialization = false;
    if (!usesThreadWrapperFunction(VD)) {
      // Constant-initialized and trivially-destructible: nothing to call.
      HasConstantInitialization = true;
    } else if (VD->hasDefinition()) {
      InitIsInitFunc = true;
      llvm::Function *InitFuncToUse = InitFunc;
      // Unordered (template-instantiation) variables alias their own init
      // function rather than the shared __tls_init.
      if (isTemplateInstantiation(VD->getTemplateSpecializationKind()))
        InitFuncToUse = UnorderedInits.lookup(VD->getCanonicalDecl());
      if (InitFuncToUse)
        Init = llvm::GlobalAlias::create(Var->getLinkage(), InitFnName.str(),
                                         InitFuncToUse);
    } else {
      // Emit a weak global function referring to the initialization function.
      // This function will not exist if the TU defining the thread_local
      // variable in question does not need any dynamic initialization for
      // its thread_local variables.
      Init = llvm::Function::Create(InitFnTy,
                                    llvm::GlobalVariable::ExternalWeakLinkage,
                                    InitFnName.str(), &CGM.getModule());
      const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
      CGM.SetLLVMFunctionAttributes(
          GlobalDecl(), FI, cast<llvm::Function>(Init), /*IsThunk=*/false);
    }

    if (Init) {
      Init->setVisibility(Var->getVisibility());
      // Don't mark an extern_weak function DSO local on windows.
      if (!CGM.getTriple().isOSWindows() || !Init->hasExternalWeakLinkage())
        Init->setDSOLocal(Var->isDSOLocal());
    }

    llvm::LLVMContext &Context = CGM.getModule().getContext();

    // The linker on AIX is not happy with missing weak symbols. However,
    // other TUs will not know whether the initialization routine exists
    // so create an empty, init function to satisfy the linker.
    // This is needed whenever a thread wrapper function is not used, and
    // also when the symbol is weak.
    if (CGM.getTriple().isOSAIX() && VD->hasDefinition() &&
        isEmittedWithConstantInitializer(VD, true) &&
        !mayNeedDestruction(VD)) {
      // Init should be null. If it were non-null, then the logic above would
      // either be defining the function to be an alias or declaring the
      // function with the expectation that the definition of the variable
      // is elsewhere.
      assert(Init == nullptr && "Expected Init to be null.");

      llvm::Function *Func = llvm::Function::Create(
          InitFnTy, Var->getLinkage(), InitFnName.str(), &CGM.getModule());
      const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
      CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI,
                                    cast<llvm::Function>(Func),
                                    /*IsThunk=*/false);
      // Create a function body that just returns
      llvm::BasicBlock *Entry = llvm::BasicBlock::Create(Context, "", Func);
      CGBuilderTy Builder(CGM, Entry);
      Builder.CreateRetVoid();
    }

    // Now emit the wrapper body itself: run the init path (if any), then
    // return the address of the thread-local variable.
    llvm::BasicBlock *Entry = llvm::BasicBlock::Create(Context, "", Wrapper);
    CGBuilderTy Builder(CGM, Entry);
    if (HasConstantInitialization) {
      // No dynamic initialization to invoke.
    } else if (InitIsInitFunc) {
      if (Init) {
        llvm::CallInst *CallVal = Builder.CreateCall(InitFnTy, Init);
        if (isThreadWrapperReplaceable(VD, CGM)) {
          // Propagate CXX_FAST_TLS through the alias to the real init.
          CallVal->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
          llvm::Function *Fn =
              cast<llvm::Function>(cast<llvm::GlobalAlias>(Init)->getAliasee());
          Fn->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
        }
      }
    } else if (CGM.getTriple().isOSAIX()) {
      // On AIX, except if constinit and also neither of class type or of
      // (possibly multi-dimensional) array of class type, thread_local vars
      // will have init routines regardless of whether they are
      // const-initialized. Since the routine is guaranteed to exist, we can
      // unconditionally call it without testing for its existence. This
      // avoids potentially unresolved weak symbols which the AIX linker
      // isn't happy with.
      Builder.CreateCall(InitFnTy, Init);
    } else {
      // Don't know whether we have an init function. Call it if it exists.
      llvm::Value *Have = Builder.CreateIsNotNull(Init);
      llvm::BasicBlock *InitBB = llvm::BasicBlock::Create(Context, "", Wrapper);
      llvm::BasicBlock *ExitBB = llvm::BasicBlock::Create(Context, "", Wrapper);
      Builder.CreateCondBr(Have, InitBB, ExitBB);

      Builder.SetInsertPoint(InitBB);
      Builder.CreateCall(InitFnTy, Init);
      Builder.CreateBr(ExitBB);

      Builder.SetInsertPoint(ExitBB);
    }

    // For a reference, the result of the wrapper function is a pointer to
    // the referenced object.
    llvm::Value *Val = Builder.CreateThreadLocalAddress(Var);
    if (VD->getType()->isReferenceType()) {
      CharUnits Align = CGM.getContext().getDeclAlign(VD);
      Val = Builder.CreateAlignedLoad(Var->getValueType(), Val, Align);
    }
    if (Val->getType() != Wrapper->getReturnType())
      Val = Builder.CreatePointerBitCastOrAddrSpaceCast(
          Val, Wrapper->getReturnType(), "");

    Builder.CreateRet(Val);
  }
}
  2559. LValue ItaniumCXXABI::EmitThreadLocalVarDeclLValue(CodeGenFunction &CGF,
  2560. const VarDecl *VD,
  2561. QualType LValType) {
  2562. llvm::Value *Val = CGF.CGM.GetAddrOfGlobalVar(VD);
  2563. llvm::Function *Wrapper = getOrCreateThreadLocalWrapper(VD, Val);
  2564. llvm::CallInst *CallVal = CGF.Builder.CreateCall(Wrapper);
  2565. CallVal->setCallingConv(Wrapper->getCallingConv());
  2566. LValue LV;
  2567. if (VD->getType()->isReferenceType())
  2568. LV = CGF.MakeNaturalAlignAddrLValue(CallVal, LValType);
  2569. else
  2570. LV = CGF.MakeAddrLValue(CallVal, LValType,
  2571. CGF.getContext().getDeclAlign(VD));
  2572. // FIXME: need setObjCGCLValueClass?
  2573. return LV;
  2574. }
  2575. /// Return whether the given global decl needs a VTT parameter, which it does
  2576. /// if it's a base constructor or destructor with virtual bases.
  2577. bool ItaniumCXXABI::NeedsVTTParameter(GlobalDecl GD) {
  2578. const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  2579. // We don't have any virtual bases, just return early.
  2580. if (!MD->getParent()->getNumVBases())
  2581. return false;
  2582. // Check if we have a base constructor.
  2583. if (isa<CXXConstructorDecl>(MD) && GD.getCtorType() == Ctor_Base)
  2584. return true;
  2585. // Check if we have a base destructor.
  2586. if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
  2587. return true;
  2588. return false;
  2589. }
namespace {
/// ItaniumRTTIBuilder - Builds Itanium-ABI RTTI (std::type_info) descriptors.
/// Helper methods accumulate the descriptor's constants into Fields; the
/// BuildTypeInfo entry points produce the final constant.
class ItaniumRTTIBuilder {
  CodeGenModule &CGM;  // Per-module state.
  llvm::LLVMContext &VMContext;
  const ItaniumCXXABI &CXXABI;  // Per-module state.

  /// Fields - The fields of the RTTI descriptor currently being built.
  SmallVector<llvm::Constant *, 16> Fields;

  /// GetAddrOfTypeName - Returns the mangled type name of the given type.
  llvm::GlobalVariable *
  GetAddrOfTypeName(QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage);

  /// GetAddrOfExternalRTTIDescriptor - Returns the constant for the RTTI
  /// descriptor of the given type.
  llvm::Constant *GetAddrOfExternalRTTIDescriptor(QualType Ty);

  /// BuildVTablePointer - Build the vtable pointer for the given type.
  void BuildVTablePointer(const Type *Ty);

  /// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
  /// inheritance, according to the Itanium C++ ABI, 2.9.5p6b.
  void BuildSIClassTypeInfo(const CXXRecordDecl *RD);

  /// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
  /// classes with bases that do not satisfy the abi::__si_class_type_info
  /// constraints, according to the Itanium C++ ABI, 2.9.5p5c.
  void BuildVMIClassTypeInfo(const CXXRecordDecl *RD);

  /// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct, used
  /// for pointer types.
  void BuildPointerTypeInfo(QualType PointeeTy);

  /// BuildObjCObjectTypeInfo - Build the appropriate kind of
  /// type_info for an object type.
  void BuildObjCObjectTypeInfo(const ObjCObjectType *Ty);

  /// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
  /// struct, used for member pointer types.
  void BuildPointerToMemberTypeInfo(const MemberPointerType *Ty);

public:
  ItaniumRTTIBuilder(const ItaniumCXXABI &ABI)
      : CGM(ABI.CGM), VMContext(CGM.getModule().getContext()), CXXABI(ABI) {}

  // Pointer type info flags.
  enum {
    /// PTI_Const - Type has const qualifier.
    PTI_Const = 0x1,

    /// PTI_Volatile - Type has volatile qualifier.
    PTI_Volatile = 0x2,

    /// PTI_Restrict - Type has restrict qualifier.
    PTI_Restrict = 0x4,

    /// PTI_Incomplete - Type is incomplete.
    PTI_Incomplete = 0x8,

    /// PTI_ContainingClassIncomplete - Containing class is incomplete.
    /// (in pointer to member).
    PTI_ContainingClassIncomplete = 0x10,

    /// PTI_TransactionSafe - Pointee is transaction_safe function (C++ TM TS).
    //PTI_TransactionSafe = 0x20,

    /// PTI_Noexcept - Pointee is noexcept function (C++1z).
    PTI_Noexcept = 0x40,
  };

  // VMI type info flags.
  enum {
    /// VMI_NonDiamondRepeat - Class has non-diamond repeated inheritance.
    VMI_NonDiamondRepeat = 0x1,

    /// VMI_DiamondShaped - Class is diamond shaped.
    VMI_DiamondShaped = 0x2
  };

  // Base class type info flags.
  enum {
    /// BCTI_Virtual - Base class is virtual.
    BCTI_Virtual = 0x1,

    /// BCTI_Public - Base class is public.
    BCTI_Public = 0x2
  };

  /// BuildTypeInfo - Build the RTTI type info struct for the given type, or
  /// link to an existing RTTI descriptor if one already exists.
  llvm::Constant *BuildTypeInfo(QualType Ty);

  /// BuildTypeInfo - Build the RTTI type info struct for the given type.
  llvm::Constant *BuildTypeInfo(
      QualType Ty,
      llvm::GlobalVariable::LinkageTypes Linkage,
      llvm::GlobalValue::VisibilityTypes Visibility,
      llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass);
};
}
  2667. llvm::GlobalVariable *ItaniumRTTIBuilder::GetAddrOfTypeName(
  2668. QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage) {
  2669. SmallString<256> Name;
  2670. llvm::raw_svector_ostream Out(Name);
  2671. CGM.getCXXABI().getMangleContext().mangleCXXRTTIName(Ty, Out);
  2672. // We know that the mangled name of the type starts at index 4 of the
  2673. // mangled name of the typename, so we can just index into it in order to
  2674. // get the mangled name of the type.
  2675. llvm::Constant *Init = llvm::ConstantDataArray::getString(VMContext,
  2676. Name.substr(4));
  2677. auto Align = CGM.getContext().getTypeAlignInChars(CGM.getContext().CharTy);
  2678. llvm::GlobalVariable *GV = CGM.CreateOrReplaceCXXRuntimeVariable(
  2679. Name, Init->getType(), Linkage, Align.getAsAlign());
  2680. GV->setInitializer(Init);
  2681. return GV;
  2682. }
  2683. llvm::Constant *
  2684. ItaniumRTTIBuilder::GetAddrOfExternalRTTIDescriptor(QualType Ty) {
  2685. // Mangle the RTTI name.
  2686. SmallString<256> Name;
  2687. llvm::raw_svector_ostream Out(Name);
  2688. CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  2689. // Look for an existing global.
  2690. llvm::GlobalVariable *GV = CGM.getModule().getNamedGlobal(Name);
  2691. if (!GV) {
  2692. // Create a new global variable.
  2693. // Note for the future: If we would ever like to do deferred emission of
  2694. // RTTI, check if emitting vtables opportunistically need any adjustment.
  2695. GV = new llvm::GlobalVariable(CGM.getModule(), CGM.Int8PtrTy,
  2696. /*isConstant=*/true,
  2697. llvm::GlobalValue::ExternalLinkage, nullptr,
  2698. Name);
  2699. const CXXRecordDecl *RD = Ty->getAsCXXRecordDecl();
  2700. CGM.setGVProperties(GV, RD);
  2701. // Import the typeinfo symbol when all non-inline virtual methods are
  2702. // imported.
  2703. if (CGM.getTarget().hasPS4DLLImportExport()) {
  2704. if (RD && CXXRecordAllNonInlineVirtualsHaveAttr<DLLImportAttr>(RD)) {
  2705. GV->setDLLStorageClass(llvm::GlobalVariable::DLLImportStorageClass);
  2706. CGM.setDSOLocal(GV);
  2707. }
  2708. }
  2709. }
  2710. return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
  2711. }
  2712. /// TypeInfoIsInStandardLibrary - Given a builtin type, returns whether the type
  2713. /// info for that type is defined in the standard library.
  2714. static bool TypeInfoIsInStandardLibrary(const BuiltinType *Ty) {
  2715. // Itanium C++ ABI 2.9.2:
  2716. // Basic type information (e.g. for "int", "bool", etc.) will be kept in
  2717. // the run-time support library. Specifically, the run-time support
  2718. // library should contain type_info objects for the types X, X* and
  2719. // X const*, for every X in: void, std::nullptr_t, bool, wchar_t, char,
  2720. // unsigned char, signed char, short, unsigned short, int, unsigned int,
  2721. // long, unsigned long, long long, unsigned long long, float, double,
  2722. // long double, char16_t, char32_t, and the IEEE 754r decimal and
  2723. // half-precision floating point types.
  2724. //
  2725. // GCC also emits RTTI for __int128.
  2726. // FIXME: We do not emit RTTI information for decimal types here.
  2727. // Types added here must also be added to EmitFundamentalRTTIDescriptors.
  2728. switch (Ty->getKind()) {
  2729. case BuiltinType::Void:
  2730. case BuiltinType::NullPtr:
  2731. case BuiltinType::Bool:
  2732. case BuiltinType::WChar_S:
  2733. case BuiltinType::WChar_U:
  2734. case BuiltinType::Char_U:
  2735. case BuiltinType::Char_S:
  2736. case BuiltinType::UChar:
  2737. case BuiltinType::SChar:
  2738. case BuiltinType::Short:
  2739. case BuiltinType::UShort:
  2740. case BuiltinType::Int:
  2741. case BuiltinType::UInt:
  2742. case BuiltinType::Long:
  2743. case BuiltinType::ULong:
  2744. case BuiltinType::LongLong:
  2745. case BuiltinType::ULongLong:
  2746. case BuiltinType::Half:
  2747. case BuiltinType::Float:
  2748. case BuiltinType::Double:
  2749. case BuiltinType::LongDouble:
  2750. case BuiltinType::Float16:
  2751. case BuiltinType::Float128:
  2752. case BuiltinType::Ibm128:
  2753. case BuiltinType::Char8:
  2754. case BuiltinType::Char16:
  2755. case BuiltinType::Char32:
  2756. case BuiltinType::Int128:
  2757. case BuiltinType::UInt128:
  2758. return true;
  2759. #define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \
  2760. case BuiltinType::Id:
  2761. #include "clang/Basic/OpenCLImageTypes.def"
  2762. #define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \
  2763. case BuiltinType::Id:
  2764. #include "clang/Basic/OpenCLExtensionTypes.def"
  2765. case BuiltinType::OCLSampler:
  2766. case BuiltinType::OCLEvent:
  2767. case BuiltinType::OCLClkEvent:
  2768. case BuiltinType::OCLQueue:
  2769. case BuiltinType::OCLReserveID:
  2770. #define SVE_TYPE(Name, Id, SingletonId) \
  2771. case BuiltinType::Id:
  2772. #include "clang/Basic/AArch64SVEACLETypes.def"
  2773. #define PPC_VECTOR_TYPE(Name, Id, Size) \
  2774. case BuiltinType::Id:
  2775. #include "clang/Basic/PPCTypes.def"
  2776. #define RVV_TYPE(Name, Id, SingletonId) case BuiltinType::Id:
  2777. #include "clang/Basic/RISCVVTypes.def"
  2778. case BuiltinType::ShortAccum:
  2779. case BuiltinType::Accum:
  2780. case BuiltinType::LongAccum:
  2781. case BuiltinType::UShortAccum:
  2782. case BuiltinType::UAccum:
  2783. case BuiltinType::ULongAccum:
  2784. case BuiltinType::ShortFract:
  2785. case BuiltinType::Fract:
  2786. case BuiltinType::LongFract:
  2787. case BuiltinType::UShortFract:
  2788. case BuiltinType::UFract:
  2789. case BuiltinType::ULongFract:
  2790. case BuiltinType::SatShortAccum:
  2791. case BuiltinType::SatAccum:
  2792. case BuiltinType::SatLongAccum:
  2793. case BuiltinType::SatUShortAccum:
  2794. case BuiltinType::SatUAccum:
  2795. case BuiltinType::SatULongAccum:
  2796. case BuiltinType::SatShortFract:
  2797. case BuiltinType::SatFract:
  2798. case BuiltinType::SatLongFract:
  2799. case BuiltinType::SatUShortFract:
  2800. case BuiltinType::SatUFract:
  2801. case BuiltinType::SatULongFract:
  2802. case BuiltinType::BFloat16:
  2803. return false;
  2804. case BuiltinType::Dependent:
  2805. #define BUILTIN_TYPE(Id, SingletonId)
  2806. #define PLACEHOLDER_TYPE(Id, SingletonId) \
  2807. case BuiltinType::Id:
  2808. #include "clang/AST/BuiltinTypes.def"
  2809. llvm_unreachable("asking for RRTI for a placeholder type!");
  2810. case BuiltinType::ObjCId:
  2811. case BuiltinType::ObjCClass:
  2812. case BuiltinType::ObjCSel:
  2813. llvm_unreachable("FIXME: Objective-C types are unsupported!");
  2814. }
  2815. llvm_unreachable("Invalid BuiltinType Kind!");
  2816. }
  2817. static bool TypeInfoIsInStandardLibrary(const PointerType *PointerTy) {
  2818. QualType PointeeTy = PointerTy->getPointeeType();
  2819. const BuiltinType *BuiltinTy = dyn_cast<BuiltinType>(PointeeTy);
  2820. if (!BuiltinTy)
  2821. return false;
  2822. // Check the qualifiers.
  2823. Qualifiers Quals = PointeeTy.getQualifiers();
  2824. Quals.removeConst();
  2825. if (!Quals.empty())
  2826. return false;
  2827. return TypeInfoIsInStandardLibrary(BuiltinTy);
  2828. }
  2829. /// IsStandardLibraryRTTIDescriptor - Returns whether the type
  2830. /// information for the given type exists in the standard library.
  2831. static bool IsStandardLibraryRTTIDescriptor(QualType Ty) {
  2832. // Type info for builtin types is defined in the standard library.
  2833. if (const BuiltinType *BuiltinTy = dyn_cast<BuiltinType>(Ty))
  2834. return TypeInfoIsInStandardLibrary(BuiltinTy);
  2835. // Type info for some pointer types to builtin types is defined in the
  2836. // standard library.
  2837. if (const PointerType *PointerTy = dyn_cast<PointerType>(Ty))
  2838. return TypeInfoIsInStandardLibrary(PointerTy);
  2839. return false;
  2840. }
  2841. /// ShouldUseExternalRTTIDescriptor - Returns whether the type information for
  2842. /// the given type exists somewhere else, and that we should not emit the type
  2843. /// information in this translation unit. Assumes that it is not a
  2844. /// standard-library type.
  2845. static bool ShouldUseExternalRTTIDescriptor(CodeGenModule &CGM,
  2846. QualType Ty) {
  2847. ASTContext &Context = CGM.getContext();
  2848. // If RTTI is disabled, assume it might be disabled in the
  2849. // translation unit that defines any potential key function, too.
  2850. if (!Context.getLangOpts().RTTI) return false;
  2851. if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
  2852. const CXXRecordDecl *RD = cast<CXXRecordDecl>(RecordTy->getDecl());
  2853. if (!RD->hasDefinition())
  2854. return false;
  2855. if (!RD->isDynamicClass())
  2856. return false;
  2857. // FIXME: this may need to be reconsidered if the key function
  2858. // changes.
  2859. // N.B. We must always emit the RTTI data ourselves if there exists a key
  2860. // function.
  2861. bool IsDLLImport = RD->hasAttr<DLLImportAttr>();
  2862. // Don't import the RTTI but emit it locally.
  2863. if (CGM.getTriple().isWindowsGNUEnvironment())
  2864. return false;
  2865. if (CGM.getVTables().isVTableExternal(RD)) {
  2866. if (CGM.getTarget().hasPS4DLLImportExport())
  2867. return true;
  2868. return IsDLLImport && !CGM.getTriple().isWindowsItaniumEnvironment()
  2869. ? false
  2870. : true;
  2871. }
  2872. if (IsDLLImport)
  2873. return true;
  2874. }
  2875. return false;
  2876. }
  2877. /// IsIncompleteClassType - Returns whether the given record type is incomplete.
  2878. static bool IsIncompleteClassType(const RecordType *RecordTy) {
  2879. return !RecordTy->getDecl()->isCompleteDefinition();
  2880. }
  2881. /// ContainsIncompleteClassType - Returns whether the given type contains an
  2882. /// incomplete class type. This is true if
  2883. ///
  2884. /// * The given type is an incomplete class type.
  2885. /// * The given type is a pointer type whose pointee type contains an
  2886. /// incomplete class type.
  2887. /// * The given type is a member pointer type whose class is an incomplete
  2888. /// class type.
  2889. /// * The given type is a member pointer type whoise pointee type contains an
  2890. /// incomplete class type.
  2891. /// is an indirect or direct pointer to an incomplete class type.
  2892. static bool ContainsIncompleteClassType(QualType Ty) {
  2893. if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
  2894. if (IsIncompleteClassType(RecordTy))
  2895. return true;
  2896. }
  2897. if (const PointerType *PointerTy = dyn_cast<PointerType>(Ty))
  2898. return ContainsIncompleteClassType(PointerTy->getPointeeType());
  2899. if (const MemberPointerType *MemberPointerTy =
  2900. dyn_cast<MemberPointerType>(Ty)) {
  2901. // Check if the class type is incomplete.
  2902. const RecordType *ClassType = cast<RecordType>(MemberPointerTy->getClass());
  2903. if (IsIncompleteClassType(ClassType))
  2904. return true;
  2905. return ContainsIncompleteClassType(MemberPointerTy->getPointeeType());
  2906. }
  2907. return false;
  2908. }
  2909. // CanUseSingleInheritance - Return whether the given record decl has a "single,
  2910. // public, non-virtual base at offset zero (i.e. the derived class is dynamic
  2911. // iff the base is)", according to Itanium C++ ABI, 2.95p6b.
  2912. static bool CanUseSingleInheritance(const CXXRecordDecl *RD) {
  2913. // Check the number of bases.
  2914. if (RD->getNumBases() != 1)
  2915. return false;
  2916. // Get the base.
  2917. CXXRecordDecl::base_class_const_iterator Base = RD->bases_begin();
  2918. // Check that the base is not virtual.
  2919. if (Base->isVirtual())
  2920. return false;
  2921. // Check that the base is public.
  2922. if (Base->getAccessSpecifier() != AS_public)
  2923. return false;
  2924. // Check that the class is dynamic iff the base is.
  2925. auto *BaseDecl =
  2926. cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());
  2927. if (!BaseDecl->isEmpty() &&
  2928. BaseDecl->isDynamicClass() != RD->isDynamicClass())
  2929. return false;
  2930. return true;
  2931. }
/// BuildVTablePointer - Push the vtable-pointer field of the type_info object
/// onto Fields: a pointer into the vtable of the __cxxabiv1 RTTI class that
/// describes Ty, adjusted to the vtable's address point.
void ItaniumRTTIBuilder::BuildVTablePointer(const Type *Ty) {
  // abi::__class_type_info.
  static const char * const ClassTypeInfo =
    "_ZTVN10__cxxabiv117__class_type_infoE";
  // abi::__si_class_type_info.
  static const char * const SIClassTypeInfo =
    "_ZTVN10__cxxabiv120__si_class_type_infoE";
  // abi::__vmi_class_type_info.
  static const char * const VMIClassTypeInfo =
    "_ZTVN10__cxxabiv121__vmi_class_type_infoE";

  // Mangled name of the vtable of the RTTI class matching Ty.
  const char *VTableName = nullptr;

  switch (Ty->getTypeClass()) {
  // Expand every non-canonical / dependent type class into an unreachable
  // case: BuildTypeInfo canonicalized Ty before calling us.
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.inc"
    llvm_unreachable("Non-canonical and dependent types shouldn't get here");

  case Type::LValueReference:
  case Type::RValueReference:
    llvm_unreachable("References shouldn't get here");

  case Type::Auto:
  case Type::DeducedTemplateSpecialization:
    llvm_unreachable("Undeduced type shouldn't get here");

  case Type::Pipe:
    llvm_unreachable("Pipe types shouldn't get here");

  case Type::Builtin:
  case Type::BitInt:
  // GCC treats vector and complex types as fundamental types.
  case Type::Vector:
  case Type::ExtVector:
  case Type::ConstantMatrix:
  case Type::Complex:
  case Type::Atomic:
  // FIXME: GCC treats block pointers as fundamental types?!
  case Type::BlockPointer:
    // abi::__fundamental_type_info.
    VTableName = "_ZTVN10__cxxabiv123__fundamental_type_infoE";
    break;

  case Type::ConstantArray:
  case Type::IncompleteArray:
  case Type::VariableArray:
    // abi::__array_type_info.
    VTableName = "_ZTVN10__cxxabiv117__array_type_infoE";
    break;

  case Type::FunctionNoProto:
  case Type::FunctionProto:
    // abi::__function_type_info.
    VTableName = "_ZTVN10__cxxabiv120__function_type_infoE";
    break;

  case Type::Enum:
    // abi::__enum_type_info.
    VTableName = "_ZTVN10__cxxabiv116__enum_type_infoE";
    break;

  case Type::Record: {
    const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());

    // No definition or no bases -> plain __class_type_info; a single public
    // non-virtual base at offset zero -> __si_class_type_info; otherwise
    // -> __vmi_class_type_info.
    if (!RD->hasDefinition() || !RD->getNumBases()) {
      VTableName = ClassTypeInfo;
    } else if (CanUseSingleInheritance(RD)) {
      VTableName = SIClassTypeInfo;
    } else {
      VTableName = VMIClassTypeInfo;
    }

    break;
  }

  case Type::ObjCObject:
    // Ignore protocol qualifiers.
    Ty = cast<ObjCObjectType>(Ty)->getBaseType().getTypePtr();

    // Handle id and Class.
    if (isa<BuiltinType>(Ty)) {
      VTableName = ClassTypeInfo;
      break;
    }

    assert(isa<ObjCInterfaceType>(Ty));
    [[fallthrough]];

  case Type::ObjCInterface:
    // An interface with a superclass is described as single inheritance.
    if (cast<ObjCInterfaceType>(Ty)->getDecl()->getSuperClass()) {
      VTableName = SIClassTypeInfo;
    } else {
      VTableName = ClassTypeInfo;
    }
    break;

  case Type::ObjCObjectPointer:
  case Type::Pointer:
    // abi::__pointer_type_info.
    VTableName = "_ZTVN10__cxxabiv119__pointer_type_infoE";
    break;

  case Type::MemberPointer:
    // abi::__pointer_to_member_type_info.
    VTableName = "_ZTVN10__cxxabiv129__pointer_to_member_type_infoE";
    break;
  }

  llvm::Constant *VTable = nullptr;

  // Check if the alias exists. If it doesn't, then get or create the global.
  if (CGM.getItaniumVTableContext().isRelativeLayout())
    VTable = CGM.getModule().getNamedAlias(VTableName);
  if (!VTable)
    VTable = CGM.getModule().getOrInsertGlobal(VTableName, CGM.Int8PtrTy);

  CGM.setDSOLocal(cast<llvm::GlobalValue>(VTable->stripPointerCasts()));

  llvm::Type *PtrDiffTy =
      CGM.getTypes().ConvertType(CGM.getContext().getPointerDiffType());

  // The vtable address point is 2.
  if (CGM.getItaniumVTableContext().isRelativeLayout()) {
    // The vtable address point is 8 bytes after its start:
    // 4 for the offset to top + 4 for the relative offset to rtti.
    llvm::Constant *Eight = llvm::ConstantInt::get(CGM.Int32Ty, 8);
    VTable = llvm::ConstantExpr::getBitCast(VTable, CGM.Int8PtrTy);
    VTable =
        llvm::ConstantExpr::getInBoundsGetElementPtr(CGM.Int8Ty, VTable, Eight);
  } else {
    // Classic layout: skip two pointer-sized slots (offset-to-top and the
    // RTTI pointer) to reach the address point.
    llvm::Constant *Two = llvm::ConstantInt::get(PtrDiffTy, 2);
    VTable = llvm::ConstantExpr::getInBoundsGetElementPtr(CGM.Int8PtrTy, VTable,
                                                          Two);
  }
  VTable = llvm::ConstantExpr::getBitCast(VTable, CGM.Int8PtrTy);

  Fields.push_back(VTable);
}
/// Return the linkage that the type info and type info name constants
/// should have for the given type.
static llvm::GlobalVariable::LinkageTypes getTypeInfoLinkage(CodeGenModule &CGM,
                                                             QualType Ty) {
  // Itanium C++ ABI 2.9.5p7:
  //   In addition, it and all of the intermediate abi::__pointer_type_info
  //   structs in the chain down to the abi::__class_type_info for the
  //   incomplete class type must be prevented from resolving to the
  //   corresponding type_info structs for the complete class type, possibly
  //   by making them local static objects. Finally, a dummy class RTTI is
  //   generated for the incomplete type that will not resolve to the final
  //   complete class RTTI (because the latter need not exist), possibly by
  //   making it a local static object.
  if (ContainsIncompleteClassType(Ty))
    return llvm::GlobalValue::InternalLinkage;

  // Otherwise the descriptor's linkage tracks the linkage of the type itself.
  switch (Ty->getLinkage()) {
  case NoLinkage:
  case InternalLinkage:
  case UniqueExternalLinkage:
    return llvm::GlobalValue::InternalLinkage;

  case VisibleNoLinkage:
  case ModuleInternalLinkage:
  case ModuleLinkage:
  case ExternalLinkage:
    // RTTI is not enabled, which means that this type info struct is going
    // to be used for exception handling. Give it linkonce_odr linkage.
    if (!CGM.getLangOpts().RTTI)
      return llvm::GlobalValue::LinkOnceODRLinkage;

    if (const RecordType *Record = dyn_cast<RecordType>(Ty)) {
      const CXXRecordDecl *RD = cast<CXXRecordDecl>(Record->getDecl());
      // Weak classes get weak_odr descriptors.
      if (RD->hasAttr<WeakAttr>())
        return llvm::GlobalValue::WeakODRLinkage;
      // On Windows-Itanium, a dllimported descriptor stays external.
      if (CGM.getTriple().isWindowsItaniumEnvironment())
        if (RD->hasAttr<DLLImportAttr>() &&
            ShouldUseExternalRTTIDescriptor(CGM, Ty))
          return llvm::GlobalValue::ExternalLinkage;
      // MinGW always uses LinkOnceODRLinkage for type info.
      if (RD->isDynamicClass() &&
          !CGM.getContext()
               .getTargetInfo()
               .getTriple()
               .isWindowsGNUEnvironment())
        return CGM.getVTableLinkage(RD);
    }

    return llvm::GlobalValue::LinkOnceODRLinkage;
  }

  llvm_unreachable("Invalid linkage!");
}
/// BuildTypeInfo - Return the address of the RTTI descriptor for Ty, reusing
/// an already-emitted or external descriptor when possible; otherwise compute
/// linkage / visibility / DLL storage class and emit one.
llvm::Constant *ItaniumRTTIBuilder::BuildTypeInfo(QualType Ty) {
  // We want to operate on the canonical type.
  Ty = Ty.getCanonicalType();

  // Check if we've already emitted an RTTI descriptor for this type.
  SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);

  llvm::GlobalVariable *OldGV = CGM.getModule().getNamedGlobal(Name);
  if (OldGV && !OldGV->isDeclaration()) {
    assert(!OldGV->hasAvailableExternallyLinkage() &&
           "available_externally typeinfos not yet implemented");

    return llvm::ConstantExpr::getBitCast(OldGV, CGM.Int8PtrTy);
  }

  // Check if there is already an external RTTI descriptor for this type.
  if (IsStandardLibraryRTTIDescriptor(Ty) ||
      ShouldUseExternalRTTIDescriptor(CGM, Ty))
    return GetAddrOfExternalRTTIDescriptor(Ty);

  // Emit the standard library with external linkage.
  llvm::GlobalVariable::LinkageTypes Linkage = getTypeInfoLinkage(CGM, Ty);

  // Give the type_info object and name the formal visibility of the
  // type itself.
  llvm::GlobalValue::VisibilityTypes llvmVisibility;
  if (llvm::GlobalValue::isLocalLinkage(Linkage))
    // If the linkage is local, only default visibility makes sense.
    llvmVisibility = llvm::GlobalValue::DefaultVisibility;
  else if (CXXABI.classifyRTTIUniqueness(Ty, Linkage) ==
           ItaniumCXXABI::RUK_NonUniqueHidden)
    // Non-unique descriptors that need not be published are hidden.
    llvmVisibility = llvm::GlobalValue::HiddenVisibility;
  else
    llvmVisibility = CodeGenModule::GetLLVMVisibility(Ty->getVisibility());

  llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass =
      llvm::GlobalValue::DefaultStorageClass;
  if (auto RD = Ty->getAsCXXRecordDecl()) {
    // dllexport the descriptor when the class is exported (explicitly on
    // Windows-Itanium, or via visibility-to-DLLExport mapping) and nothing
    // forces it local.
    if ((CGM.getTriple().isWindowsItaniumEnvironment() &&
         RD->hasAttr<DLLExportAttr>()) ||
        (CGM.shouldMapVisibilityToDLLExport(RD) &&
         !llvm::GlobalValue::isLocalLinkage(Linkage) &&
         llvmVisibility == llvm::GlobalValue::DefaultVisibility))
      DLLStorageClass = llvm::GlobalValue::DLLExportStorageClass;
  }
  return BuildTypeInfo(Ty, Linkage, llvmVisibility, DLLStorageClass);
}
/// BuildTypeInfo - Emit the type_info object for Ty with the given linkage,
/// visibility and DLL storage class: assemble Fields (vtable pointer, name,
/// then class-specific data), create the global, and replace any forward
/// declaration.
llvm::Constant *ItaniumRTTIBuilder::BuildTypeInfo(
    QualType Ty,
    llvm::GlobalVariable::LinkageTypes Linkage,
    llvm::GlobalValue::VisibilityTypes Visibility,
    llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass) {
  // Add the vtable pointer.
  BuildVTablePointer(cast<Type>(Ty));

  // And the name.
  llvm::GlobalVariable *TypeName = GetAddrOfTypeName(Ty, Linkage);
  llvm::Constant *TypeNameField;

  // If we're supposed to demote the visibility, be sure to set a flag
  // to use a string comparison for type_info comparisons.
  ItaniumCXXABI::RTTIUniquenessKind RTTIUniqueness =
      CXXABI.classifyRTTIUniqueness(Ty, Linkage);
  if (RTTIUniqueness != ItaniumCXXABI::RUK_Unique) {
    // The flag is the sign bit, which on ARM64 is defined to be clear
    // for global pointers.  This is very ARM64-specific.
    TypeNameField = llvm::ConstantExpr::getPtrToInt(TypeName, CGM.Int64Ty);
    llvm::Constant *flag =
        llvm::ConstantInt::get(CGM.Int64Ty, ((uint64_t)1) << 63);
    TypeNameField = llvm::ConstantExpr::getAdd(TypeNameField, flag);
    TypeNameField =
        llvm::ConstantExpr::getIntToPtr(TypeNameField, CGM.Int8PtrTy);
  } else {
    TypeNameField = llvm::ConstantExpr::getBitCast(TypeName, CGM.Int8PtrTy);
  }
  Fields.push_back(TypeNameField);

  // Append the RTTI-class-specific trailing fields, if any.
  switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.inc"
    llvm_unreachable("Non-canonical and dependent types shouldn't get here");

  // GCC treats vector types as fundamental types.
  case Type::Builtin:
  case Type::Vector:
  case Type::ExtVector:
  case Type::ConstantMatrix:
  case Type::Complex:
  case Type::BlockPointer:
    // Itanium C++ ABI 2.9.5p4:
    //   abi::__fundamental_type_info adds no data members to std::type_info.
    break;

  case Type::LValueReference:
  case Type::RValueReference:
    llvm_unreachable("References shouldn't get here");

  case Type::Auto:
  case Type::DeducedTemplateSpecialization:
    llvm_unreachable("Undeduced type shouldn't get here");

  case Type::Pipe:
    break;

  case Type::BitInt:
    break;

  case Type::ConstantArray:
  case Type::IncompleteArray:
  case Type::VariableArray:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__array_type_info adds no data members to std::type_info.
    break;

  case Type::FunctionNoProto:
  case Type::FunctionProto:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__function_type_info adds no data members to std::type_info.
    break;

  case Type::Enum:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__enum_type_info adds no data members to std::type_info.
    break;

  case Type::Record: {
    const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());
    if (!RD->hasDefinition() || !RD->getNumBases()) {
      // We don't need to emit any fields.
      break;
    }

    // Single inheritance appends one base type_info; multiple/virtual
    // inheritance appends the full __vmi_class_type_info payload.
    if (CanUseSingleInheritance(RD))
      BuildSIClassTypeInfo(RD);
    else
      BuildVMIClassTypeInfo(RD);

    break;
  }

  case Type::ObjCObject:
  case Type::ObjCInterface:
    BuildObjCObjectTypeInfo(cast<ObjCObjectType>(Ty));
    break;

  case Type::ObjCObjectPointer:
    BuildPointerTypeInfo(cast<ObjCObjectPointerType>(Ty)->getPointeeType());
    break;

  case Type::Pointer:
    BuildPointerTypeInfo(cast<PointerType>(Ty)->getPointeeType());
    break;

  case Type::MemberPointer:
    BuildPointerToMemberTypeInfo(cast<MemberPointerType>(Ty));
    break;

  case Type::Atomic:
    // No fields, at least for the moment.
    break;
  }

  llvm::Constant *Init = llvm::ConstantStruct::getAnon(Fields);

  SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  llvm::Module &M = CGM.getModule();
  llvm::GlobalVariable *OldGV = M.getNamedGlobal(Name);
  llvm::GlobalVariable *GV =
      new llvm::GlobalVariable(M, Init->getType(),
                               /*isConstant=*/true, Linkage, Init, Name);

  // Export the typeinfo in the same circumstances as the vtable is exported.
  auto GVDLLStorageClass = DLLStorageClass;
  if (CGM.getTarget().hasPS4DLLImportExport()) {
    if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
      const CXXRecordDecl *RD = cast<CXXRecordDecl>(RecordTy->getDecl());
      if (RD->hasAttr<DLLExportAttr>() ||
          CXXRecordAllNonInlineVirtualsHaveAttr<DLLExportAttr>(RD)) {
        GVDLLStorageClass = llvm::GlobalVariable::DLLExportStorageClass;
      }
    }
  }

  // If there's already an old global variable, replace it with the new one.
  if (OldGV) {
    GV->takeName(OldGV);
    llvm::Constant *NewPtr =
      llvm::ConstantExpr::getBitCast(GV, OldGV->getType());
    OldGV->replaceAllUsesWith(NewPtr);
    OldGV->eraseFromParent();
  }

  if (CGM.supportsCOMDAT() && GV->isWeakForLinker())
    GV->setComdat(M.getOrInsertComdat(GV->getName()));

  CharUnits Align = CGM.getContext().toCharUnitsFromBits(
      CGM.getTarget().getPointerAlign(LangAS::Default));
  GV->setAlignment(Align.getAsAlign());

  // The Itanium ABI specifies that type_info objects must be globally
  // unique, with one exception: if the type is an incomplete class
  // type or a (possibly indirect) pointer to one.  That exception
  // affects the general case of comparing type_info objects produced
  // by the typeid operator, which is why the comparison operators on
  // std::type_info generally use the type_info name pointers instead
  // of the object addresses.  However, the language's built-in uses
  // of RTTI generally require class types to be complete, even when
  // manipulating pointers to those class types.  This allows the
  // implementation of dynamic_cast to rely on address equality tests,
  // which is much faster.

  // All of this is to say that it's important that both the type_info
  // object and the type_info name be uniqued when weakly emitted.

  TypeName->setVisibility(Visibility);
  CGM.setDSOLocal(TypeName);

  GV->setVisibility(Visibility);
  CGM.setDSOLocal(GV);

  TypeName->setDLLStorageClass(DLLStorageClass);
  GV->setDLLStorageClass(CGM.getTarget().hasPS4DLLImportExport()
                             ? GVDLLStorageClass
                             : DLLStorageClass);

  TypeName->setPartition(CGM.getCodeGenOpts().SymbolPartition);
  GV->setPartition(CGM.getCodeGenOpts().SymbolPartition);

  return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
}
  3299. /// BuildObjCObjectTypeInfo - Build the appropriate kind of type_info
  3300. /// for the given Objective-C object type.
  3301. void ItaniumRTTIBuilder::BuildObjCObjectTypeInfo(const ObjCObjectType *OT) {
  3302. // Drop qualifiers.
  3303. const Type *T = OT->getBaseType().getTypePtr();
  3304. assert(isa<BuiltinType>(T) || isa<ObjCInterfaceType>(T));
  3305. // The builtin types are abi::__class_type_infos and don't require
  3306. // extra fields.
  3307. if (isa<BuiltinType>(T)) return;
  3308. ObjCInterfaceDecl *Class = cast<ObjCInterfaceType>(T)->getDecl();
  3309. ObjCInterfaceDecl *Super = Class->getSuperClass();
  3310. // Root classes are also __class_type_info.
  3311. if (!Super) return;
  3312. QualType SuperTy = CGM.getContext().getObjCInterfaceType(Super);
  3313. // Everything else is single inheritance.
  3314. llvm::Constant *BaseTypeInfo =
  3315. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(SuperTy);
  3316. Fields.push_back(BaseTypeInfo);
  3317. }
  3318. /// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
  3319. /// inheritance, according to the Itanium C++ ABI, 2.95p6b.
  3320. void ItaniumRTTIBuilder::BuildSIClassTypeInfo(const CXXRecordDecl *RD) {
  3321. // Itanium C++ ABI 2.9.5p6b:
  3322. // It adds to abi::__class_type_info a single member pointing to the
  3323. // type_info structure for the base type,
  3324. llvm::Constant *BaseTypeInfo =
  3325. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(RD->bases_begin()->getType());
  3326. Fields.push_back(BaseTypeInfo);
  3327. }
namespace {
  /// SeenBases - Contains virtual and non-virtual bases seen when traversing
  /// a class hierarchy.
  struct SeenBases {
    // Non-virtual bases encountered so far during the walk.
    llvm::SmallPtrSet<const CXXRecordDecl *, 16> NonVirtualBases;
    // Virtual bases encountered so far; revisiting one indicates
    // diamond-shaped inheritance (see ComputeVMIClassTypeInfoFlags).
    llvm::SmallPtrSet<const CXXRecordDecl *, 16> VirtualBases;
  };
}
  3336. /// ComputeVMIClassTypeInfoFlags - Compute the value of the flags member in
  3337. /// abi::__vmi_class_type_info.
  3338. ///
  3339. static unsigned ComputeVMIClassTypeInfoFlags(const CXXBaseSpecifier *Base,
  3340. SeenBases &Bases) {
  3341. unsigned Flags = 0;
  3342. auto *BaseDecl =
  3343. cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());
  3344. if (Base->isVirtual()) {
  3345. // Mark the virtual base as seen.
  3346. if (!Bases.VirtualBases.insert(BaseDecl).second) {
  3347. // If this virtual base has been seen before, then the class is diamond
  3348. // shaped.
  3349. Flags |= ItaniumRTTIBuilder::VMI_DiamondShaped;
  3350. } else {
  3351. if (Bases.NonVirtualBases.count(BaseDecl))
  3352. Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat;
  3353. }
  3354. } else {
  3355. // Mark the non-virtual base as seen.
  3356. if (!Bases.NonVirtualBases.insert(BaseDecl).second) {
  3357. // If this non-virtual base has been seen before, then the class has non-
  3358. // diamond shaped repeated inheritance.
  3359. Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat;
  3360. } else {
  3361. if (Bases.VirtualBases.count(BaseDecl))
  3362. Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat;
  3363. }
  3364. }
  3365. // Walk all bases.
  3366. for (const auto &I : BaseDecl->bases())
  3367. Flags |= ComputeVMIClassTypeInfoFlags(&I, Bases);
  3368. return Flags;
  3369. }
  3370. static unsigned ComputeVMIClassTypeInfoFlags(const CXXRecordDecl *RD) {
  3371. unsigned Flags = 0;
  3372. SeenBases Bases;
  3373. // Walk all bases.
  3374. for (const auto &I : RD->bases())
  3375. Flags |= ComputeVMIClassTypeInfoFlags(&I, Bases);
  3376. return Flags;
  3377. }
  3378. /// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
  3379. /// classes with bases that do not satisfy the abi::__si_class_type_info
  3380. /// constraints, according ti the Itanium C++ ABI, 2.9.5p5c.
  3381. void ItaniumRTTIBuilder::BuildVMIClassTypeInfo(const CXXRecordDecl *RD) {
  3382. llvm::Type *UnsignedIntLTy =
  3383. CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  3384. // Itanium C++ ABI 2.9.5p6c:
  3385. // __flags is a word with flags describing details about the class
  3386. // structure, which may be referenced by using the __flags_masks
  3387. // enumeration. These flags refer to both direct and indirect bases.
  3388. unsigned Flags = ComputeVMIClassTypeInfoFlags(RD);
  3389. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));
  3390. // Itanium C++ ABI 2.9.5p6c:
  3391. // __base_count is a word with the number of direct proper base class
  3392. // descriptions that follow.
  3393. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, RD->getNumBases()));
  3394. if (!RD->getNumBases())
  3395. return;
  3396. // Now add the base class descriptions.
  3397. // Itanium C++ ABI 2.9.5p6c:
  3398. // __base_info[] is an array of base class descriptions -- one for every
  3399. // direct proper base. Each description is of the type:
  3400. //
  3401. // struct abi::__base_class_type_info {
  3402. // public:
  3403. // const __class_type_info *__base_type;
  3404. // long __offset_flags;
  3405. //
  3406. // enum __offset_flags_masks {
  3407. // __virtual_mask = 0x1,
  3408. // __public_mask = 0x2,
  3409. // __offset_shift = 8
  3410. // };
  3411. // };
  3412. // If we're in mingw and 'long' isn't wide enough for a pointer, use 'long
  3413. // long' instead of 'long' for __offset_flags. libstdc++abi uses long long on
  3414. // LLP64 platforms.
  3415. // FIXME: Consider updating libc++abi to match, and extend this logic to all
  3416. // LLP64 platforms.
  3417. QualType OffsetFlagsTy = CGM.getContext().LongTy;
  3418. const TargetInfo &TI = CGM.getContext().getTargetInfo();
  3419. if (TI.getTriple().isOSCygMing() &&
  3420. TI.getPointerWidth(LangAS::Default) > TI.getLongWidth())
  3421. OffsetFlagsTy = CGM.getContext().LongLongTy;
  3422. llvm::Type *OffsetFlagsLTy =
  3423. CGM.getTypes().ConvertType(OffsetFlagsTy);
  3424. for (const auto &Base : RD->bases()) {
  3425. // The __base_type member points to the RTTI for the base type.
  3426. Fields.push_back(ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(Base.getType()));
  3427. auto *BaseDecl =
  3428. cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl());
  3429. int64_t OffsetFlags = 0;
  3430. // All but the lower 8 bits of __offset_flags are a signed offset.
  3431. // For a non-virtual base, this is the offset in the object of the base
  3432. // subobject. For a virtual base, this is the offset in the virtual table of
  3433. // the virtual base offset for the virtual base referenced (negative).
  3434. CharUnits Offset;
  3435. if (Base.isVirtual())
  3436. Offset =
  3437. CGM.getItaniumVTableContext().getVirtualBaseOffsetOffset(RD, BaseDecl);
  3438. else {
  3439. const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
  3440. Offset = Layout.getBaseClassOffset(BaseDecl);
  3441. };
  3442. OffsetFlags = uint64_t(Offset.getQuantity()) << 8;
  3443. // The low-order byte of __offset_flags contains flags, as given by the
  3444. // masks from the enumeration __offset_flags_masks.
  3445. if (Base.isVirtual())
  3446. OffsetFlags |= BCTI_Virtual;
  3447. if (Base.getAccessSpecifier() == AS_public)
  3448. OffsetFlags |= BCTI_Public;
  3449. Fields.push_back(llvm::ConstantInt::get(OffsetFlagsLTy, OffsetFlags));
  3450. }
  3451. }
  3452. /// Compute the flags for a __pbase_type_info, and remove the corresponding
  3453. /// pieces from \p Type.
  3454. static unsigned extractPBaseFlags(ASTContext &Ctx, QualType &Type) {
  3455. unsigned Flags = 0;
  3456. if (Type.isConstQualified())
  3457. Flags |= ItaniumRTTIBuilder::PTI_Const;
  3458. if (Type.isVolatileQualified())
  3459. Flags |= ItaniumRTTIBuilder::PTI_Volatile;
  3460. if (Type.isRestrictQualified())
  3461. Flags |= ItaniumRTTIBuilder::PTI_Restrict;
  3462. Type = Type.getUnqualifiedType();
  3463. // Itanium C++ ABI 2.9.5p7:
  3464. // When the abi::__pbase_type_info is for a direct or indirect pointer to an
  3465. // incomplete class type, the incomplete target type flag is set.
  3466. if (ContainsIncompleteClassType(Type))
  3467. Flags |= ItaniumRTTIBuilder::PTI_Incomplete;
  3468. if (auto *Proto = Type->getAs<FunctionProtoType>()) {
  3469. if (Proto->isNothrow()) {
  3470. Flags |= ItaniumRTTIBuilder::PTI_Noexcept;
  3471. Type = Ctx.getFunctionTypeWithExceptionSpec(Type, EST_None);
  3472. }
  3473. }
  3474. return Flags;
  3475. }
  3476. /// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct,
  3477. /// used for pointer types.
  3478. void ItaniumRTTIBuilder::BuildPointerTypeInfo(QualType PointeeTy) {
  3479. // Itanium C++ ABI 2.9.5p7:
  3480. // __flags is a flag word describing the cv-qualification and other
  3481. // attributes of the type pointed to
  3482. unsigned Flags = extractPBaseFlags(CGM.getContext(), PointeeTy);
  3483. llvm::Type *UnsignedIntLTy =
  3484. CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  3485. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));
  3486. // Itanium C++ ABI 2.9.5p7:
  3487. // __pointee is a pointer to the std::type_info derivation for the
  3488. // unqualified type being pointed to.
  3489. llvm::Constant *PointeeTypeInfo =
  3490. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(PointeeTy);
  3491. Fields.push_back(PointeeTypeInfo);
  3492. }
  3493. /// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
  3494. /// struct, used for member pointer types.
  3495. void
  3496. ItaniumRTTIBuilder::BuildPointerToMemberTypeInfo(const MemberPointerType *Ty) {
  3497. QualType PointeeTy = Ty->getPointeeType();
  3498. // Itanium C++ ABI 2.9.5p7:
  3499. // __flags is a flag word describing the cv-qualification and other
  3500. // attributes of the type pointed to.
  3501. unsigned Flags = extractPBaseFlags(CGM.getContext(), PointeeTy);
  3502. const RecordType *ClassType = cast<RecordType>(Ty->getClass());
  3503. if (IsIncompleteClassType(ClassType))
  3504. Flags |= PTI_ContainingClassIncomplete;
  3505. llvm::Type *UnsignedIntLTy =
  3506. CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  3507. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));
  3508. // Itanium C++ ABI 2.9.5p7:
  3509. // __pointee is a pointer to the std::type_info derivation for the
  3510. // unqualified type being pointed to.
  3511. llvm::Constant *PointeeTypeInfo =
  3512. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(PointeeTy);
  3513. Fields.push_back(PointeeTypeInfo);
  3514. // Itanium C++ ABI 2.9.5p9:
  3515. // __context is a pointer to an abi::__class_type_info corresponding to the
  3516. // class type containing the member pointed to
  3517. // (e.g., the "A" in "int A::*").
  3518. Fields.push_back(
  3519. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(QualType(ClassType, 0)));
  3520. }
  3521. llvm::Constant *ItaniumCXXABI::getAddrOfRTTIDescriptor(QualType Ty) {
  3522. return ItaniumRTTIBuilder(*this).BuildTypeInfo(Ty);
  3523. }
  3524. void ItaniumCXXABI::EmitFundamentalRTTIDescriptors(const CXXRecordDecl *RD) {
  3525. // Types added here must also be added to TypeInfoIsInStandardLibrary.
  3526. QualType FundamentalTypes[] = {
  3527. getContext().VoidTy, getContext().NullPtrTy,
  3528. getContext().BoolTy, getContext().WCharTy,
  3529. getContext().CharTy, getContext().UnsignedCharTy,
  3530. getContext().SignedCharTy, getContext().ShortTy,
  3531. getContext().UnsignedShortTy, getContext().IntTy,
  3532. getContext().UnsignedIntTy, getContext().LongTy,
  3533. getContext().UnsignedLongTy, getContext().LongLongTy,
  3534. getContext().UnsignedLongLongTy, getContext().Int128Ty,
  3535. getContext().UnsignedInt128Ty, getContext().HalfTy,
  3536. getContext().FloatTy, getContext().DoubleTy,
  3537. getContext().LongDoubleTy, getContext().Float128Ty,
  3538. getContext().Char8Ty, getContext().Char16Ty,
  3539. getContext().Char32Ty
  3540. };
  3541. llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass =
  3542. RD->hasAttr<DLLExportAttr>() || CGM.shouldMapVisibilityToDLLExport(RD)
  3543. ? llvm::GlobalValue::DLLExportStorageClass
  3544. : llvm::GlobalValue::DefaultStorageClass;
  3545. llvm::GlobalValue::VisibilityTypes Visibility =
  3546. CodeGenModule::GetLLVMVisibility(RD->getVisibility());
  3547. for (const QualType &FundamentalType : FundamentalTypes) {
  3548. QualType PointerType = getContext().getPointerType(FundamentalType);
  3549. QualType PointerTypeConst = getContext().getPointerType(
  3550. FundamentalType.withConst());
  3551. for (QualType Type : {FundamentalType, PointerType, PointerTypeConst})
  3552. ItaniumRTTIBuilder(*this).BuildTypeInfo(
  3553. Type, llvm::GlobalValue::ExternalLinkage,
  3554. Visibility, DLLStorageClass);
  3555. }
  3556. }
  3557. /// What sort of uniqueness rules should we use for the RTTI for the
  3558. /// given type?
  3559. ItaniumCXXABI::RTTIUniquenessKind ItaniumCXXABI::classifyRTTIUniqueness(
  3560. QualType CanTy, llvm::GlobalValue::LinkageTypes Linkage) const {
  3561. if (shouldRTTIBeUnique())
  3562. return RUK_Unique;
  3563. // It's only necessary for linkonce_odr or weak_odr linkage.
  3564. if (Linkage != llvm::GlobalValue::LinkOnceODRLinkage &&
  3565. Linkage != llvm::GlobalValue::WeakODRLinkage)
  3566. return RUK_Unique;
  3567. // It's only necessary with default visibility.
  3568. if (CanTy->getVisibility() != DefaultVisibility)
  3569. return RUK_Unique;
  3570. // If we're not required to publish this symbol, hide it.
  3571. if (Linkage == llvm::GlobalValue::LinkOnceODRLinkage)
  3572. return RUK_NonUniqueHidden;
  3573. // If we're required to publish this symbol, as we might be under an
  3574. // explicit instantiation, leave it with default visibility but
  3575. // enable string-comparisons.
  3576. assert(Linkage == llvm::GlobalValue::WeakODRLinkage);
  3577. return RUK_NonUniqueVisible;
  3578. }
// Find out how to codegen the complete destructor and constructor
namespace {
// Strategy for emitting the "complete" structor relative to the "base" one:
//   Emit   - emit a separate function body.
//   RAUW   - emit no body; replace all uses with the base variant's address.
//   Alias  - emit a global alias pointing at the base variant.
//   COMDAT - emit a body placed in the shared C5/D5 comdat group.
enum class StructorCodegen { Emit, RAUW, Alias, COMDAT };
}
  3583. static StructorCodegen getCodegenToUse(CodeGenModule &CGM,
  3584. const CXXMethodDecl *MD) {
  3585. if (!CGM.getCodeGenOpts().CXXCtorDtorAliases)
  3586. return StructorCodegen::Emit;
  3587. // The complete and base structors are not equivalent if there are any virtual
  3588. // bases, so emit separate functions.
  3589. if (MD->getParent()->getNumVBases())
  3590. return StructorCodegen::Emit;
  3591. GlobalDecl AliasDecl;
  3592. if (const auto *DD = dyn_cast<CXXDestructorDecl>(MD)) {
  3593. AliasDecl = GlobalDecl(DD, Dtor_Complete);
  3594. } else {
  3595. const auto *CD = cast<CXXConstructorDecl>(MD);
  3596. AliasDecl = GlobalDecl(CD, Ctor_Complete);
  3597. }
  3598. llvm::GlobalValue::LinkageTypes Linkage = CGM.getFunctionLinkage(AliasDecl);
  3599. if (llvm::GlobalValue::isDiscardableIfUnused(Linkage))
  3600. return StructorCodegen::RAUW;
  3601. // FIXME: Should we allow available_externally aliases?
  3602. if (!llvm::GlobalAlias::isValidLinkage(Linkage))
  3603. return StructorCodegen::RAUW;
  3604. if (llvm::GlobalValue::isWeakForLinker(Linkage)) {
  3605. // Only ELF and wasm support COMDATs with arbitrary names (C5/D5).
  3606. if (CGM.getTarget().getTriple().isOSBinFormatELF() ||
  3607. CGM.getTarget().getTriple().isOSBinFormatWasm())
  3608. return StructorCodegen::COMDAT;
  3609. return StructorCodegen::Emit;
  3610. }
  3611. return StructorCodegen::Alias;
  3612. }
/// Emit a global alias named after \p AliasDecl that points at the function
/// emitted for \p TargetDecl, replacing any pre-existing declaration of the
/// alias name.
static void emitConstructorDestructorAlias(CodeGenModule &CGM,
                                           GlobalDecl AliasDecl,
                                           GlobalDecl TargetDecl) {
  llvm::GlobalValue::LinkageTypes Linkage = CGM.getFunctionLinkage(AliasDecl);

  StringRef MangledName = CGM.getMangledName(AliasDecl);
  llvm::GlobalValue *Entry = CGM.GetGlobalValue(MangledName);
  // If a definition under this name already exists, leave it alone.
  if (Entry && !Entry->isDeclaration())
    return;

  auto *Aliasee = cast<llvm::GlobalValue>(CGM.GetAddrOfGlobal(TargetDecl));

  // Create the alias with no name.
  auto *Alias = llvm::GlobalAlias::create(Linkage, "", Aliasee);

  // Constructors and destructors are always unnamed_addr.
  Alias->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);

  // Switch any previous uses to the alias.
  if (Entry) {
    assert(Entry->getType() == Aliasee->getType() &&
           "declaration exists with different type");
    // Steal the declaration's name first, then redirect its users to the
    // alias before deleting the now-unused declaration.
    Alias->takeName(Entry);
    Entry->replaceAllUsesWith(Alias);
    Entry->eraseFromParent();
  } else {
    Alias->setName(MangledName);
  }

  // Finally, set up the alias with its proper name and attributes.
  CGM.SetCommonAttributes(AliasDecl, Alias);
}
/// Emit one constructor/destructor variant, possibly as an alias of (or a
/// use-site replacement by) its base variant, per getCodegenToUse().
void ItaniumCXXABI::emitCXXStructor(GlobalDecl GD) {
  auto *MD = cast<CXXMethodDecl>(GD.getDecl());
  auto *CD = dyn_cast<CXXConstructorDecl>(MD);
  const CXXDestructorDecl *DD = CD ? nullptr : cast<CXXDestructorDecl>(MD);

  StructorCodegen CGType = getCodegenToUse(CGM, MD);

  // The complete variant may be foldable into the base variant.
  if (CD ? GD.getCtorType() == Ctor_Complete
         : GD.getDtorType() == Dtor_Complete) {
    GlobalDecl BaseDecl;
    if (CD)
      BaseDecl = GD.getWithCtorType(Ctor_Base);
    else
      BaseDecl = GD.getWithDtorType(Dtor_Base);

    if (CGType == StructorCodegen::Alias || CGType == StructorCodegen::COMDAT) {
      emitConstructorDestructorAlias(CGM, GD, BaseDecl);
      return;
    }

    if (CGType == StructorCodegen::RAUW) {
      // Record that every use of the complete symbol should be rewritten
      // to the base variant's address; no body is emitted for it.
      StringRef MangledName = CGM.getMangledName(GD);
      auto *Aliasee = CGM.GetAddrOfGlobal(BaseDecl);
      CGM.addReplacement(MangledName, Aliasee);
      return;
    }
  }

  // The base destructor is equivalent to the base destructor of its
  // base class if there is exactly one non-virtual base class with a
  // non-trivial destructor, there are no fields with a non-trivial
  // destructor, and the body of the destructor is trivial.
  if (DD && GD.getDtorType() == Dtor_Base &&
      CGType != StructorCodegen::COMDAT &&
      !CGM.TryEmitBaseDestructorAsAlias(DD))
    return;

  // FIXME: The deleting destructor is equivalent to the selected operator
  // delete if:
  //  * either the delete is a destroying operator delete or the destructor
  //    would be trivial if it weren't virtual,
  //  * the conversion from the 'this' parameter to the first parameter of
  //    the destructor is equivalent to a bitcast,
  //  * the destructor does not have an implicit "this" return, and
  //  * the operator delete has the same calling convention and IR function
  //    type as the destructor.
  // In such cases we should try to emit the deleting dtor as an alias to
  // the selected 'operator delete'.

  llvm::Function *Fn = CGM.codegenCXXStructor(GD);

  if (CGType == StructorCodegen::COMDAT) {
    // Put the body in the shared C5/D5 comdat group so the complete and
    // base variants are deduplicated together by the linker.
    SmallString<256> Buffer;
    llvm::raw_svector_ostream Out(Buffer);
    if (DD)
      getMangleContext().mangleCXXDtorComdat(DD, Out);
    else
      getMangleContext().mangleCXXCtorComdat(CD, Out);
    llvm::Comdat *C = CGM.getModule().getOrInsertComdat(Out.str());
    Fn->setComdat(C);
  } else {
    CGM.maybeSetTrivialComdat(*MD, *Fn);
  }
}
  3695. static llvm::FunctionCallee getBeginCatchFn(CodeGenModule &CGM) {
  3696. // void *__cxa_begin_catch(void*);
  3697. llvm::FunctionType *FTy = llvm::FunctionType::get(
  3698. CGM.Int8PtrTy, CGM.Int8PtrTy, /*isVarArg=*/false);
  3699. return CGM.CreateRuntimeFunction(FTy, "__cxa_begin_catch");
  3700. }
  3701. static llvm::FunctionCallee getEndCatchFn(CodeGenModule &CGM) {
  3702. // void __cxa_end_catch();
  3703. llvm::FunctionType *FTy =
  3704. llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  3705. return CGM.CreateRuntimeFunction(FTy, "__cxa_end_catch");
  3706. }
  3707. static llvm::FunctionCallee getGetExceptionPtrFn(CodeGenModule &CGM) {
  3708. // void *__cxa_get_exception_ptr(void*);
  3709. llvm::FunctionType *FTy = llvm::FunctionType::get(
  3710. CGM.Int8PtrTy, CGM.Int8PtrTy, /*isVarArg=*/false);
  3711. return CGM.CreateRuntimeFunction(FTy, "__cxa_get_exception_ptr");
  3712. }
  3713. namespace {
  3714. /// A cleanup to call __cxa_end_catch. In many cases, the caught
  3715. /// exception type lets us state definitively that the thrown exception
  3716. /// type does not have a destructor. In particular:
  3717. /// - Catch-alls tell us nothing, so we have to conservatively
  3718. /// assume that the thrown exception might have a destructor.
  3719. /// - Catches by reference behave according to their base types.
  3720. /// - Catches of non-record types will only trigger for exceptions
  3721. /// of non-record types, which never have destructors.
  3722. /// - Catches of record types can trigger for arbitrary subclasses
  3723. /// of the caught type, so we have to assume the actual thrown
  3724. /// exception type might have a throwing destructor, even if the
  3725. /// caught type's destructor is trivial or nothrow.
  3726. struct CallEndCatch final : EHScopeStack::Cleanup {
  3727. CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {}
  3728. bool MightThrow;
  3729. void Emit(CodeGenFunction &CGF, Flags flags) override {
  3730. if (!MightThrow) {
  3731. CGF.EmitNounwindRuntimeCall(getEndCatchFn(CGF.CGM));
  3732. return;
  3733. }
  3734. CGF.EmitRuntimeCallOrInvoke(getEndCatchFn(CGF.CGM));
  3735. }
  3736. };
  3737. }
  3738. /// Emits a call to __cxa_begin_catch and enters a cleanup to call
  3739. /// __cxa_end_catch.
  3740. ///
  3741. /// \param EndMightThrow - true if __cxa_end_catch might throw
  3742. static llvm::Value *CallBeginCatch(CodeGenFunction &CGF,
  3743. llvm::Value *Exn,
  3744. bool EndMightThrow) {
  3745. llvm::CallInst *call =
  3746. CGF.EmitNounwindRuntimeCall(getBeginCatchFn(CGF.CGM), Exn);
  3747. CGF.EHStack.pushCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
  3748. return call;
  3749. }
/// A "special initializer" callback for initializing a catch
/// parameter during catch initialization.
///
/// Calls __cxa_begin_catch (pushing the matching __cxa_end_catch cleanup)
/// at the point appropriate to how the parameter binds the exception:
/// by reference, by scalar/complex value, or by aggregate copy.
static void InitCatchParam(CodeGenFunction &CGF,
                           const VarDecl &CatchParam,
                           Address ParamAddr,
                           SourceLocation Loc) {
  // Load the exception from where the landing pad saved it.
  llvm::Value *Exn = CGF.getExceptionFromSlot();

  CanQualType CatchType =
    CGF.CGM.getContext().getCanonicalType(CatchParam.getType());
  llvm::Type *LLVMCatchTy = CGF.ConvertTypeForMem(CatchType);

  // If we're catching by reference, we can just cast the object
  // pointer to the appropriate pointer.
  if (isa<ReferenceType>(CatchType)) {
    QualType CaughtType = cast<ReferenceType>(CatchType)->getPointeeType();
    // Only record types can have destructors, so only they can make
    // __cxa_end_catch throw (see CallEndCatch).
    bool EndCatchMightThrow = CaughtType->isRecordType();

    // __cxa_begin_catch returns the adjusted object pointer.
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, EndCatchMightThrow);

    // We have no way to tell the personality function that we're
    // catching by reference, so if we're catching a pointer,
    // __cxa_begin_catch will actually return that pointer by value.
    if (const PointerType *PT = dyn_cast<PointerType>(CaughtType)) {
      QualType PointeeType = PT->getPointeeType();

      // When catching by reference, generally we should just ignore
      // this by-value pointer and use the exception object instead.
      if (!PointeeType->isRecordType()) {

        // Exn points to the struct _Unwind_Exception header, which
        // we have to skip past in order to reach the exception data.
        unsigned HeaderSize =
          CGF.CGM.getTargetCodeGenInfo().getSizeOfUnwindException();
        AdjustedExn =
            CGF.Builder.CreateConstGEP1_32(CGF.Int8Ty, Exn, HeaderSize);

      // However, if we're catching a pointer-to-record type that won't
      // work, because the personality function might have adjusted
      // the pointer.  There's actually no way for us to fully satisfy
      // the language/ABI contract here:  we can't use Exn because it
      // might have the wrong adjustment, but we can't use the by-value
      // pointer because it's off by a level of abstraction.
      //
      // The current solution is to dump the adjusted pointer into an
      // alloca, which breaks language semantics (because changing the
      // pointer doesn't change the exception) but at least works.
      // The better solution would be to filter out non-exact matches
      // and rethrow them, but this is tricky because the rethrow
      // really needs to be catchable by other sites at this landing
      // pad.  The best solution is to fix the personality function.
      } else {
        // Pull the pointer for the reference type off.
        llvm::Type *PtrTy = CGF.ConvertTypeForMem(CaughtType);

        // Create the temporary and write the adjusted pointer into it.
        Address ExnPtrTmp =
          CGF.CreateTempAlloca(PtrTy, CGF.getPointerAlign(), "exn.byref.tmp");
        llvm::Value *Casted = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
        CGF.Builder.CreateStore(Casted, ExnPtrTmp);

        // Bind the reference to the temporary.
        AdjustedExn = ExnPtrTmp.getPointer();
      }
    }

    llvm::Value *ExnCast =
      CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.byref");
    CGF.Builder.CreateStore(ExnCast, ParamAddr);
    return;
  }

  // Scalars and complexes.
  TypeEvaluationKind TEK = CGF.getEvaluationKind(CatchType);
  if (TEK != TEK_Aggregate) {
    // Non-record exceptions never have destructors, so __cxa_end_catch
    // cannot throw here.
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, false);

    // If the catch type is a pointer type, __cxa_begin_catch returns
    // the pointer by value.
    if (CatchType->hasPointerRepresentation()) {
      llvm::Value *CastExn =
        CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.casted");

      switch (CatchType.getQualifiers().getObjCLifetime()) {
      case Qualifiers::OCL_Strong:
        // ARC strong lifetime: retain before storing, then fall into the
        // plain-store path.
        CastExn = CGF.EmitARCRetainNonBlock(CastExn);
        [[fallthrough]];

      case Qualifiers::OCL_None:
      case Qualifiers::OCL_ExplicitNone:
      case Qualifiers::OCL_Autoreleasing:
        CGF.Builder.CreateStore(CastExn, ParamAddr);
        return;

      case Qualifiers::OCL_Weak:
        CGF.EmitARCInitWeak(ParamAddr, CastExn);
        return;
      }
      llvm_unreachable("bad ownership qualifier!");
    }

    // Otherwise, it returns a pointer into the exception object.
    llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok
    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);

    LValue srcLV = CGF.MakeNaturalAlignAddrLValue(Cast, CatchType);
    LValue destLV = CGF.MakeAddrLValue(ParamAddr, CatchType);
    switch (TEK) {
    case TEK_Complex:
      CGF.EmitStoreOfComplex(CGF.EmitLoadOfComplex(srcLV, Loc), destLV,
                             /*init*/ true);
      return;
    case TEK_Scalar: {
      llvm::Value *ExnLoad = CGF.EmitLoadOfScalar(srcLV, Loc);
      CGF.EmitStoreOfScalar(ExnLoad, destLV, /*init*/ true);
      return;
    }
    case TEK_Aggregate:
      llvm_unreachable("evaluation kind filtered out!");
    }
    llvm_unreachable("bad evaluation kind");
  }

  assert(isa<RecordType>(CatchType) && "unexpected catch type!");
  auto catchRD = CatchType->getAsCXXRecordDecl();
  CharUnits caughtExnAlignment = CGF.CGM.getClassPointerAlignment(catchRD);

  llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok

  // Check for a copy expression.  If we don't have a copy expression,
  // that means a trivial copy is okay.
  const Expr *copyExpr = CatchParam.getInit();
  if (!copyExpr) {
    llvm::Value *rawAdjustedExn = CallBeginCatch(CGF, Exn, true);
    Address adjustedExn(CGF.Builder.CreateBitCast(rawAdjustedExn, PtrTy),
                        LLVMCatchTy, caughtExnAlignment);
    LValue Dest = CGF.MakeAddrLValue(ParamAddr, CatchType);
    LValue Src = CGF.MakeAddrLValue(adjustedExn, CatchType);
    CGF.EmitAggregateCopy(Dest, Src, CatchType, AggValueSlot::DoesNotOverlap);
    return;
  }

  // We have to call __cxa_get_exception_ptr to get the adjusted
  // pointer before copying.
  llvm::CallInst *rawAdjustedExn =
    CGF.EmitNounwindRuntimeCall(getGetExceptionPtrFn(CGF.CGM), Exn);

  // Cast that to the appropriate type.
  Address adjustedExn(CGF.Builder.CreateBitCast(rawAdjustedExn, PtrTy),
                      LLVMCatchTy, caughtExnAlignment);

  // The copy expression is defined in terms of an OpaqueValueExpr.
  // Find it and map it to the adjusted expression.
  CodeGenFunction::OpaqueValueMapping
    opaque(CGF, OpaqueValueExpr::findInCopyConstruct(copyExpr),
           CGF.MakeAddrLValue(adjustedExn, CatchParam.getType()));

  // Call the copy ctor in a terminate scope.
  CGF.EHStack.pushTerminate();

  // Perform the copy construction.
  CGF.EmitAggExpr(copyExpr,
                  AggValueSlot::forAddr(ParamAddr, Qualifiers(),
                                        AggValueSlot::IsNotDestructed,
                                        AggValueSlot::DoesNotNeedGCBarriers,
                                        AggValueSlot::IsNotAliased,
                                        AggValueSlot::DoesNotOverlap));

  // Leave the terminate scope.
  CGF.EHStack.popTerminate();

  // Undo the opaque value mapping.
  opaque.pop();

  // Finally we can call __cxa_begin_catch.
  CallBeginCatch(CGF, Exn, true);
}
  3901. /// Begins a catch statement by initializing the catch variable and
  3902. /// calling __cxa_begin_catch.
  3903. void ItaniumCXXABI::emitBeginCatch(CodeGenFunction &CGF,
  3904. const CXXCatchStmt *S) {
  3905. // We have to be very careful with the ordering of cleanups here:
  3906. // C++ [except.throw]p4:
  3907. // The destruction [of the exception temporary] occurs
  3908. // immediately after the destruction of the object declared in
  3909. // the exception-declaration in the handler.
  3910. //
  3911. // So the precise ordering is:
  3912. // 1. Construct catch variable.
  3913. // 2. __cxa_begin_catch
  3914. // 3. Enter __cxa_end_catch cleanup
  3915. // 4. Enter dtor cleanup
  3916. //
  3917. // We do this by using a slightly abnormal initialization process.
  3918. // Delegation sequence:
  3919. // - ExitCXXTryStmt opens a RunCleanupsScope
  3920. // - EmitAutoVarAlloca creates the variable and debug info
  3921. // - InitCatchParam initializes the variable from the exception
  3922. // - CallBeginCatch calls __cxa_begin_catch
  3923. // - CallBeginCatch enters the __cxa_end_catch cleanup
  3924. // - EmitAutoVarCleanups enters the variable destructor cleanup
  3925. // - EmitCXXTryStmt emits the code for the catch body
  3926. // - EmitCXXTryStmt close the RunCleanupsScope
  3927. VarDecl *CatchParam = S->getExceptionDecl();
  3928. if (!CatchParam) {
  3929. llvm::Value *Exn = CGF.getExceptionFromSlot();
  3930. CallBeginCatch(CGF, Exn, true);
  3931. return;
  3932. }
  3933. // Emit the local.
  3934. CodeGenFunction::AutoVarEmission var = CGF.EmitAutoVarAlloca(*CatchParam);
  3935. InitCatchParam(CGF, *CatchParam, var.getObjectAddress(CGF), S->getBeginLoc());
  3936. CGF.EmitAutoVarCleanups(var);
  3937. }
/// Get or define the following function:
///   void @__clang_call_terminate(i8* %exn) nounwind noreturn
/// This code is used only in C++.
static llvm::FunctionCallee getClangCallTerminateFn(CodeGenModule &CGM) {
  ASTContext &C = CGM.getContext();
  const CGFunctionInfo &FI = CGM.getTypes().arrangeBuiltinFunctionDeclaration(
      C.VoidTy, {C.getPointerType(C.CharTy)});
  llvm::FunctionType *fnTy = CGM.getTypes().GetFunctionType(FI);
  llvm::FunctionCallee fnRef = CGM.CreateRuntimeFunction(
      fnTy, "__clang_call_terminate", llvm::AttributeList(), /*Local=*/true);
  llvm::Function *fn =
      cast<llvm::Function>(fnRef.getCallee()->stripPointerCasts());
  // Only fill in a body if the function is still a bare declaration; an
  // earlier call in this module may already have defined it.
  if (fn->empty()) {
    CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI, fn, /*IsThunk=*/false);
    fn->setDoesNotThrow();
    fn->setDoesNotReturn();

    // What we really want is to massively penalize inlining without
    // forbidding it completely.  The difference between that and
    // 'noinline' is negligible.
    fn->addFnAttr(llvm::Attribute::NoInline);

    // Allow this function to be shared across translation units, but
    // we don't want it to turn into an exported symbol.
    fn->setLinkage(llvm::Function::LinkOnceODRLinkage);
    fn->setVisibility(llvm::Function::HiddenVisibility);
    if (CGM.supportsCOMDAT())
      fn->setComdat(CGM.getModule().getOrInsertComdat(fn->getName()));

    // Set up the function.
    llvm::BasicBlock *entry =
      llvm::BasicBlock::Create(CGM.getLLVMContext(), "", fn);
    CGBuilderTy builder(CGM, entry);

    // Pull the exception pointer out of the parameter list.
    llvm::Value *exn = &*fn->arg_begin();

    // Call __cxa_begin_catch(exn) to mark the exception as handled
    // before terminating.
    llvm::CallInst *catchCall = builder.CreateCall(getBeginCatchFn(CGM), exn);
    catchCall->setDoesNotThrow();
    catchCall->setCallingConv(CGM.getRuntimeCC());

    // Call std::terminate().
    llvm::CallInst *termCall = builder.CreateCall(CGM.getTerminateFn());
    termCall->setDoesNotThrow();
    termCall->setDoesNotReturn();
    termCall->setCallingConv(CGM.getRuntimeCC());

    // std::terminate cannot return.
    builder.CreateUnreachable();
  }
  return fnRef;
}
  3984. llvm::CallInst *
  3985. ItaniumCXXABI::emitTerminateForUnexpectedException(CodeGenFunction &CGF,
  3986. llvm::Value *Exn) {
  3987. // In C++, we want to call __cxa_begin_catch() before terminating.
  3988. if (Exn) {
  3989. assert(CGF.CGM.getLangOpts().CPlusPlus);
  3990. return CGF.EmitNounwindRuntimeCall(getClangCallTerminateFn(CGF.CGM), Exn);
  3991. }
  3992. return CGF.EmitNounwindRuntimeCall(CGF.CGM.getTerminateFn());
  3993. }
/// Load the vtable pointer stored in \p This, returning it paired with the
/// class it was loaded for (\p RD, unchanged under the Itanium ABI).
std::pair<llvm::Value *, const CXXRecordDecl *>
ItaniumCXXABI::LoadVTablePtr(CodeGenFunction &CGF, Address This,
                             const CXXRecordDecl *RD) {
  return {CGF.GetVTablePtr(This, CGM.Int8PtrTy, RD), RD};
}
/// Wasm variant of emitBeginCatch: when wasm exception handling is enabled,
/// push a CatchRetScope cleanup for the current catchpad funclet before
/// delegating to the common Itanium lowering.
void WebAssemblyCXXABI::emitBeginCatch(CodeGenFunction &CGF,
                                       const CXXCatchStmt *C) {
  // NOTE(review): CatchRetScope is declared elsewhere in this file;
  // presumably it emits the catchret that exits the funclet — confirm there.
  if (CGF.getTarget().hasFeature("exception-handling"))
    CGF.EHStack.pushCleanup<CatchRetScope>(
        NormalCleanup, cast<llvm::CatchPadInst>(CGF.CurrentFuncletPad));
  ItaniumCXXABI::emitBeginCatch(CGF, C);
}
/// Wasm variant: terminate without first calling __cxa_begin_catch.
llvm::CallInst *
WebAssemblyCXXABI::emitTerminateForUnexpectedException(CodeGenFunction &CGF,
                                                       llvm::Value *Exn) {
  // Itanium ABI calls __clang_call_terminate(), which __cxa_begin_catch() on
  // the violating exception to mark it handled, but it is currently hard to
  // do with wasm EH instruction structure with catch/catch_all, we just call
  // std::terminate and ignore the violating exception as in CGCXXABI.
  // TODO Consider code transformation that makes calling
  // __clang_call_terminate possible.
  return CGCXXABI::emitTerminateForUnexpectedException(CGF, Exn);
}
/// Register a global destructor as best as we know how.
///
/// TLS variables are registered via __pt_atexit_np (which cannot later be
/// unregistered); other variables get an atexit-registered __dtor stub plus
/// a __finalize function that can unregister and run it.
void XLCXXABI::registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
                                  llvm::FunctionCallee Dtor,
                                  llvm::Constant *Addr) {
  if (D.getTLSKind() != VarDecl::TLS_None) {
    // atexit routine expects "int(*)(int,...)"
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(CGM.IntTy, CGM.IntTy, true);
    llvm::PointerType *FpTy = FTy->getPointerTo();

    // extern "C" int __pt_atexit_np(int flags, int(*)(int,...), ...);
    llvm::FunctionType *AtExitTy =
        llvm::FunctionType::get(CGM.IntTy, {CGM.IntTy, FpTy}, true);

    // Fetch the actual function.
    llvm::FunctionCallee AtExit =
        CGM.CreateRuntimeFunction(AtExitTy, "__pt_atexit_np");

    // Create __dtor function for the var decl.
    llvm::Function *DtorStub = CGF.createTLSAtExitStub(D, Dtor, Addr, AtExit);

    // Register above __dtor with atexit().
    // First param is flags and must be 0, second param is function ptr
    llvm::Value *NV = llvm::Constant::getNullValue(CGM.IntTy);
    CGF.EmitNounwindRuntimeCall(AtExit, {NV, DtorStub});

    // Cannot unregister TLS __dtor so done
    return;
  }

  // Create __dtor function for the var decl.
  llvm::Function *DtorStub = CGF.createAtExitStub(D, Dtor, Addr);

  // Register above __dtor with atexit().
  CGF.registerGlobalDtorWithAtExit(DtorStub);

  // Emit __finalize function to unregister __dtor and (as appropriate) call
  // __dtor.
  emitCXXStermFinalizer(D, DtorStub, Addr);
}
/// Emit the __finalize ("sterm finalizer") function for \p D: it calls
/// unatexit on \p dtorStub and, if the destructor registration was still
/// pending (unatexit returned 0), runs the stub itself.  The finalizer is
/// then queued with the module according to the variable's init priority
/// and linkage.
void XLCXXABI::emitCXXStermFinalizer(const VarDecl &D, llvm::Function *dtorStub,
                                     llvm::Constant *addr) {
  llvm::FunctionType *FTy = llvm::FunctionType::get(CGM.VoidTy, false);
  SmallString<256> FnName;
  {
    llvm::raw_svector_ostream Out(FnName);
    getMangleContext().mangleDynamicStermFinalizer(&D, Out);
  }

  // Create the finalization action associated with a variable.
  const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
  llvm::Function *StermFinalizer = CGM.CreateGlobalInitOrCleanUpFunction(
      FTy, FnName.str(), FI, D.getLocation());

  CodeGenFunction CGF(CGM);

  CGF.StartFunction(GlobalDecl(), CGM.getContext().VoidTy, StermFinalizer, FI,
                    FunctionArgList(), D.getLocation(),
                    D.getInit()->getExprLoc());

  // The unatexit subroutine unregisters __dtor functions that were previously
  // registered by the atexit subroutine.  If the referenced function is found,
  // the unatexit returns a value of 0, meaning that the cleanup is still
  // pending (and we should call the __dtor function).
  llvm::Value *V = CGF.unregisterGlobalDtorWithUnAtExit(dtorStub);

  llvm::Value *NeedsDestruct = CGF.Builder.CreateIsNull(V, "needs_destruct");

  llvm::BasicBlock *DestructCallBlock = CGF.createBasicBlock("destruct.call");
  llvm::BasicBlock *EndBlock = CGF.createBasicBlock("destruct.end");

  // Check if unatexit returns a value of 0.  If it does, jump to
  // DestructCallBlock, otherwise jump to EndBlock directly.
  CGF.Builder.CreateCondBr(NeedsDestruct, DestructCallBlock, EndBlock);

  CGF.EmitBlock(DestructCallBlock);

  // Emit the call to dtorStub.
  llvm::CallInst *CI = CGF.Builder.CreateCall(dtorStub);

  // Make sure the call and the callee agree on calling convention.
  CI->setCallingConv(dtorStub->getCallingConv());

  CGF.EmitBlock(EndBlock);

  CGF.FinishFunction();

  if (auto *IPA = D.getAttr<InitPriorityAttr>()) {
    // Prioritized finalizers run in a dedicated ordered list.
    CGM.AddCXXPrioritizedStermFinalizerEntry(StermFinalizer,
                                             IPA->getPriority());
  } else if (isTemplateInstantiation(D.getTemplateSpecializationKind()) ||
             getContext().GetGVALinkageForVariable(&D) == GVA_DiscardableODR) {
    // According to C++ [basic.start.init]p2, class template static data
    // members (i.e., implicitly or explicitly instantiated specializations)
    // have unordered initialization.  As a consequence, we can put them into
    // their own llvm.global_dtors entry.
    CGM.AddCXXStermFinalizerToGlobalDtor(StermFinalizer, 65535);
  } else {
    CGM.AddCXXStermFinalizerEntry(StermFinalizer);
  }
}