// ItaniumCXXABI.cpp
// (Removed: pagination / line-number residue left over from HTML extraction;
// it carried no source content.)
  1. //===------- ItaniumCXXABI.cpp - Emit LLVM Code from ASTs for a Module ----===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This provides C++ code generation targeting the Itanium C++ ABI. The class
  10. // in this file generates structures that follow the Itanium C++ ABI, which is
  11. // documented at:
  12. // https://itanium-cxx-abi.github.io/cxx-abi/abi.html
  13. // https://itanium-cxx-abi.github.io/cxx-abi/abi-eh.html
  14. //
  15. // It also supports the closely-related ARM ABI, documented at:
  16. // https://developer.arm.com/documentation/ihi0041/g/
  17. //
  18. //===----------------------------------------------------------------------===//
  19. #include "CGCXXABI.h"
  20. #include "CGCleanup.h"
  21. #include "CGRecordLayout.h"
  22. #include "CGVTables.h"
  23. #include "CodeGenFunction.h"
  24. #include "CodeGenModule.h"
  25. #include "TargetInfo.h"
  26. #include "clang/AST/Attr.h"
  27. #include "clang/AST/Mangle.h"
  28. #include "clang/AST/StmtCXX.h"
  29. #include "clang/AST/Type.h"
  30. #include "clang/CodeGen/ConstantInitBuilder.h"
  31. #include "llvm/IR/DataLayout.h"
  32. #include "llvm/IR/GlobalValue.h"
  33. #include "llvm/IR/Instructions.h"
  34. #include "llvm/IR/Intrinsics.h"
  35. #include "llvm/IR/Value.h"
  36. #include "llvm/Support/ScopedPrinter.h"
  37. using namespace clang;
  38. using namespace CodeGen;
  39. namespace {
  40. class ItaniumCXXABI : public CodeGen::CGCXXABI {
  41. /// VTables - All the vtables which have been defined.
  42. llvm::DenseMap<const CXXRecordDecl *, llvm::GlobalVariable *> VTables;
  43. /// All the thread wrapper functions that have been used.
  44. llvm::SmallVector<std::pair<const VarDecl *, llvm::Function *>, 8>
  45. ThreadWrappers;
  46. protected:
  47. bool UseARMMethodPtrABI;
  48. bool UseARMGuardVarABI;
  49. bool Use32BitVTableOffsetABI;
  50. ItaniumMangleContext &getMangleContext() {
  51. return cast<ItaniumMangleContext>(CodeGen::CGCXXABI::getMangleContext());
  52. }
  53. public:
  // Construct the ABI object. The two ARM flags select the closely-related
  // ARM variants of the member-function-pointer and guard-variable ABIs
  // (see the file header); Use32BitVTableOffsetABI starts out false and is
  // presumably flipped by derived ABIs that need it — confirm at the
  // subclasses.
  ItaniumCXXABI(CodeGen::CodeGenModule &CGM,
                bool UseARMMethodPtrABI = false,
                bool UseARMGuardVarABI = false) :
    CGCXXABI(CGM), UseARMMethodPtrABI(UseARMMethodPtrABI),
    UseARMGuardVarABI(UseARMGuardVarABI),
    Use32BitVTableOffsetABI(false) { }
  60. bool classifyReturnType(CGFunctionInfo &FI) const override;
  61. RecordArgABI getRecordArgABI(const CXXRecordDecl *RD) const override {
  62. // If C++ prohibits us from making a copy, pass by address.
  63. if (!RD->canPassInRegisters())
  64. return RAA_Indirect;
  65. return RAA_Default;
  66. }
  67. bool isThisCompleteObject(GlobalDecl GD) const override {
  68. // The Itanium ABI has separate complete-object vs. base-object
  69. // variants of both constructors and destructors.
  70. if (isa<CXXDestructorDecl>(GD.getDecl())) {
  71. switch (GD.getDtorType()) {
  72. case Dtor_Complete:
  73. case Dtor_Deleting:
  74. return true;
  75. case Dtor_Base:
  76. return false;
  77. case Dtor_Comdat:
  78. llvm_unreachable("emitting dtor comdat as function?");
  79. }
  80. llvm_unreachable("bad dtor kind");
  81. }
  82. if (isa<CXXConstructorDecl>(GD.getDecl())) {
  83. switch (GD.getCtorType()) {
  84. case Ctor_Complete:
  85. return true;
  86. case Ctor_Base:
  87. return false;
  88. case Ctor_CopyingClosure:
  89. case Ctor_DefaultClosure:
  90. llvm_unreachable("closure ctors in Itanium ABI?");
  91. case Ctor_Comdat:
  92. llvm_unreachable("emitting ctor comdat as function?");
  93. }
  94. llvm_unreachable("bad dtor kind");
  95. }
  96. // No other kinds.
  97. return false;
  98. }
  99. bool isZeroInitializable(const MemberPointerType *MPT) override;
  100. llvm::Type *ConvertMemberPointerType(const MemberPointerType *MPT) override;
  101. CGCallee
  102. EmitLoadOfMemberFunctionPointer(CodeGenFunction &CGF,
  103. const Expr *E,
  104. Address This,
  105. llvm::Value *&ThisPtrForCall,
  106. llvm::Value *MemFnPtr,
  107. const MemberPointerType *MPT) override;
  108. llvm::Value *
  109. EmitMemberDataPointerAddress(CodeGenFunction &CGF, const Expr *E,
  110. Address Base,
  111. llvm::Value *MemPtr,
  112. const MemberPointerType *MPT) override;
  113. llvm::Value *EmitMemberPointerConversion(CodeGenFunction &CGF,
  114. const CastExpr *E,
  115. llvm::Value *Src) override;
  116. llvm::Constant *EmitMemberPointerConversion(const CastExpr *E,
  117. llvm::Constant *Src) override;
  118. llvm::Constant *EmitNullMemberPointer(const MemberPointerType *MPT) override;
  119. llvm::Constant *EmitMemberFunctionPointer(const CXXMethodDecl *MD) override;
  120. llvm::Constant *EmitMemberDataPointer(const MemberPointerType *MPT,
  121. CharUnits offset) override;
  122. llvm::Constant *EmitMemberPointer(const APValue &MP, QualType MPT) override;
  123. llvm::Constant *BuildMemberPointer(const CXXMethodDecl *MD,
  124. CharUnits ThisAdjustment);
  125. llvm::Value *EmitMemberPointerComparison(CodeGenFunction &CGF,
  126. llvm::Value *L, llvm::Value *R,
  127. const MemberPointerType *MPT,
  128. bool Inequality) override;
  129. llvm::Value *EmitMemberPointerIsNotNull(CodeGenFunction &CGF,
  130. llvm::Value *Addr,
  131. const MemberPointerType *MPT) override;
  132. void emitVirtualObjectDelete(CodeGenFunction &CGF, const CXXDeleteExpr *DE,
  133. Address Ptr, QualType ElementType,
  134. const CXXDestructorDecl *Dtor) override;
  135. void emitRethrow(CodeGenFunction &CGF, bool isNoReturn) override;
  136. void emitThrow(CodeGenFunction &CGF, const CXXThrowExpr *E) override;
  137. void emitBeginCatch(CodeGenFunction &CGF, const CXXCatchStmt *C) override;
  138. llvm::CallInst *
  139. emitTerminateForUnexpectedException(CodeGenFunction &CGF,
  140. llvm::Value *Exn) override;
  141. void EmitFundamentalRTTIDescriptors(const CXXRecordDecl *RD);
  142. llvm::Constant *getAddrOfRTTIDescriptor(QualType Ty) override;
  143. CatchTypeInfo
  144. getAddrOfCXXCatchHandlerType(QualType Ty,
  145. QualType CatchHandlerType) override {
  146. return CatchTypeInfo{getAddrOfRTTIDescriptor(Ty), 0};
  147. }
  148. bool shouldTypeidBeNullChecked(bool IsDeref, QualType SrcRecordTy) override;
  149. void EmitBadTypeidCall(CodeGenFunction &CGF) override;
  150. llvm::Value *EmitTypeid(CodeGenFunction &CGF, QualType SrcRecordTy,
  151. Address ThisPtr,
  152. llvm::Type *StdTypeInfoPtrTy) override;
  153. bool shouldDynamicCastCallBeNullChecked(bool SrcIsPtr,
  154. QualType SrcRecordTy) override;
  155. llvm::Value *EmitDynamicCastCall(CodeGenFunction &CGF, Address Value,
  156. QualType SrcRecordTy, QualType DestTy,
  157. QualType DestRecordTy,
  158. llvm::BasicBlock *CastEnd) override;
  159. llvm::Value *EmitDynamicCastToVoid(CodeGenFunction &CGF, Address Value,
  160. QualType SrcRecordTy,
  161. QualType DestTy) override;
  162. bool EmitBadCastCall(CodeGenFunction &CGF) override;
  163. llvm::Value *
  164. GetVirtualBaseClassOffset(CodeGenFunction &CGF, Address This,
  165. const CXXRecordDecl *ClassDecl,
  166. const CXXRecordDecl *BaseClassDecl) override;
  167. void EmitCXXConstructors(const CXXConstructorDecl *D) override;
  168. AddedStructorArgCounts
  169. buildStructorSignature(GlobalDecl GD,
  170. SmallVectorImpl<CanQualType> &ArgTys) override;
  /// Whether a destructor variant should be emitted as an inline thunk to
  /// another variant. Always false for Itanium.
  bool useThunkForDtorVariant(const CXXDestructorDecl *Dtor,
                              CXXDtorType DT) const override {
    // Itanium does not emit any destructor variant as an inline thunk.
    // Delegating may occur as an optimization, but all variants are either
    // emitted with external linkage or as linkonce if they are inline and used.
    return false;
  }
  178. void EmitCXXDestructors(const CXXDestructorDecl *D) override;
  179. void addImplicitStructorParams(CodeGenFunction &CGF, QualType &ResTy,
  180. FunctionArgList &Params) override;
  181. void EmitInstanceFunctionProlog(CodeGenFunction &CGF) override;
  182. AddedStructorArgs getImplicitConstructorArgs(CodeGenFunction &CGF,
  183. const CXXConstructorDecl *D,
  184. CXXCtorType Type,
  185. bool ForVirtualBase,
  186. bool Delegating) override;
  187. llvm::Value *getCXXDestructorImplicitParam(CodeGenFunction &CGF,
  188. const CXXDestructorDecl *DD,
  189. CXXDtorType Type,
  190. bool ForVirtualBase,
  191. bool Delegating) override;
  192. void EmitDestructorCall(CodeGenFunction &CGF, const CXXDestructorDecl *DD,
  193. CXXDtorType Type, bool ForVirtualBase,
  194. bool Delegating, Address This,
  195. QualType ThisTy) override;
  196. void emitVTableDefinitions(CodeGenVTables &CGVT,
  197. const CXXRecordDecl *RD) override;
  198. bool isVirtualOffsetNeededForVTableField(CodeGenFunction &CGF,
  199. CodeGenFunction::VPtr Vptr) override;
  /// In this ABI the constructors/destructors themselves are responsible for
  /// storing the vtable pointers, so codegen initializes vptrs inside the
  /// structor bodies.
  bool doStructorsInitializeVPtrs(const CXXRecordDecl *VTableClass) override {
    return true;
  }
  203. llvm::Constant *
  204. getVTableAddressPoint(BaseSubobject Base,
  205. const CXXRecordDecl *VTableClass) override;
  206. llvm::Value *getVTableAddressPointInStructor(
  207. CodeGenFunction &CGF, const CXXRecordDecl *VTableClass,
  208. BaseSubobject Base, const CXXRecordDecl *NearestVBase) override;
  209. llvm::Value *getVTableAddressPointInStructorWithVTT(
  210. CodeGenFunction &CGF, const CXXRecordDecl *VTableClass,
  211. BaseSubobject Base, const CXXRecordDecl *NearestVBase);
  212. llvm::Constant *
  213. getVTableAddressPointForConstExpr(BaseSubobject Base,
  214. const CXXRecordDecl *VTableClass) override;
  215. llvm::GlobalVariable *getAddrOfVTable(const CXXRecordDecl *RD,
  216. CharUnits VPtrOffset) override;
  217. CGCallee getVirtualFunctionPointer(CodeGenFunction &CGF, GlobalDecl GD,
  218. Address This, llvm::Type *Ty,
  219. SourceLocation Loc) override;
  220. llvm::Value *EmitVirtualDestructorCall(CodeGenFunction &CGF,
  221. const CXXDestructorDecl *Dtor,
  222. CXXDtorType DtorType, Address This,
  223. DeleteOrMemberCallExpr E) override;
  224. void emitVirtualInheritanceTables(const CXXRecordDecl *RD) override;
  225. bool canSpeculativelyEmitVTable(const CXXRecordDecl *RD) const override;
  226. bool canSpeculativelyEmitVTableAsBaseClass(const CXXRecordDecl *RD) const;
  227. void setThunkLinkage(llvm::Function *Thunk, bool ForVTable, GlobalDecl GD,
  228. bool ReturnAdjustment) override {
  229. // Allow inlining of thunks by emitting them with available_externally
  230. // linkage together with vtables when needed.
  231. if (ForVTable && !Thunk->hasLocalLinkage())
  232. Thunk->setLinkage(llvm::GlobalValue::AvailableExternallyLinkage);
  233. CGM.setGVProperties(Thunk, GD);
  234. }
  235. bool exportThunk() override { return true; }
  236. llvm::Value *performThisAdjustment(CodeGenFunction &CGF, Address This,
  237. const ThisAdjustment &TA) override;
  238. llvm::Value *performReturnAdjustment(CodeGenFunction &CGF, Address Ret,
  239. const ReturnAdjustment &RA) override;
  240. size_t getSrcArgforCopyCtor(const CXXConstructorDecl *,
  241. FunctionArgList &Args) const override {
  242. assert(!Args.empty() && "expected the arglist to not be empty!");
  243. return Args.size() - 1;
  244. }
  245. StringRef GetPureVirtualCallName() override { return "__cxa_pure_virtual"; }
  // Runtime entry point called when a deleted virtual function is invoked.
  StringRef GetDeletedVirtualCallName() override
  { return "__cxa_deleted_virtual"; }
  248. CharUnits getArrayCookieSizeImpl(QualType elementType) override;
  249. Address InitializeArrayCookie(CodeGenFunction &CGF,
  250. Address NewPtr,
  251. llvm::Value *NumElements,
  252. const CXXNewExpr *expr,
  253. QualType ElementType) override;
  254. llvm::Value *readArrayCookieImpl(CodeGenFunction &CGF,
  255. Address allocPtr,
  256. CharUnits cookieSize) override;
  257. void EmitGuardedInit(CodeGenFunction &CGF, const VarDecl &D,
  258. llvm::GlobalVariable *DeclPtr,
  259. bool PerformInit) override;
  260. void registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
  261. llvm::FunctionCallee dtor,
  262. llvm::Constant *addr) override;
  263. llvm::Function *getOrCreateThreadLocalWrapper(const VarDecl *VD,
  264. llvm::Value *Val);
  265. void EmitThreadLocalInitFuncs(
  266. CodeGenModule &CGM,
  267. ArrayRef<const VarDecl *> CXXThreadLocals,
  268. ArrayRef<llvm::Function *> CXXThreadLocalInits,
  269. ArrayRef<const VarDecl *> CXXThreadLocalInitVars) override;
  270. bool usesThreadWrapperFunction(const VarDecl *VD) const override {
  271. return !isEmittedWithConstantInitializer(VD) ||
  272. mayNeedDestruction(VD);
  273. }
  274. LValue EmitThreadLocalVarDeclLValue(CodeGenFunction &CGF, const VarDecl *VD,
  275. QualType LValType) override;
  276. bool NeedsVTTParameter(GlobalDecl GD) override;
  277. /**************************** RTTI Uniqueness ******************************/
  278. protected:
  /// Returns true if the ABI requires RTTI type_info objects to be unique
  /// across a program. Virtual so derived ABIs can opt out of the
  /// uniqueness guarantee.
  virtual bool shouldRTTIBeUnique() const { return true; }
  282. public:
  /// What sort of unique-RTTI behavior should we use?
  enum RTTIUniquenessKind {
    /// We are guaranteeing, or need to guarantee, that the RTTI string
    /// is unique.
    RUK_Unique,

    /// We are not guaranteeing uniqueness for the RTTI string, so we
    /// can demote to hidden visibility but must use string comparisons.
    RUK_NonUniqueHidden,

    /// We are not guaranteeing uniqueness for the RTTI string, so we
    /// have to use string comparisons, but we also have to emit it with
    /// non-hidden visibility.
    RUK_NonUniqueVisible
  };
  296. /// Return the required visibility status for the given type and linkage in
  297. /// the current ABI.
  298. RTTIUniquenessKind
  299. classifyRTTIUniqueness(QualType CanTy,
  300. llvm::GlobalValue::LinkageTypes Linkage) const;
  301. friend class ItaniumRTTIBuilder;
  302. void emitCXXStructor(GlobalDecl GD) override;
  303. std::pair<llvm::Value *, const CXXRecordDecl *>
  304. LoadVTablePtr(CodeGenFunction &CGF, Address This,
  305. const CXXRecordDecl *RD) override;
  306. private:
  307. bool hasAnyUnusedVirtualInlineFunction(const CXXRecordDecl *RD) const {
  308. const auto &VtableLayout =
  309. CGM.getItaniumVTableContext().getVTableLayout(RD);
  310. for (const auto &VtableComponent : VtableLayout.vtable_components()) {
  311. // Skip empty slot.
  312. if (!VtableComponent.isUsedFunctionPointerKind())
  313. continue;
  314. const CXXMethodDecl *Method = VtableComponent.getFunctionDecl();
  315. if (!Method->getCanonicalDecl()->isInlined())
  316. continue;
  317. StringRef Name = CGM.getMangledName(VtableComponent.getGlobalDecl());
  318. auto *Entry = CGM.GetGlobalValue(Name);
  319. // This checks if virtual inline function has already been emitted.
  320. // Note that it is possible that this inline function would be emitted
  321. // after trying to emit vtable speculatively. Because of this we do
  322. // an extra pass after emitting all deferred vtables to find and emit
  323. // these vtables opportunistically.
  324. if (!Entry || Entry->isDeclaration())
  325. return true;
  326. }
  327. return false;
  328. }
  329. bool isVTableHidden(const CXXRecordDecl *RD) const {
  330. const auto &VtableLayout =
  331. CGM.getItaniumVTableContext().getVTableLayout(RD);
  332. for (const auto &VtableComponent : VtableLayout.vtable_components()) {
  333. if (VtableComponent.isRTTIKind()) {
  334. const CXXRecordDecl *RTTIDecl = VtableComponent.getRTTIDecl();
  335. if (RTTIDecl->getVisibility() == Visibility::HiddenVisibility)
  336. return true;
  337. } else if (VtableComponent.isUsedFunctionPointerKind()) {
  338. const CXXMethodDecl *Method = VtableComponent.getFunctionDecl();
  339. if (Method->getVisibility() == Visibility::HiddenVisibility &&
  340. !Method->isDefined())
  341. return true;
  342. }
  343. }
  344. return false;
  345. }
  346. };
  347. class ARMCXXABI : public ItaniumCXXABI {
  348. public:
  349. ARMCXXABI(CodeGen::CodeGenModule &CGM) :
  350. ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
  351. /*UseARMGuardVarABI=*/true) {}
  352. bool HasThisReturn(GlobalDecl GD) const override {
  353. return (isa<CXXConstructorDecl>(GD.getDecl()) || (
  354. isa<CXXDestructorDecl>(GD.getDecl()) &&
  355. GD.getDtorType() != Dtor_Deleting));
  356. }
  357. void EmitReturnFromThunk(CodeGenFunction &CGF, RValue RV,
  358. QualType ResTy) override;
  359. CharUnits getArrayCookieSizeImpl(QualType elementType) override;
  360. Address InitializeArrayCookie(CodeGenFunction &CGF,
  361. Address NewPtr,
  362. llvm::Value *NumElements,
  363. const CXXNewExpr *expr,
  364. QualType ElementType) override;
  365. llvm::Value *readArrayCookieImpl(CodeGenFunction &CGF, Address allocPtr,
  366. CharUnits cookieSize) override;
  367. };
/// C++ ABI for Apple arm64 targets: the ARM variant of the Itanium ABI,
/// plus non-unique RTTI and 32-bit vtable offsets in member pointers.
class AppleARM64CXXABI : public ARMCXXABI {
public:
  AppleARM64CXXABI(CodeGen::CodeGenModule &CGM) : ARMCXXABI(CGM) {
    // Virtual member function pointers only use the low 32 bits of the
    // vtable offset, reserving the upper bits (see the
    // Use32BitVTableOffsetABI handling when loading member function
    // pointers).
    Use32BitVTableOffsetABI = true;
  }

  // ARM64 libraries are prepared for non-unique RTTI.
  bool shouldRTTIBeUnique() const override { return false; }
};
  376. class FuchsiaCXXABI final : public ItaniumCXXABI {
  377. public:
  378. explicit FuchsiaCXXABI(CodeGen::CodeGenModule &CGM)
  379. : ItaniumCXXABI(CGM) {}
  380. private:
  381. bool HasThisReturn(GlobalDecl GD) const override {
  382. return isa<CXXConstructorDecl>(GD.getDecl()) ||
  383. (isa<CXXDestructorDecl>(GD.getDecl()) &&
  384. GD.getDtorType() != Dtor_Deleting);
  385. }
  386. };
  387. class WebAssemblyCXXABI final : public ItaniumCXXABI {
  388. public:
  389. explicit WebAssemblyCXXABI(CodeGen::CodeGenModule &CGM)
  390. : ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
  391. /*UseARMGuardVarABI=*/true) {}
  392. void emitBeginCatch(CodeGenFunction &CGF, const CXXCatchStmt *C) override;
  393. llvm::CallInst *
  394. emitTerminateForUnexpectedException(CodeGenFunction &CGF,
  395. llvm::Value *Exn) override;
  396. private:
  397. bool HasThisReturn(GlobalDecl GD) const override {
  398. return isa<CXXConstructorDecl>(GD.getDecl()) ||
  399. (isa<CXXDestructorDecl>(GD.getDecl()) &&
  400. GD.getDtorType() != Dtor_Deleting);
  401. }
  402. bool canCallMismatchedFunctionType() const override { return false; }
  403. };
/// C++ ABI for IBM AIX (XL) targets, which register global destructors via
/// sinit/sterm finalizer functions rather than __cxa_atexit.
class XLCXXABI final : public ItaniumCXXABI {
public:
  explicit XLCXXABI(CodeGen::CodeGenModule &CGM) : ItaniumCXXABI(CGM) {}

  void registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
                          llvm::FunctionCallee dtor,
                          llvm::Constant *addr) override;

  // AIX uses sinit and sterm functions for global initialization and
  // cleanup.
  bool useSinitAndSterm() const override { return true; }

private:
  // Emit the sterm finalizer that runs \p dtorStub for \p D at program
  // termination.
  void emitCXXStermFinalizer(const VarDecl &D, llvm::Function *dtorStub,
                             llvm::Constant *addr);
};
  416. }
  417. CodeGen::CGCXXABI *CodeGen::CreateItaniumCXXABI(CodeGenModule &CGM) {
  418. switch (CGM.getContext().getCXXABIKind()) {
  419. // For IR-generation purposes, there's no significant difference
  420. // between the ARM and iOS ABIs.
  421. case TargetCXXABI::GenericARM:
  422. case TargetCXXABI::iOS:
  423. case TargetCXXABI::WatchOS:
  424. return new ARMCXXABI(CGM);
  425. case TargetCXXABI::AppleARM64:
  426. return new AppleARM64CXXABI(CGM);
  427. case TargetCXXABI::Fuchsia:
  428. return new FuchsiaCXXABI(CGM);
  429. // Note that AArch64 uses the generic ItaniumCXXABI class since it doesn't
  430. // include the other 32-bit ARM oddities: constructor/destructor return values
  431. // and array cookies.
  432. case TargetCXXABI::GenericAArch64:
  433. return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true,
  434. /*UseARMGuardVarABI=*/true);
  435. case TargetCXXABI::GenericMIPS:
  436. return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true);
  437. case TargetCXXABI::WebAssembly:
  438. return new WebAssemblyCXXABI(CGM);
  439. case TargetCXXABI::XL:
  440. return new XLCXXABI(CGM);
  441. case TargetCXXABI::GenericItanium:
  442. if (CGM.getContext().getTargetInfo().getTriple().getArch()
  443. == llvm::Triple::le32) {
  444. // For PNaCl, use ARM-style method pointers so that PNaCl code
  445. // does not assume anything about the alignment of function
  446. // pointers.
  447. return new ItaniumCXXABI(CGM, /*UseARMMethodPtrABI=*/true);
  448. }
  449. return new ItaniumCXXABI(CGM);
  450. case TargetCXXABI::Microsoft:
  451. llvm_unreachable("Microsoft ABI is not Itanium-based");
  452. }
  453. llvm_unreachable("bad ABI kind");
  454. }
  455. llvm::Type *
  456. ItaniumCXXABI::ConvertMemberPointerType(const MemberPointerType *MPT) {
  457. if (MPT->isMemberDataPointer())
  458. return CGM.PtrDiffTy;
  459. return llvm::StructType::get(CGM.PtrDiffTy, CGM.PtrDiffTy);
  460. }
  461. /// In the Itanium and ARM ABIs, method pointers have the form:
  462. /// struct { ptrdiff_t ptr; ptrdiff_t adj; } memptr;
  463. ///
  464. /// In the Itanium ABI:
  465. /// - method pointers are virtual if (memptr.ptr & 1) is nonzero
  466. /// - the this-adjustment is (memptr.adj)
  467. /// - the virtual offset is (memptr.ptr - 1)
  468. ///
  469. /// In the ARM ABI:
  470. /// - method pointers are virtual if (memptr.adj & 1) is nonzero
  471. /// - the this-adjustment is (memptr.adj >> 1)
  472. /// - the virtual offset is (memptr.ptr)
  473. /// ARM uses 'adj' for the virtual flag because Thumb functions
  474. /// may be only single-byte aligned.
  475. ///
  476. /// If the member is virtual, the adjusted 'this' pointer points
  477. /// to a vtable pointer from which the virtual offset is applied.
  478. ///
  479. /// If the member is non-virtual, memptr.ptr is the address of
  480. /// the function to call.
  481. CGCallee ItaniumCXXABI::EmitLoadOfMemberFunctionPointer(
  482. CodeGenFunction &CGF, const Expr *E, Address ThisAddr,
  483. llvm::Value *&ThisPtrForCall,
  484. llvm::Value *MemFnPtr, const MemberPointerType *MPT) {
  485. CGBuilderTy &Builder = CGF.Builder;
  486. const FunctionProtoType *FPT =
  487. MPT->getPointeeType()->getAs<FunctionProtoType>();
  488. auto *RD =
  489. cast<CXXRecordDecl>(MPT->getClass()->castAs<RecordType>()->getDecl());
  490. llvm::FunctionType *FTy = CGM.getTypes().GetFunctionType(
  491. CGM.getTypes().arrangeCXXMethodType(RD, FPT, /*FD=*/nullptr));
  492. llvm::Constant *ptrdiff_1 = llvm::ConstantInt::get(CGM.PtrDiffTy, 1);
  493. llvm::BasicBlock *FnVirtual = CGF.createBasicBlock("memptr.virtual");
  494. llvm::BasicBlock *FnNonVirtual = CGF.createBasicBlock("memptr.nonvirtual");
  495. llvm::BasicBlock *FnEnd = CGF.createBasicBlock("memptr.end");
  496. // Extract memptr.adj, which is in the second field.
  497. llvm::Value *RawAdj = Builder.CreateExtractValue(MemFnPtr, 1, "memptr.adj");
  498. // Compute the true adjustment.
  499. llvm::Value *Adj = RawAdj;
  500. if (UseARMMethodPtrABI)
  501. Adj = Builder.CreateAShr(Adj, ptrdiff_1, "memptr.adj.shifted");
  502. // Apply the adjustment and cast back to the original struct type
  503. // for consistency.
  504. llvm::Value *This = ThisAddr.getPointer();
  505. llvm::Value *Ptr = Builder.CreateBitCast(This, Builder.getInt8PtrTy());
  506. Ptr = Builder.CreateInBoundsGEP(Builder.getInt8Ty(), Ptr, Adj);
  507. This = Builder.CreateBitCast(Ptr, This->getType(), "this.adjusted");
  508. ThisPtrForCall = This;
  509. // Load the function pointer.
  510. llvm::Value *FnAsInt = Builder.CreateExtractValue(MemFnPtr, 0, "memptr.ptr");
  511. // If the LSB in the function pointer is 1, the function pointer points to
  512. // a virtual function.
  513. llvm::Value *IsVirtual;
  514. if (UseARMMethodPtrABI)
  515. IsVirtual = Builder.CreateAnd(RawAdj, ptrdiff_1);
  516. else
  517. IsVirtual = Builder.CreateAnd(FnAsInt, ptrdiff_1);
  518. IsVirtual = Builder.CreateIsNotNull(IsVirtual, "memptr.isvirtual");
  519. Builder.CreateCondBr(IsVirtual, FnVirtual, FnNonVirtual);
  520. // In the virtual path, the adjustment left 'This' pointing to the
  521. // vtable of the correct base subobject. The "function pointer" is an
  522. // offset within the vtable (+1 for the virtual flag on non-ARM).
  523. CGF.EmitBlock(FnVirtual);
  524. // Cast the adjusted this to a pointer to vtable pointer and load.
  525. llvm::Type *VTableTy = Builder.getInt8PtrTy();
  526. CharUnits VTablePtrAlign =
  527. CGF.CGM.getDynamicOffsetAlignment(ThisAddr.getAlignment(), RD,
  528. CGF.getPointerAlign());
  529. llvm::Value *VTable = CGF.GetVTablePtr(
  530. Address(This, ThisAddr.getElementType(), VTablePtrAlign), VTableTy, RD);
  531. // Apply the offset.
  532. // On ARM64, to reserve extra space in virtual member function pointers,
  533. // we only pay attention to the low 32 bits of the offset.
  534. llvm::Value *VTableOffset = FnAsInt;
  535. if (!UseARMMethodPtrABI)
  536. VTableOffset = Builder.CreateSub(VTableOffset, ptrdiff_1);
  537. if (Use32BitVTableOffsetABI) {
  538. VTableOffset = Builder.CreateTrunc(VTableOffset, CGF.Int32Ty);
  539. VTableOffset = Builder.CreateZExt(VTableOffset, CGM.PtrDiffTy);
  540. }
  541. // Check the address of the function pointer if CFI on member function
  542. // pointers is enabled.
  543. llvm::Constant *CheckSourceLocation;
  544. llvm::Constant *CheckTypeDesc;
  545. bool ShouldEmitCFICheck = CGF.SanOpts.has(SanitizerKind::CFIMFCall) &&
  546. CGM.HasHiddenLTOVisibility(RD);
  547. bool ShouldEmitVFEInfo = CGM.getCodeGenOpts().VirtualFunctionElimination &&
  548. CGM.HasHiddenLTOVisibility(RD);
  549. bool ShouldEmitWPDInfo =
  550. CGM.getCodeGenOpts().WholeProgramVTables &&
  551. // Don't insert type tests if we are forcing public std visibility.
  552. !CGM.HasLTOVisibilityPublicStd(RD);
  553. llvm::Value *VirtualFn = nullptr;
  554. {
  555. CodeGenFunction::SanitizerScope SanScope(&CGF);
  556. llvm::Value *TypeId = nullptr;
  557. llvm::Value *CheckResult = nullptr;
  558. if (ShouldEmitCFICheck || ShouldEmitVFEInfo || ShouldEmitWPDInfo) {
  559. // If doing CFI, VFE or WPD, we will need the metadata node to check
  560. // against.
  561. llvm::Metadata *MD =
  562. CGM.CreateMetadataIdentifierForVirtualMemPtrType(QualType(MPT, 0));
  563. TypeId = llvm::MetadataAsValue::get(CGF.getLLVMContext(), MD);
  564. }
  565. if (ShouldEmitVFEInfo) {
  566. llvm::Value *VFPAddr =
  567. Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
  568. // If doing VFE, load from the vtable with a type.checked.load intrinsic
  569. // call. Note that we use the GEP to calculate the address to load from
  570. // and pass 0 as the offset to the intrinsic. This is because every
  571. // vtable slot of the correct type is marked with matching metadata, and
  572. // we know that the load must be from one of these slots.
  573. llvm::Value *CheckedLoad = Builder.CreateCall(
  574. CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
  575. {VFPAddr, llvm::ConstantInt::get(CGM.Int32Ty, 0), TypeId});
  576. CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);
  577. VirtualFn = Builder.CreateExtractValue(CheckedLoad, 0);
  578. VirtualFn = Builder.CreateBitCast(VirtualFn, FTy->getPointerTo(),
  579. "memptr.virtualfn");
  580. } else {
  581. // When not doing VFE, emit a normal load, as it allows more
  582. // optimisations than type.checked.load.
  583. if (ShouldEmitCFICheck || ShouldEmitWPDInfo) {
  584. llvm::Value *VFPAddr =
  585. Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
  586. CheckResult = Builder.CreateCall(
  587. CGM.getIntrinsic(llvm::Intrinsic::type_test),
  588. {Builder.CreateBitCast(VFPAddr, CGF.Int8PtrTy), TypeId});
  589. }
  590. if (CGM.getItaniumVTableContext().isRelativeLayout()) {
  591. VirtualFn = CGF.Builder.CreateCall(
  592. CGM.getIntrinsic(llvm::Intrinsic::load_relative,
  593. {VTableOffset->getType()}),
  594. {VTable, VTableOffset});
  595. VirtualFn = CGF.Builder.CreateBitCast(VirtualFn, FTy->getPointerTo());
  596. } else {
  597. llvm::Value *VFPAddr =
  598. CGF.Builder.CreateGEP(CGF.Int8Ty, VTable, VTableOffset);
  599. VFPAddr = CGF.Builder.CreateBitCast(
  600. VFPAddr, FTy->getPointerTo()->getPointerTo());
  601. VirtualFn = CGF.Builder.CreateAlignedLoad(
  602. FTy->getPointerTo(), VFPAddr, CGF.getPointerAlign(),
  603. "memptr.virtualfn");
  604. }
  605. }
  606. assert(VirtualFn && "Virtual fuction pointer not created!");
  607. assert((!ShouldEmitCFICheck || !ShouldEmitVFEInfo || !ShouldEmitWPDInfo ||
  608. CheckResult) &&
  609. "Check result required but not created!");
  610. if (ShouldEmitCFICheck) {
  611. // If doing CFI, emit the check.
  612. CheckSourceLocation = CGF.EmitCheckSourceLocation(E->getBeginLoc());
  613. CheckTypeDesc = CGF.EmitCheckTypeDescriptor(QualType(MPT, 0));
  614. llvm::Constant *StaticData[] = {
  615. llvm::ConstantInt::get(CGF.Int8Ty, CodeGenFunction::CFITCK_VMFCall),
  616. CheckSourceLocation,
  617. CheckTypeDesc,
  618. };
  619. if (CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIMFCall)) {
  620. CGF.EmitTrapCheck(CheckResult, SanitizerHandler::CFICheckFail);
  621. } else {
  622. llvm::Value *AllVtables = llvm::MetadataAsValue::get(
  623. CGM.getLLVMContext(),
  624. llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
  625. llvm::Value *ValidVtable = Builder.CreateCall(
  626. CGM.getIntrinsic(llvm::Intrinsic::type_test), {VTable, AllVtables});
  627. CGF.EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIMFCall),
  628. SanitizerHandler::CFICheckFail, StaticData,
  629. {VTable, ValidVtable});
  630. }
  631. FnVirtual = Builder.GetInsertBlock();
  632. }
  633. } // End of sanitizer scope
  634. CGF.EmitBranch(FnEnd);
  635. // In the non-virtual path, the function pointer is actually a
  636. // function pointer.
  637. CGF.EmitBlock(FnNonVirtual);
  638. llvm::Value *NonVirtualFn =
  639. Builder.CreateIntToPtr(FnAsInt, FTy->getPointerTo(), "memptr.nonvirtualfn");
  640. // Check the function pointer if CFI on member function pointers is enabled.
  641. if (ShouldEmitCFICheck) {
  642. CXXRecordDecl *RD = MPT->getClass()->getAsCXXRecordDecl();
  643. if (RD->hasDefinition()) {
  644. CodeGenFunction::SanitizerScope SanScope(&CGF);
  645. llvm::Constant *StaticData[] = {
  646. llvm::ConstantInt::get(CGF.Int8Ty, CodeGenFunction::CFITCK_NVMFCall),
  647. CheckSourceLocation,
  648. CheckTypeDesc,
  649. };
  650. llvm::Value *Bit = Builder.getFalse();
  651. llvm::Value *CastedNonVirtualFn =
  652. Builder.CreateBitCast(NonVirtualFn, CGF.Int8PtrTy);
  653. for (const CXXRecordDecl *Base : CGM.getMostBaseClasses(RD)) {
  654. llvm::Metadata *MD = CGM.CreateMetadataIdentifierForType(
  655. getContext().getMemberPointerType(
  656. MPT->getPointeeType(),
  657. getContext().getRecordType(Base).getTypePtr()));
  658. llvm::Value *TypeId =
  659. llvm::MetadataAsValue::get(CGF.getLLVMContext(), MD);
  660. llvm::Value *TypeTest =
  661. Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
  662. {CastedNonVirtualFn, TypeId});
  663. Bit = Builder.CreateOr(Bit, TypeTest);
  664. }
  665. CGF.EmitCheck(std::make_pair(Bit, SanitizerKind::CFIMFCall),
  666. SanitizerHandler::CFICheckFail, StaticData,
  667. {CastedNonVirtualFn, llvm::UndefValue::get(CGF.IntPtrTy)});
  668. FnNonVirtual = Builder.GetInsertBlock();
  669. }
  670. }
  671. // We're done.
  672. CGF.EmitBlock(FnEnd);
  673. llvm::PHINode *CalleePtr = Builder.CreatePHI(FTy->getPointerTo(), 2);
  674. CalleePtr->addIncoming(VirtualFn, FnVirtual);
  675. CalleePtr->addIncoming(NonVirtualFn, FnNonVirtual);
  676. CGCallee Callee(FPT, CalleePtr);
  677. return Callee;
  678. }
  679. /// Compute an l-value by applying the given pointer-to-member to a
  680. /// base object.
  681. llvm::Value *ItaniumCXXABI::EmitMemberDataPointerAddress(
  682. CodeGenFunction &CGF, const Expr *E, Address Base, llvm::Value *MemPtr,
  683. const MemberPointerType *MPT) {
  684. assert(MemPtr->getType() == CGM.PtrDiffTy);
  685. CGBuilderTy &Builder = CGF.Builder;
  686. // Cast to char*.
  687. Base = Builder.CreateElementBitCast(Base, CGF.Int8Ty);
  688. // Apply the offset, which we assume is non-null.
  689. llvm::Value *Addr = Builder.CreateInBoundsGEP(
  690. Base.getElementType(), Base.getPointer(), MemPtr, "memptr.offset");
  691. // Cast the address to the appropriate pointer type, adopting the
  692. // address space of the base pointer.
  693. llvm::Type *PType = CGF.ConvertTypeForMem(MPT->getPointeeType())
  694. ->getPointerTo(Base.getAddressSpace());
  695. return Builder.CreateBitCast(Addr, PType);
  696. }
  697. /// Perform a bitcast, derived-to-base, or base-to-derived member pointer
  698. /// conversion.
  699. ///
  700. /// Bitcast conversions are always a no-op under Itanium.
  701. ///
  702. /// Obligatory offset/adjustment diagram:
  703. /// <-- offset --> <-- adjustment -->
  704. /// |--------------------------|----------------------|--------------------|
  705. /// ^Derived address point ^Base address point ^Member address point
  706. ///
  707. /// So when converting a base member pointer to a derived member pointer,
  708. /// we add the offset to the adjustment because the address point has
  709. /// decreased; and conversely, when converting a derived MP to a base MP
  710. /// we subtract the offset from the adjustment because the address point
  711. /// has increased.
  712. ///
  713. /// The standard forbids (at compile time) conversion to and from
  714. /// virtual bases, which is why we don't have to consider them here.
  715. ///
  716. /// The standard forbids (at run time) casting a derived MP to a base
  717. /// MP when the derived MP does not point to a member of the base.
  718. /// This is why -1 is a reasonable choice for null data member
  719. /// pointers.
  720. llvm::Value *
  721. ItaniumCXXABI::EmitMemberPointerConversion(CodeGenFunction &CGF,
  722. const CastExpr *E,
  723. llvm::Value *src) {
  724. assert(E->getCastKind() == CK_DerivedToBaseMemberPointer ||
  725. E->getCastKind() == CK_BaseToDerivedMemberPointer ||
  726. E->getCastKind() == CK_ReinterpretMemberPointer);
  727. // Under Itanium, reinterprets don't require any additional processing.
  728. if (E->getCastKind() == CK_ReinterpretMemberPointer) return src;
  729. // Use constant emission if we can.
  730. if (isa<llvm::Constant>(src))
  731. return EmitMemberPointerConversion(E, cast<llvm::Constant>(src));
  732. llvm::Constant *adj = getMemberPointerAdjustment(E);
  733. if (!adj) return src;
  734. CGBuilderTy &Builder = CGF.Builder;
  735. bool isDerivedToBase = (E->getCastKind() == CK_DerivedToBaseMemberPointer);
  736. const MemberPointerType *destTy =
  737. E->getType()->castAs<MemberPointerType>();
  738. // For member data pointers, this is just a matter of adding the
  739. // offset if the source is non-null.
  740. if (destTy->isMemberDataPointer()) {
  741. llvm::Value *dst;
  742. if (isDerivedToBase)
  743. dst = Builder.CreateNSWSub(src, adj, "adj");
  744. else
  745. dst = Builder.CreateNSWAdd(src, adj, "adj");
  746. // Null check.
  747. llvm::Value *null = llvm::Constant::getAllOnesValue(src->getType());
  748. llvm::Value *isNull = Builder.CreateICmpEQ(src, null, "memptr.isnull");
  749. return Builder.CreateSelect(isNull, src, dst);
  750. }
  751. // The this-adjustment is left-shifted by 1 on ARM.
  752. if (UseARMMethodPtrABI) {
  753. uint64_t offset = cast<llvm::ConstantInt>(adj)->getZExtValue();
  754. offset <<= 1;
  755. adj = llvm::ConstantInt::get(adj->getType(), offset);
  756. }
  757. llvm::Value *srcAdj = Builder.CreateExtractValue(src, 1, "src.adj");
  758. llvm::Value *dstAdj;
  759. if (isDerivedToBase)
  760. dstAdj = Builder.CreateNSWSub(srcAdj, adj, "adj");
  761. else
  762. dstAdj = Builder.CreateNSWAdd(srcAdj, adj, "adj");
  763. return Builder.CreateInsertValue(src, dstAdj, 1);
  764. }
  765. llvm::Constant *
  766. ItaniumCXXABI::EmitMemberPointerConversion(const CastExpr *E,
  767. llvm::Constant *src) {
  768. assert(E->getCastKind() == CK_DerivedToBaseMemberPointer ||
  769. E->getCastKind() == CK_BaseToDerivedMemberPointer ||
  770. E->getCastKind() == CK_ReinterpretMemberPointer);
  771. // Under Itanium, reinterprets don't require any additional processing.
  772. if (E->getCastKind() == CK_ReinterpretMemberPointer) return src;
  773. // If the adjustment is trivial, we don't need to do anything.
  774. llvm::Constant *adj = getMemberPointerAdjustment(E);
  775. if (!adj) return src;
  776. bool isDerivedToBase = (E->getCastKind() == CK_DerivedToBaseMemberPointer);
  777. const MemberPointerType *destTy =
  778. E->getType()->castAs<MemberPointerType>();
  779. // For member data pointers, this is just a matter of adding the
  780. // offset if the source is non-null.
  781. if (destTy->isMemberDataPointer()) {
  782. // null maps to null.
  783. if (src->isAllOnesValue()) return src;
  784. if (isDerivedToBase)
  785. return llvm::ConstantExpr::getNSWSub(src, adj);
  786. else
  787. return llvm::ConstantExpr::getNSWAdd(src, adj);
  788. }
  789. // The this-adjustment is left-shifted by 1 on ARM.
  790. if (UseARMMethodPtrABI) {
  791. uint64_t offset = cast<llvm::ConstantInt>(adj)->getZExtValue();
  792. offset <<= 1;
  793. adj = llvm::ConstantInt::get(adj->getType(), offset);
  794. }
  795. llvm::Constant *srcAdj = llvm::ConstantExpr::getExtractValue(src, 1);
  796. llvm::Constant *dstAdj;
  797. if (isDerivedToBase)
  798. dstAdj = llvm::ConstantExpr::getNSWSub(srcAdj, adj);
  799. else
  800. dstAdj = llvm::ConstantExpr::getNSWAdd(srcAdj, adj);
  801. return llvm::ConstantExpr::getInsertValue(src, dstAdj, 1);
  802. }
  803. llvm::Constant *
  804. ItaniumCXXABI::EmitNullMemberPointer(const MemberPointerType *MPT) {
  805. // Itanium C++ ABI 2.3:
  806. // A NULL pointer is represented as -1.
  807. if (MPT->isMemberDataPointer())
  808. return llvm::ConstantInt::get(CGM.PtrDiffTy, -1ULL, /*isSigned=*/true);
  809. llvm::Constant *Zero = llvm::ConstantInt::get(CGM.PtrDiffTy, 0);
  810. llvm::Constant *Values[2] = { Zero, Zero };
  811. return llvm::ConstantStruct::getAnon(Values);
  812. }
llvm::Constant *
ItaniumCXXABI::EmitMemberDataPointer(const MemberPointerType *MPT,
                                     CharUnits offset) {
  // Itanium C++ ABI 2.3:
  //   A pointer to data member is an offset from the base address of
  //   the class object containing it, represented as a ptrdiff_t
  // The offset is emitted in bytes.
  return llvm::ConstantInt::get(CGM.PtrDiffTy, offset.getQuantity());
}
/// Emit the constant member pointer for taking the address of \p MD, with
/// no this-pointer adjustment.
llvm::Constant *
ItaniumCXXABI::EmitMemberFunctionPointer(const CXXMethodDecl *MD) {
  return BuildMemberPointer(MD, CharUnits::Zero());
}
/// Build the {ptr, adj} constant pair encoding a pointer to the member
/// function \p MD, applying \p ThisAdjustment (in bytes) as the required
/// this-pointer adjustment.
llvm::Constant *ItaniumCXXABI::BuildMemberPointer(const CXXMethodDecl *MD,
                                                  CharUnits ThisAdjustment) {
  assert(MD->isInstance() && "Member function must not be static!");

  CodeGenTypes &Types = CGM.getTypes();

  // Get the function pointer (or index if this is a virtual function).
  llvm::Constant *MemPtr[2];
  if (MD->isVirtual()) {
    uint64_t Index = CGM.getItaniumVTableContext().getMethodVTableIndex(MD);

    uint64_t VTableOffset;
    if (CGM.getItaniumVTableContext().isRelativeLayout()) {
      // Multiply by 4-byte relative offsets.
      VTableOffset = Index * 4;
    } else {
      // Scale the vtable index by the target's pointer width to get a
      // byte offset.
      const ASTContext &Context = getContext();
      CharUnits PointerWidth = Context.toCharUnitsFromBits(
          Context.getTargetInfo().getPointerWidth(0));
      VTableOffset = Index * PointerWidth.getQuantity();
    }

    if (UseARMMethodPtrABI) {
      // ARM C++ ABI 3.2.1:
      //   This ABI specifies that adj contains twice the this
      //   adjustment, plus 1 if the member function is virtual. The
      //   least significant bit of adj then makes exactly the same
      //   discrimination as the least significant bit of ptr does for
      //   Itanium.
      MemPtr[0] = llvm::ConstantInt::get(CGM.PtrDiffTy, VTableOffset);
      MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy,
                                         2 * ThisAdjustment.getQuantity() + 1);
    } else {
      // Itanium C++ ABI 2.3:
      //   For a virtual function, [the pointer field] is 1 plus the
      //   virtual table offset (in bytes) of the function,
      //   represented as a ptrdiff_t.
      MemPtr[0] = llvm::ConstantInt::get(CGM.PtrDiffTy, VTableOffset + 1);
      MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy,
                                         ThisAdjustment.getQuantity());
    }
  } else {
    const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
    llvm::Type *Ty;
    // Check whether the function has a computable LLVM signature.
    if (Types.isFuncTypeConvertible(FPT)) {
      // The function has a computable LLVM signature; use the correct type.
      Ty = Types.GetFunctionType(Types.arrangeCXXMethodDeclaration(MD));
    } else {
      // Use an arbitrary non-function type to tell GetAddrOfFunction that the
      // function type is incomplete.
      Ty = CGM.PtrDiffTy;
    }
    llvm::Constant *addr = CGM.GetAddrOfFunction(MD, Ty);

    // For non-virtual functions, ptr is the function address and adj is the
    // this-adjustment (doubled on ARM, where the low bit of adj is the
    // virtual flag and must be clear here).
    MemPtr[0] = llvm::ConstantExpr::getPtrToInt(addr, CGM.PtrDiffTy);
    MemPtr[1] = llvm::ConstantInt::get(CGM.PtrDiffTy,
                                       (UseARMMethodPtrABI ? 2 : 1) *
                                       ThisAdjustment.getQuantity());
  }

  return llvm::ConstantStruct::getAnon(MemPtr);
}
  882. llvm::Constant *ItaniumCXXABI::EmitMemberPointer(const APValue &MP,
  883. QualType MPType) {
  884. const MemberPointerType *MPT = MPType->castAs<MemberPointerType>();
  885. const ValueDecl *MPD = MP.getMemberPointerDecl();
  886. if (!MPD)
  887. return EmitNullMemberPointer(MPT);
  888. CharUnits ThisAdjustment = getContext().getMemberPointerPathAdjustment(MP);
  889. if (const CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MPD))
  890. return BuildMemberPointer(MD, ThisAdjustment);
  891. CharUnits FieldOffset =
  892. getContext().toCharUnitsFromBits(getContext().getFieldOffset(MPD));
  893. return EmitMemberDataPointer(MPT, ThisAdjustment + FieldOffset);
  894. }
/// The comparison algorithm is pretty easy: the member pointers are
/// the same if they're either bitwise identical *or* both null.
///
/// ARM is different here only because null-ness is more complicated.
llvm::Value *
ItaniumCXXABI::EmitMemberPointerComparison(CodeGenFunction &CGF,
                                           llvm::Value *L,
                                           llvm::Value *R,
                                           const MemberPointerType *MPT,
                                           bool Inequality) {
  CGBuilderTy &Builder = CGF.Builder;

  // For inequality we apply De Morgan's laws: use the NE predicate and
  // swap the roles of the And/Or combining operators.
  llvm::ICmpInst::Predicate Eq;
  llvm::Instruction::BinaryOps And, Or;
  if (Inequality) {
    Eq = llvm::ICmpInst::ICMP_NE;
    And = llvm::Instruction::Or;
    Or = llvm::Instruction::And;
  } else {
    Eq = llvm::ICmpInst::ICMP_EQ;
    And = llvm::Instruction::And;
    Or = llvm::Instruction::Or;
  }

  // Member data pointers are easy because there's a unique null
  // value, so it just comes down to bitwise equality.
  if (MPT->isMemberDataPointer())
    return Builder.CreateICmp(Eq, L, R);

  // For member function pointers, the tautologies are more complex.
  // The Itanium tautology is:
  //   (L == R) <==> (L.ptr == R.ptr && (L.ptr == 0 || L.adj == R.adj))
  // The ARM tautology is:
  //   (L == R) <==> (L.ptr == R.ptr &&
  //                  (L.adj == R.adj ||
  //                   (L.ptr == 0 && ((L.adj|R.adj) & 1) == 0)))
  // The inequality tautologies have exactly the same structure, except
  // applying De Morgan's laws.

  llvm::Value *LPtr = Builder.CreateExtractValue(L, 0, "lhs.memptr.ptr");
  llvm::Value *RPtr = Builder.CreateExtractValue(R, 0, "rhs.memptr.ptr");

  // This condition tests whether L.ptr == R.ptr.  This must always be
  // true for equality to hold.
  llvm::Value *PtrEq = Builder.CreateICmp(Eq, LPtr, RPtr, "cmp.ptr");

  // This condition, together with the assumption that L.ptr == R.ptr,
  // tests whether the pointers are both null.  ARM imposes an extra
  // condition.
  llvm::Value *Zero = llvm::Constant::getNullValue(LPtr->getType());
  llvm::Value *EqZero = Builder.CreateICmp(Eq, LPtr, Zero, "cmp.ptr.null");

  // This condition tests whether L.adj == R.adj.  If this isn't
  // true, the pointers are unequal unless they're both null.
  llvm::Value *LAdj = Builder.CreateExtractValue(L, 1, "lhs.memptr.adj");
  llvm::Value *RAdj = Builder.CreateExtractValue(R, 1, "rhs.memptr.adj");
  llvm::Value *AdjEq = Builder.CreateICmp(Eq, LAdj, RAdj, "cmp.adj");

  // Null member function pointers on ARM clear the low bit of Adj,
  // so the zero condition has to check that neither low bit is set.
  if (UseARMMethodPtrABI) {
    llvm::Value *One = llvm::ConstantInt::get(LPtr->getType(), 1);

    // Compute (l.adj | r.adj) & 1 and test it against zero.
    llvm::Value *OrAdj = Builder.CreateOr(LAdj, RAdj, "or.adj");
    llvm::Value *OrAdjAnd1 = Builder.CreateAnd(OrAdj, One);
    llvm::Value *OrAdjAnd1EqZero = Builder.CreateICmp(Eq, OrAdjAnd1, Zero,
                                                      "cmp.or.adj");
    EqZero = Builder.CreateBinOp(And, EqZero, OrAdjAnd1EqZero);
  }

  // Tie together all our conditions.
  llvm::Value *Result = Builder.CreateBinOp(Or, EqZero, AdjEq);
  Result = Builder.CreateBinOp(And, PtrEq, Result,
                               Inequality ? "memptr.ne" : "memptr.eq");
  return Result;
}
  962. llvm::Value *
  963. ItaniumCXXABI::EmitMemberPointerIsNotNull(CodeGenFunction &CGF,
  964. llvm::Value *MemPtr,
  965. const MemberPointerType *MPT) {
  966. CGBuilderTy &Builder = CGF.Builder;
  967. /// For member data pointers, this is just a check against -1.
  968. if (MPT->isMemberDataPointer()) {
  969. assert(MemPtr->getType() == CGM.PtrDiffTy);
  970. llvm::Value *NegativeOne =
  971. llvm::Constant::getAllOnesValue(MemPtr->getType());
  972. return Builder.CreateICmpNE(MemPtr, NegativeOne, "memptr.tobool");
  973. }
  974. // In Itanium, a member function pointer is not null if 'ptr' is not null.
  975. llvm::Value *Ptr = Builder.CreateExtractValue(MemPtr, 0, "memptr.ptr");
  976. llvm::Constant *Zero = llvm::ConstantInt::get(Ptr->getType(), 0);
  977. llvm::Value *Result = Builder.CreateICmpNE(Ptr, Zero, "memptr.tobool");
  978. // On ARM, a member function pointer is also non-null if the low bit of 'adj'
  979. // (the virtual bit) is set.
  980. if (UseARMMethodPtrABI) {
  981. llvm::Constant *One = llvm::ConstantInt::get(Ptr->getType(), 1);
  982. llvm::Value *Adj = Builder.CreateExtractValue(MemPtr, 1, "memptr.adj");
  983. llvm::Value *VirtualBit = Builder.CreateAnd(Adj, One, "memptr.virtualbit");
  984. llvm::Value *IsVirtual = Builder.CreateICmpNE(VirtualBit, Zero,
  985. "memptr.isvirtual");
  986. Result = Builder.CreateOr(Result, IsVirtual);
  987. }
  988. return Result;
  989. }
  990. bool ItaniumCXXABI::classifyReturnType(CGFunctionInfo &FI) const {
  991. const CXXRecordDecl *RD = FI.getReturnType()->getAsCXXRecordDecl();
  992. if (!RD)
  993. return false;
  994. // If C++ prohibits us from making a copy, return by address.
  995. if (!RD->canPassInRegisters()) {
  996. auto Align = CGM.getContext().getTypeAlignInChars(FI.getReturnType());
  997. FI.getReturnInfo() = ABIArgInfo::getIndirect(Align, /*ByVal=*/false);
  998. return true;
  999. }
  1000. return false;
  1001. }
  1002. /// The Itanium ABI requires non-zero initialization only for data
  1003. /// member pointers, for which '0' is a valid offset.
  1004. bool ItaniumCXXABI::isZeroInitializable(const MemberPointerType *MPT) {
  1005. return MPT->isMemberFunctionPointer();
  1006. }
/// The Itanium ABI always places an offset to the complete object
/// at entry -2 in the vtable.
void ItaniumCXXABI::emitVirtualObjectDelete(CodeGenFunction &CGF,
                                            const CXXDeleteExpr *DE,
                                            Address Ptr,
                                            QualType ElementType,
                                            const CXXDestructorDecl *Dtor) {
  bool UseGlobalDelete = DE->isGlobalDelete();
  if (UseGlobalDelete) {
    // Derive the complete-object pointer, which is what we need
    // to pass to the deallocation function.

    // Grab the vtable pointer as an intptr_t*.
    auto *ClassDecl =
        cast<CXXRecordDecl>(ElementType->castAs<RecordType>()->getDecl());
    llvm::Value *VTable =
        CGF.GetVTablePtr(Ptr, CGF.IntPtrTy->getPointerTo(), ClassDecl);

    // Track back to entry -2 and pull out the offset there.
    llvm::Value *OffsetPtr = CGF.Builder.CreateConstInBoundsGEP1_64(
        CGF.IntPtrTy, VTable, -2, "complete-offset.ptr");
    llvm::Value *Offset = CGF.Builder.CreateAlignedLoad(CGF.IntPtrTy, OffsetPtr, CGF.getPointerAlign());

    // Apply the offset.
    llvm::Value *CompletePtr =
        CGF.Builder.CreateBitCast(Ptr.getPointer(), CGF.Int8PtrTy);
    CompletePtr =
        CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, CompletePtr, Offset);

    // If we're supposed to call the global delete, make sure we do so
    // even if the destructor throws.
    CGF.pushCallObjectDeleteCleanup(DE->getOperatorDelete(), CompletePtr,
                                    ElementType);
  }

  // FIXME: Provide a source location here even though there's no
  // CXXMemberCallExpr for dtor call.
  CXXDtorType DtorType = UseGlobalDelete ? Dtor_Complete : Dtor_Deleting;
  EmitVirtualDestructorCall(CGF, Dtor, DtorType, Ptr, DE);

  // Pop (and emit) the operator-delete cleanup pushed above now that the
  // destructor call has been emitted.
  if (UseGlobalDelete)
    CGF.PopCleanupBlock();
}
  1044. void ItaniumCXXABI::emitRethrow(CodeGenFunction &CGF, bool isNoReturn) {
  1045. // void __cxa_rethrow();
  1046. llvm::FunctionType *FTy =
  1047. llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  1048. llvm::FunctionCallee Fn = CGM.CreateRuntimeFunction(FTy, "__cxa_rethrow");
  1049. if (isNoReturn)
  1050. CGF.EmitNoreturnRuntimeCallOrInvoke(Fn, None);
  1051. else
  1052. CGF.EmitRuntimeCallOrInvoke(Fn);
  1053. }
  1054. static llvm::FunctionCallee getAllocateExceptionFn(CodeGenModule &CGM) {
  1055. // void *__cxa_allocate_exception(size_t thrown_size);
  1056. llvm::FunctionType *FTy =
  1057. llvm::FunctionType::get(CGM.Int8PtrTy, CGM.SizeTy, /*isVarArg=*/false);
  1058. return CGM.CreateRuntimeFunction(FTy, "__cxa_allocate_exception");
  1059. }
  1060. static llvm::FunctionCallee getThrowFn(CodeGenModule &CGM) {
  1061. // void __cxa_throw(void *thrown_exception, std::type_info *tinfo,
  1062. // void (*dest) (void *));
  1063. llvm::Type *Args[3] = { CGM.Int8PtrTy, CGM.Int8PtrTy, CGM.Int8PtrTy };
  1064. llvm::FunctionType *FTy =
  1065. llvm::FunctionType::get(CGM.VoidTy, Args, /*isVarArg=*/false);
  1066. return CGM.CreateRuntimeFunction(FTy, "__cxa_throw");
  1067. }
void ItaniumCXXABI::emitThrow(CodeGenFunction &CGF, const CXXThrowExpr *E) {
  QualType ThrowType = E->getSubExpr()->getType();

  // First allocate the exception object, passing __cxa_allocate_exception
  // the size of the thrown type.
  llvm::Type *SizeTy = CGF.ConvertType(getContext().getSizeType());
  uint64_t TypeSize = getContext().getTypeSizeInChars(ThrowType).getQuantity();

  llvm::FunctionCallee AllocExceptionFn = getAllocateExceptionFn(CGM);
  llvm::CallInst *ExceptionPtr = CGF.EmitNounwindRuntimeCall(
      AllocExceptionFn, llvm::ConstantInt::get(SizeTy, TypeSize), "exception");

  // Copy/construct the thrown value into the freshly allocated buffer.
  CharUnits ExnAlign = CGF.getContext().getExnObjectAlignment();
  CGF.EmitAnyExprToExn(
      E->getSubExpr(), Address(ExceptionPtr, CGM.Int8Ty, ExnAlign));

  // Now throw the exception.
  llvm::Constant *TypeInfo = CGM.GetAddrOfRTTIDescriptor(ThrowType,
                                                         /*ForEH=*/true);

  // The address of the destructor.  If the exception type has a
  // trivial destructor (or isn't a record), we just pass null.
  llvm::Constant *Dtor = nullptr;
  if (const RecordType *RecordTy = ThrowType->getAs<RecordType>()) {
    CXXRecordDecl *Record = cast<CXXRecordDecl>(RecordTy->getDecl());
    if (!Record->hasTrivialDestructor()) {
      // __cxa_throw expects the destructor as a plain void(*)(void*).
      CXXDestructorDecl *DtorD = Record->getDestructor();
      Dtor = CGM.getAddrOfCXXStructor(GlobalDecl(DtorD, Dtor_Complete));
      Dtor = llvm::ConstantExpr::getBitCast(Dtor, CGM.Int8PtrTy);
    }
  }
  if (!Dtor) Dtor = llvm::Constant::getNullValue(CGM.Int8PtrTy);

  // __cxa_throw never returns.
  llvm::Value *args[] = { ExceptionPtr, TypeInfo, Dtor };
  CGF.EmitNoreturnRuntimeCallOrInvoke(getThrowFn(CGM), args);
}
  1097. static llvm::FunctionCallee getItaniumDynamicCastFn(CodeGenFunction &CGF) {
  1098. // void *__dynamic_cast(const void *sub,
  1099. // const abi::__class_type_info *src,
  1100. // const abi::__class_type_info *dst,
  1101. // std::ptrdiff_t src2dst_offset);
  1102. llvm::Type *Int8PtrTy = CGF.Int8PtrTy;
  1103. llvm::Type *PtrDiffTy =
  1104. CGF.ConvertType(CGF.getContext().getPointerDiffType());
  1105. llvm::Type *Args[4] = { Int8PtrTy, Int8PtrTy, Int8PtrTy, PtrDiffTy };
  1106. llvm::FunctionType *FTy = llvm::FunctionType::get(Int8PtrTy, Args, false);
  1107. // Mark the function as nounwind readonly.
  1108. llvm::Attribute::AttrKind FuncAttrs[] = { llvm::Attribute::NoUnwind,
  1109. llvm::Attribute::ReadOnly };
  1110. llvm::AttributeList Attrs = llvm::AttributeList::get(
  1111. CGF.getLLVMContext(), llvm::AttributeList::FunctionIndex, FuncAttrs);
  1112. return CGF.CGM.CreateRuntimeFunction(FTy, "__dynamic_cast", Attrs);
  1113. }
  1114. static llvm::FunctionCallee getBadCastFn(CodeGenFunction &CGF) {
  1115. // void __cxa_bad_cast();
  1116. llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false);
  1117. return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_cast");
  1118. }
  1119. /// Compute the src2dst_offset hint as described in the
  1120. /// Itanium C++ ABI [2.9.7]
  1121. static CharUnits computeOffsetHint(ASTContext &Context,
  1122. const CXXRecordDecl *Src,
  1123. const CXXRecordDecl *Dst) {
  1124. CXXBasePaths Paths(/*FindAmbiguities=*/true, /*RecordPaths=*/true,
  1125. /*DetectVirtual=*/false);
  1126. // If Dst is not derived from Src we can skip the whole computation below and
  1127. // return that Src is not a public base of Dst. Record all inheritance paths.
  1128. if (!Dst->isDerivedFrom(Src, Paths))
  1129. return CharUnits::fromQuantity(-2ULL);
  1130. unsigned NumPublicPaths = 0;
  1131. CharUnits Offset;
  1132. // Now walk all possible inheritance paths.
  1133. for (const CXXBasePath &Path : Paths) {
  1134. if (Path.Access != AS_public) // Ignore non-public inheritance.
  1135. continue;
  1136. ++NumPublicPaths;
  1137. for (const CXXBasePathElement &PathElement : Path) {
  1138. // If the path contains a virtual base class we can't give any hint.
  1139. // -1: no hint.
  1140. if (PathElement.Base->isVirtual())
  1141. return CharUnits::fromQuantity(-1ULL);
  1142. if (NumPublicPaths > 1) // Won't use offsets, skip computation.
  1143. continue;
  1144. // Accumulate the base class offsets.
  1145. const ASTRecordLayout &L = Context.getASTRecordLayout(PathElement.Class);
  1146. Offset += L.getBaseClassOffset(
  1147. PathElement.Base->getType()->getAsCXXRecordDecl());
  1148. }
  1149. }
  1150. // -2: Src is not a public base of Dst.
  1151. if (NumPublicPaths == 0)
  1152. return CharUnits::fromQuantity(-2ULL);
  1153. // -3: Src is a multiple public base type but never a virtual base type.
  1154. if (NumPublicPaths > 1)
  1155. return CharUnits::fromQuantity(-3ULL);
  1156. // Otherwise, the Src type is a unique public nonvirtual base type of Dst.
  1157. // Return the offset of Src from the origin of Dst.
  1158. return Offset;
  1159. }
  1160. static llvm::FunctionCallee getBadTypeidFn(CodeGenFunction &CGF) {
  1161. // void __cxa_bad_typeid();
  1162. llvm::FunctionType *FTy = llvm::FunctionType::get(CGF.VoidTy, false);
  1163. return CGF.CGM.CreateRuntimeFunction(FTy, "__cxa_bad_typeid");
  1164. }
bool ItaniumCXXABI::shouldTypeidBeNullChecked(bool IsDeref,
                                              QualType SrcRecordTy) {
  // Only 'typeid(*p)' needs a null check: a null glvalue operand must
  // throw std::bad_typeid rather than dereference.
  return IsDeref;
}
  1169. void ItaniumCXXABI::EmitBadTypeidCall(CodeGenFunction &CGF) {
  1170. llvm::FunctionCallee Fn = getBadTypeidFn(CGF);
  1171. llvm::CallBase *Call = CGF.EmitRuntimeCallOrInvoke(Fn);
  1172. Call->setDoesNotReturn();
  1173. CGF.Builder.CreateUnreachable();
  1174. }
llvm::Value *ItaniumCXXABI::EmitTypeid(CodeGenFunction &CGF,
                                       QualType SrcRecordTy,
                                       Address ThisPtr,
                                       llvm::Type *StdTypeInfoPtrTy) {
  // typeid() of a polymorphic glvalue: the std::type_info pointer lives
  // in the vtable, immediately before the address point.
  auto *ClassDecl =
      cast<CXXRecordDecl>(SrcRecordTy->castAs<RecordType>()->getDecl());
  llvm::Value *Value =
      CGF.GetVTablePtr(ThisPtr, StdTypeInfoPtrTy->getPointerTo(), ClassDecl);

  if (CGM.getItaniumVTableContext().isRelativeLayout()) {
    // Load the type info.
    // In the relative layout, the slot at byte offset -4 holds a 32-bit
    // relative reference that load_relative resolves for us.
    Value = CGF.Builder.CreateBitCast(Value, CGM.Int8PtrTy);
    Value = CGF.Builder.CreateCall(
        CGM.getIntrinsic(llvm::Intrinsic::load_relative, {CGM.Int32Ty}),
        {Value, llvm::ConstantInt::get(CGM.Int32Ty, -4)});

    // Setup to dereference again since this is a proxy we accessed.
    Value = CGF.Builder.CreateBitCast(Value, StdTypeInfoPtrTy->getPointerTo());
  } else {
    // Load the type info.
    // Classic layout: the type-info pointer is the slot at index -1.
    Value =
        CGF.Builder.CreateConstInBoundsGEP1_64(StdTypeInfoPtrTy, Value, -1ULL);
  }
  return CGF.Builder.CreateAlignedLoad(StdTypeInfoPtrTy, Value,
                                       CGF.getPointerAlign());
}
bool ItaniumCXXABI::shouldDynamicCastCallBeNullChecked(bool SrcIsPtr,
                                                       QualType SrcRecordTy) {
  // dynamic_cast of a null pointer yields a null pointer without invoking
  // the runtime, so pointer casts get an explicit null check up front.
  return SrcIsPtr;
}
llvm::Value *ItaniumCXXABI::EmitDynamicCastCall(
    CodeGenFunction &CGF, Address ThisAddr, QualType SrcRecordTy,
    QualType DestTy, QualType DestRecordTy, llvm::BasicBlock *CastEnd) {
  // Lower the cast to a call to the runtime entry point:
  //   __dynamic_cast(sub, src_rtti, dst_rtti, src2dst_offset)
  llvm::Type *PtrDiffLTy =
      CGF.ConvertType(CGF.getContext().getPointerDiffType());
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);

  llvm::Value *SrcRTTI =
      CGF.CGM.GetAddrOfRTTIDescriptor(SrcRecordTy.getUnqualifiedType());
  llvm::Value *DestRTTI =
      CGF.CGM.GetAddrOfRTTIDescriptor(DestRecordTy.getUnqualifiedType());

  // Compute the offset hint (see Itanium ABI 2.9.7) so the runtime can
  // take a fast path for unique public non-virtual bases.
  const CXXRecordDecl *SrcDecl = SrcRecordTy->getAsCXXRecordDecl();
  const CXXRecordDecl *DestDecl = DestRecordTy->getAsCXXRecordDecl();
  llvm::Value *OffsetHint = llvm::ConstantInt::get(
      PtrDiffLTy,
      computeOffsetHint(CGF.getContext(), SrcDecl, DestDecl).getQuantity());

  // Emit the call to __dynamic_cast.
  llvm::Value *Value = ThisAddr.getPointer();
  Value = CGF.EmitCastToVoidPtr(Value);

  llvm::Value *args[] = {Value, SrcRTTI, DestRTTI, OffsetHint};
  Value = CGF.EmitNounwindRuntimeCall(getItaniumDynamicCastFn(CGF), args);
  Value = CGF.Builder.CreateBitCast(Value, DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  if (DestTy->isReferenceType()) {
    llvm::BasicBlock *BadCastBlock =
        CGF.createBasicBlock("dynamic_cast.bad_cast");

    // A null result from the runtime means the cast failed: branch to a
    // block that calls __cxa_bad_cast (and never returns); otherwise
    // continue at CastEnd.
    llvm::Value *IsNull = CGF.Builder.CreateIsNull(Value);
    CGF.Builder.CreateCondBr(IsNull, BadCastBlock, CastEnd);

    CGF.EmitBlock(BadCastBlock);
    EmitBadCastCall(CGF);
  }

  return Value;
}
llvm::Value *ItaniumCXXABI::EmitDynamicCastToVoid(CodeGenFunction &CGF,
                                                  Address ThisAddr,
                                                  QualType SrcRecordTy,
                                                  QualType DestTy) {
  // dynamic_cast<void*> produces a pointer to the most-derived object by
  // adding the vtable's offset-to-top entry to the source pointer.
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  auto *ClassDecl =
      cast<CXXRecordDecl>(SrcRecordTy->castAs<RecordType>()->getDecl());
  llvm::Value *OffsetToTop;
  if (CGM.getItaniumVTableContext().isRelativeLayout()) {
    // Get the vtable pointer.
    llvm::Value *VTable =
        CGF.GetVTablePtr(ThisAddr, CGM.Int32Ty->getPointerTo(), ClassDecl);

    // Get the offset-to-top from the vtable.
    // In the relative layout it is a 32-bit entry two slots before the
    // address point.
    OffsetToTop =
        CGF.Builder.CreateConstInBoundsGEP1_32(CGM.Int32Ty, VTable, -2U);
    OffsetToTop = CGF.Builder.CreateAlignedLoad(
        CGM.Int32Ty, OffsetToTop, CharUnits::fromQuantity(4), "offset.to.top");
  } else {
    llvm::Type *PtrDiffLTy =
        CGF.ConvertType(CGF.getContext().getPointerDiffType());

    // Get the vtable pointer.
    llvm::Value *VTable =
        CGF.GetVTablePtr(ThisAddr, PtrDiffLTy->getPointerTo(), ClassDecl);

    // Get the offset-to-top from the vtable.
    // Classic layout: a ptrdiff_t-sized entry at index -2.
    OffsetToTop =
        CGF.Builder.CreateConstInBoundsGEP1_64(PtrDiffLTy, VTable, -2ULL);
    OffsetToTop = CGF.Builder.CreateAlignedLoad(
        PtrDiffLTy, OffsetToTop, CGF.getPointerAlign(), "offset.to.top");
  }
  // Finally, add the offset to the pointer.
  llvm::Value *Value = ThisAddr.getPointer();
  Value = CGF.EmitCastToVoidPtr(Value);
  Value = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, Value, OffsetToTop);
  return CGF.Builder.CreateBitCast(Value, DestLTy);
}
  1272. bool ItaniumCXXABI::EmitBadCastCall(CodeGenFunction &CGF) {
  1273. llvm::FunctionCallee Fn = getBadCastFn(CGF);
  1274. llvm::CallBase *Call = CGF.EmitRuntimeCallOrInvoke(Fn);
  1275. Call->setDoesNotReturn();
  1276. CGF.Builder.CreateUnreachable();
  1277. return true;
  1278. }
llvm::Value *
ItaniumCXXABI::GetVirtualBaseClassOffset(CodeGenFunction &CGF,
                                         Address This,
                                         const CXXRecordDecl *ClassDecl,
                                         const CXXRecordDecl *BaseClassDecl) {
  // The offset of a virtual base is stored in the vtable, at a statically
  // known offset (the "vbase offset offset") from the address point.
  llvm::Value *VTablePtr = CGF.GetVTablePtr(This, CGM.Int8PtrTy, ClassDecl);
  CharUnits VBaseOffsetOffset =
      CGM.getItaniumVTableContext().getVirtualBaseOffsetOffset(ClassDecl,
                                                               BaseClassDecl);
  llvm::Value *VBaseOffsetPtr =
    CGF.Builder.CreateConstGEP1_64(
        CGF.Int8Ty, VTablePtr, VBaseOffsetOffset.getQuantity(),
        "vbase.offset.ptr");

  llvm::Value *VBaseOffset;
  if (CGM.getItaniumVTableContext().isRelativeLayout()) {
    // The relative layout stores vbase offsets as 32-bit entries.
    VBaseOffsetPtr =
        CGF.Builder.CreateBitCast(VBaseOffsetPtr, CGF.Int32Ty->getPointerTo());
    VBaseOffset = CGF.Builder.CreateAlignedLoad(
        CGF.Int32Ty, VBaseOffsetPtr, CharUnits::fromQuantity(4),
        "vbase.offset");
  } else {
    // Classic layout uses a pointer-width ptrdiff_t entry.
    VBaseOffsetPtr = CGF.Builder.CreateBitCast(VBaseOffsetPtr,
                                               CGM.PtrDiffTy->getPointerTo());
    VBaseOffset = CGF.Builder.CreateAlignedLoad(
        CGM.PtrDiffTy, VBaseOffsetPtr, CGF.getPointerAlign(), "vbase.offset");
  }
  return VBaseOffset;
}
  1307. void ItaniumCXXABI::EmitCXXConstructors(const CXXConstructorDecl *D) {
  1308. // Just make sure we're in sync with TargetCXXABI.
  1309. assert(CGM.getTarget().getCXXABI().hasConstructorVariants());
  1310. // The constructor used for constructing this as a base class;
  1311. // ignores virtual bases.
  1312. CGM.EmitGlobal(GlobalDecl(D, Ctor_Base));
  1313. // The constructor used for constructing this as a complete class;
  1314. // constructs the virtual bases, then calls the base constructor.
  1315. if (!D->getParent()->isAbstract()) {
  1316. // We don't need to emit the complete ctor if the class is abstract.
  1317. CGM.EmitGlobal(GlobalDecl(D, Ctor_Complete));
  1318. }
  1319. }
  1320. CGCXXABI::AddedStructorArgCounts
  1321. ItaniumCXXABI::buildStructorSignature(GlobalDecl GD,
  1322. SmallVectorImpl<CanQualType> &ArgTys) {
  1323. ASTContext &Context = getContext();
  1324. // All parameters are already in place except VTT, which goes after 'this'.
  1325. // These are Clang types, so we don't need to worry about sret yet.
  1326. // Check if we need to add a VTT parameter (which has type void **).
  1327. if ((isa<CXXConstructorDecl>(GD.getDecl()) ? GD.getCtorType() == Ctor_Base
  1328. : GD.getDtorType() == Dtor_Base) &&
  1329. cast<CXXMethodDecl>(GD.getDecl())->getParent()->getNumVBases() != 0) {
  1330. ArgTys.insert(ArgTys.begin() + 1,
  1331. Context.getPointerType(Context.VoidPtrTy));
  1332. return AddedStructorArgCounts::prefix(1);
  1333. }
  1334. return AddedStructorArgCounts{};
  1335. }
  1336. void ItaniumCXXABI::EmitCXXDestructors(const CXXDestructorDecl *D) {
  1337. // The destructor used for destructing this as a base class; ignores
  1338. // virtual bases.
  1339. CGM.EmitGlobal(GlobalDecl(D, Dtor_Base));
  1340. // The destructor used for destructing this as a most-derived class;
  1341. // call the base destructor and then destructs any virtual bases.
  1342. CGM.EmitGlobal(GlobalDecl(D, Dtor_Complete));
  1343. // The destructor in a virtual table is always a 'deleting'
  1344. // destructor, which calls the complete destructor and then uses the
  1345. // appropriate operator delete.
  1346. if (D->isVirtual())
  1347. CGM.EmitGlobal(GlobalDecl(D, Dtor_Deleting));
  1348. }
void ItaniumCXXABI::addImplicitStructorParams(CodeGenFunction &CGF,
                                              QualType &ResTy,
                                              FunctionArgList &Params) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(CGF.CurGD.getDecl());
  assert(isa<CXXConstructorDecl>(MD) || isa<CXXDestructorDecl>(MD));

  // Check if we need a VTT parameter as well.
  if (NeedsVTTParameter(CGF.CurGD)) {
    ASTContext &Context = getContext();

    // FIXME: avoid the fake decl
    // The VTT is a 'void **' inserted immediately after 'this'.
    QualType T = Context.getPointerType(Context.VoidPtrTy);
    auto *VTTDecl = ImplicitParamDecl::Create(
        Context, /*DC=*/nullptr, MD->getLocation(), &Context.Idents.get("vtt"),
        T, ImplicitParamDecl::CXXVTT);
    Params.insert(Params.begin() + 1, VTTDecl);
    // Remember the decl so the prolog can load the parameter's value.
    getStructorImplicitParamDecl(CGF) = VTTDecl;
  }
}
void ItaniumCXXABI::EmitInstanceFunctionProlog(CodeGenFunction &CGF) {
  // Naked functions have no prolog.
  if (CGF.CurFuncDecl && CGF.CurFuncDecl->hasAttr<NakedAttr>())
    return;

  /// Initialize the 'this' slot. In the Itanium C++ ABI, no prologue
  /// adjustments are required, because they are all handled by thunks.
  setCXXABIThisValue(CGF, loadIncomingCXXThis(CGF));

  /// Initialize the 'vtt' slot if needed (set up earlier by
  /// addImplicitStructorParams for base-object structors).
  if (getStructorImplicitParamDecl(CGF)) {
    getStructorImplicitParamValue(CGF) = CGF.Builder.CreateLoad(
        CGF.GetAddrOfLocalVar(getStructorImplicitParamDecl(CGF)), "vtt");
  }

  /// If this is a function that the ABI specifies returns 'this', initialize
  /// the return slot to 'this' at the start of the function.
  ///
  /// Unlike the setting of return types, this is done within the ABI
  /// implementation instead of by clients of CGCXXABI because:
  /// 1) getThisValue is currently protected
  /// 2) in theory, an ABI could implement 'this' returns some other way;
  ///    HasThisReturn only specifies a contract, not the implementation
  if (HasThisReturn(CGF.CurGD))
    CGF.Builder.CreateStore(getThisValue(CGF), CGF.ReturnValue);
}
  1389. CGCXXABI::AddedStructorArgs ItaniumCXXABI::getImplicitConstructorArgs(
  1390. CodeGenFunction &CGF, const CXXConstructorDecl *D, CXXCtorType Type,
  1391. bool ForVirtualBase, bool Delegating) {
  1392. if (!NeedsVTTParameter(GlobalDecl(D, Type)))
  1393. return AddedStructorArgs{};
  1394. // Insert the implicit 'vtt' argument as the second argument.
  1395. llvm::Value *VTT =
  1396. CGF.GetVTTParameter(GlobalDecl(D, Type), ForVirtualBase, Delegating);
  1397. QualType VTTTy = getContext().getPointerType(getContext().VoidPtrTy);
  1398. return AddedStructorArgs::prefix({{VTT, VTTTy}});
  1399. }
  1400. llvm::Value *ItaniumCXXABI::getCXXDestructorImplicitParam(
  1401. CodeGenFunction &CGF, const CXXDestructorDecl *DD, CXXDtorType Type,
  1402. bool ForVirtualBase, bool Delegating) {
  1403. GlobalDecl GD(DD, Type);
  1404. return CGF.GetVTTParameter(GD, ForVirtualBase, Delegating);
  1405. }
void ItaniumCXXABI::EmitDestructorCall(CodeGenFunction &CGF,
                                       const CXXDestructorDecl *DD,
                                       CXXDtorType Type, bool ForVirtualBase,
                                       bool Delegating, Address This,
                                       QualType ThisTy) {
  GlobalDecl GD(DD, Type);
  // Pass the VTT (if any) as the implicit second argument.
  llvm::Value *VTT =
      getCXXDestructorImplicitParam(CGF, DD, Type, ForVirtualBase, Delegating);
  QualType VTTTy = getContext().getPointerType(getContext().VoidPtrTy);

  CGCallee Callee;
  // Apple kexts use their own virtual-dispatch mechanism for virtual
  // destructors; everything else calls the structor directly.
  if (getContext().getLangOpts().AppleKext &&
      Type != Dtor_Base && DD->isVirtual())
    Callee = CGF.BuildAppleKextVirtualDestructorCall(DD, Type, DD->getParent());
  else
    Callee = CGCallee::forDirect(CGM.getAddrOfCXXStructor(GD), GD);

  CGF.EmitCXXDestructorCall(GD, Callee, This.getPointer(), ThisTy, VTT, VTTTy,
                            nullptr);
}
void ItaniumCXXABI::emitVTableDefinitions(CodeGenVTables &CGVT,
                                          const CXXRecordDecl *RD) {
  llvm::GlobalVariable *VTable = getAddrOfVTable(RD, CharUnits());
  // Already defined; nothing to do.
  if (VTable->hasInitializer())
    return;

  ItaniumVTableContext &VTContext = CGM.getItaniumVTableContext();
  const VTableLayout &VTLayout = VTContext.getVTableLayout(RD);
  llvm::GlobalVariable::LinkageTypes Linkage = CGM.getVTableLinkage(RD);
  llvm::Constant *RTTI =
      CGM.GetAddrOfRTTIDescriptor(CGM.getContext().getTagDeclType(RD));

  // Create and set the initializer.
  ConstantInitBuilder builder(CGM);
  auto components = builder.beginStruct();
  CGVT.createVTableInitializer(components, VTLayout, RTTI,
                               llvm::GlobalValue::isLocalLinkage(Linkage));
  components.finishAndSetAsInitializer(VTable);

  // Set the correct linkage.
  VTable->setLinkage(Linkage);

  if (CGM.supportsCOMDAT() && VTable->isWeakForLinker())
    VTable->setComdat(CGM.getModule().getOrInsertComdat(VTable->getName()));

  // Set the right visibility.
  CGM.setGVProperties(VTable, RD);

  // If this is the magic class __cxxabiv1::__fundamental_type_info,
  // we will emit the typeinfo for the fundamental types. This is the
  // same behaviour as GCC.
  const DeclContext *DC = RD->getDeclContext();
  if (RD->getIdentifier() &&
      RD->getIdentifier()->isStr("__fundamental_type_info") &&
      isa<NamespaceDecl>(DC) && cast<NamespaceDecl>(DC)->getIdentifier() &&
      cast<NamespaceDecl>(DC)->getIdentifier()->isStr("__cxxabiv1") &&
      DC->getParent()->isTranslationUnit())
    EmitFundamentalRTTIDescriptors(RD);

  // Always emit type metadata on non-available_externally definitions, and on
  // available_externally definitions if we are performing whole program
  // devirtualization. For WPD we need the type metadata on all vtable
  // definitions to ensure we associate derived classes with base classes
  // defined in headers but with a strong definition only in a shared library.
  if (!VTable->isDeclarationForLinker() ||
      CGM.getCodeGenOpts().WholeProgramVTables) {
    CGM.EmitVTableTypeMetadata(RD, VTable, VTLayout);
    // For available_externally definitions, add the vtable to
    // @llvm.compiler.used so that it isn't deleted before whole program
    // analysis.
    if (VTable->isDeclarationForLinker()) {
      assert(CGM.getCodeGenOpts().WholeProgramVTables);
      CGM.addCompilerUsedGlobal(VTable);
    }
  }

  // For non-dso_local relative vtables, emit a dso_local alias so that
  // references from within this module can stay position-independent.
  if (VTContext.isRelativeLayout() && !VTable->isDSOLocal())
    CGVT.GenerateRelativeVTableAlias(VTable, VTable->getName());
}
  1475. bool ItaniumCXXABI::isVirtualOffsetNeededForVTableField(
  1476. CodeGenFunction &CGF, CodeGenFunction::VPtr Vptr) {
  1477. if (Vptr.NearestVBase == nullptr)
  1478. return false;
  1479. return NeedsVTTParameter(CGF.CurGD);
  1480. }
  1481. llvm::Value *ItaniumCXXABI::getVTableAddressPointInStructor(
  1482. CodeGenFunction &CGF, const CXXRecordDecl *VTableClass, BaseSubobject Base,
  1483. const CXXRecordDecl *NearestVBase) {
  1484. if ((Base.getBase()->getNumVBases() || NearestVBase != nullptr) &&
  1485. NeedsVTTParameter(CGF.CurGD)) {
  1486. return getVTableAddressPointInStructorWithVTT(CGF, VTableClass, Base,
  1487. NearestVBase);
  1488. }
  1489. return getVTableAddressPoint(Base, VTableClass);
  1490. }
llvm::Constant *
ItaniumCXXABI::getVTableAddressPoint(BaseSubobject Base,
                                     const CXXRecordDecl *VTableClass) {
  llvm::GlobalValue *VTable = getAddrOfVTable(VTableClass, CharUnits());

  // Find the appropriate vtable within the vtable group, and the address point
  // within that vtable.
  VTableLayout::AddressPointLocation AddressPoint =
      CGM.getItaniumVTableContext()
          .getVTableLayout(VTableClass)
          .getAddressPoint(Base);
  llvm::Value *Indices[] = {
    llvm::ConstantInt::get(CGM.Int32Ty, 0),
    llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.VTableIndex),
    llvm::ConstantInt::get(CGM.Int32Ty, AddressPoint.AddressPointIndex),
  };

  // Constant-fold a GEP into the vtable group; InRangeIndex=1 tells the
  // optimizer that accesses stay within the selected vtable of the group.
  return llvm::ConstantExpr::getGetElementPtr(VTable->getValueType(), VTable,
                                              Indices, /*InBounds=*/true,
                                              /*InRangeIndex=*/1);
}
  1510. // Check whether all the non-inline virtual methods for the class have the
  1511. // specified attribute.
  1512. template <typename T>
  1513. static bool CXXRecordAllNonInlineVirtualsHaveAttr(const CXXRecordDecl *RD) {
  1514. bool FoundNonInlineVirtualMethodWithAttr = false;
  1515. for (const auto *D : RD->noload_decls()) {
  1516. if (const auto *FD = dyn_cast<FunctionDecl>(D)) {
  1517. if (!FD->isVirtualAsWritten() || FD->isInlineSpecified() ||
  1518. FD->doesThisDeclarationHaveABody())
  1519. continue;
  1520. if (!D->hasAttr<T>())
  1521. return false;
  1522. FoundNonInlineVirtualMethodWithAttr = true;
  1523. }
  1524. }
  1525. // We didn't find any non-inline virtual methods missing the attribute. We
  1526. // will return true when we found at least one non-inline virtual with the
  1527. // attribute. (This lets our caller know that the attribute needs to be
  1528. // propagated up to the vtable.)
  1529. return FoundNonInlineVirtualMethodWithAttr;
  1530. }
llvm::Value *ItaniumCXXABI::getVTableAddressPointInStructorWithVTT(
    CodeGenFunction &CGF, const CXXRecordDecl *VTableClass, BaseSubobject Base,
    const CXXRecordDecl *NearestVBase) {
  assert((Base.getBase()->getNumVBases() || NearestVBase != nullptr) &&
         NeedsVTTParameter(CGF.CurGD) && "This class doesn't have VTT");

  // Get the secondary vpointer index.
  uint64_t VirtualPointerIndex =
      CGM.getVTables().getSecondaryVirtualPointerIndex(VTableClass, Base);

  /// Load the VTT.
  llvm::Value *VTT = CGF.LoadCXXVTT();
  // Step to the slot for this subobject (index 0 needs no GEP).
  if (VirtualPointerIndex)
    VTT = CGF.Builder.CreateConstInBoundsGEP1_64(
        CGF.VoidPtrTy, VTT, VirtualPointerIndex);

  // And load the address point from the VTT.
  return CGF.Builder.CreateAlignedLoad(CGF.VoidPtrTy, VTT,
                                       CGF.getPointerAlign());
}
llvm::Constant *ItaniumCXXABI::getVTableAddressPointForConstExpr(
    BaseSubobject Base, const CXXRecordDecl *VTableClass) {
  // Constant expressions never run mid-construction, so the global
  // vtable's address point is always the right one here.
  return getVTableAddressPoint(Base, VTableClass);
}
llvm::GlobalVariable *ItaniumCXXABI::getAddrOfVTable(const CXXRecordDecl *RD,
                                                     CharUnits VPtrOffset) {
  assert(VPtrOffset.isZero() && "Itanium ABI only supports zero vptr offsets");

  // Reuse a vtable global that was created (or declared) earlier.
  llvm::GlobalVariable *&VTable = VTables[RD];
  if (VTable)
    return VTable;

  // Queue up this vtable for possible deferred emission.
  CGM.addDeferredVTable(RD);

  SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  getMangleContext().mangleCXXVTable(RD, Out);

  const VTableLayout &VTLayout =
      CGM.getItaniumVTableContext().getVTableLayout(RD);
  llvm::Type *VTableType = CGM.getVTables().getVTableType(VTLayout);

  // Use pointer alignment for the vtable. Otherwise we would align them based
  // on the size of the initializer which doesn't make sense as only single
  // values are read.
  unsigned PAlign = CGM.getItaniumVTableContext().isRelativeLayout()
                        ? 32
                        : CGM.getTarget().getPointerAlign(0);

  VTable = CGM.CreateOrReplaceCXXRuntimeVariable(
      Name, VTableType, llvm::GlobalValue::ExternalLinkage,
      getContext().toCharUnitsFromBits(PAlign).getQuantity());
  VTable->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);

  // In MS C++ if you have a class with virtual functions in which you are using
  // selective member import/export, then all virtual functions must be exported
  // unless they are inline, otherwise a link error will result. To match this
  // behavior, for such classes, we dllimport the vtable if it is defined
  // externally and all the non-inline virtual methods are marked dllimport, and
  // we dllexport the vtable if it is defined in this TU and all the non-inline
  // virtual methods are marked dllexport.
  if (CGM.getTarget().hasPS4DLLImportExport()) {
    if ((!RD->hasAttr<DLLImportAttr>()) && (!RD->hasAttr<DLLExportAttr>())) {
      if (CGM.getVTables().isVTableExternal(RD)) {
        if (CXXRecordAllNonInlineVirtualsHaveAttr<DLLImportAttr>(RD))
          VTable->setDLLStorageClass(llvm::GlobalValue::DLLImportStorageClass);
      } else {
        if (CXXRecordAllNonInlineVirtualsHaveAttr<DLLExportAttr>(RD))
          VTable->setDLLStorageClass(llvm::GlobalValue::DLLExportStorageClass);
      }
    }
  }
  CGM.setGVProperties(VTable, RD);

  return VTable;
}
/// Look up the virtual function for GD in the vtable of the object at
/// `This` and wrap it as a CGCallee.
///
/// Three load strategies are used, chosen in this order:
///   1. CFI/WPD checked load (llvm.type.checked.load) when vtable type
///      checks are enabled for the class;
///   2. llvm.load.relative for the relative vtable layout (the slot holds
///      a 32-bit offset, hence the 4-byte slot stride);
///   3. a plain aligned load from the vtable slot otherwise.
CGCallee ItaniumCXXABI::getVirtualFunctionPointer(CodeGenFunction &CGF,
                                                  GlobalDecl GD,
                                                  Address This,
                                                  llvm::Type *Ty,
                                                  SourceLocation Loc) {
  llvm::Type *TyPtr = Ty->getPointerTo();
  auto *MethodDecl = cast<CXXMethodDecl>(GD.getDecl());
  // Load the vtable pointer out of the object header.
  llvm::Value *VTable = CGF.GetVTablePtr(
      This, TyPtr->getPointerTo(), MethodDecl->getParent());

  uint64_t VTableIndex = CGM.getItaniumVTableContext().getMethodVTableIndex(GD);
  llvm::Value *VFunc;
  if (CGF.ShouldEmitVTableTypeCheckedLoad(MethodDecl->getParent())) {
    // CFI: fold the type test and the slot load into one intrinsic. The byte
    // offset is the slot index scaled by the target pointer size.
    VFunc = CGF.EmitVTableTypeCheckedLoad(
        MethodDecl->getParent(), VTable,
        VTableIndex * CGM.getContext().getTargetInfo().getPointerWidth(0) / 8);
  } else {
    // Emit the (unchecked) type metadata test for whole-program
    // devirtualization before touching the vtable.
    CGF.EmitTypeMetadataCodeForVCall(MethodDecl->getParent(), VTable, Loc);

    llvm::Value *VFuncLoad;
    if (CGM.getItaniumVTableContext().isRelativeLayout()) {
      // Relative layout: each slot is an i32 offset from the slot's own
      // address; llvm.load.relative resolves it to an absolute pointer.
      VTable = CGF.Builder.CreateBitCast(VTable, CGM.Int8PtrTy);
      llvm::Value *Load = CGF.Builder.CreateCall(
          CGM.getIntrinsic(llvm::Intrinsic::load_relative, {CGM.Int32Ty}),
          {VTable, llvm::ConstantInt::get(CGM.Int32Ty, 4 * VTableIndex)});
      VFuncLoad = CGF.Builder.CreateBitCast(Load, TyPtr);
    } else {
      // Classic layout: index into an array of function pointers and load
      // the slot with pointer alignment.
      VTable =
          CGF.Builder.CreateBitCast(VTable, TyPtr->getPointerTo());
      llvm::Value *VTableSlotPtr = CGF.Builder.CreateConstInBoundsGEP1_64(
          TyPtr, VTable, VTableIndex, "vfn");
      VFuncLoad =
          CGF.Builder.CreateAlignedLoad(TyPtr, VTableSlotPtr,
                                        CGF.getPointerAlign());
    }

    // Add !invariant.load md to virtual function load to indicate that
    // function didn't change inside vtable.
    // It's safe to add it without -fstrict-vtable-pointers, but it would not
    // help in devirtualization because it will only matter if we will have 2
    // the same virtual function loads from the same vtable load, which won't
    // happen without enabled devirtualization with -fstrict-vtable-pointers.
    if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        CGM.getCodeGenOpts().StrictVTablePointers) {
      if (auto *VFuncLoadInstr = dyn_cast<llvm::Instruction>(VFuncLoad)) {
        VFuncLoadInstr->setMetadata(
            llvm::LLVMContext::MD_invariant_load,
            llvm::MDNode::get(CGM.getLLVMContext(),
                              llvm::ArrayRef<llvm::Metadata *>()));
      }
    }
    VFunc = VFuncLoad;
  }

  CGCallee Callee(GD, VFunc);
  return Callee;
}
  1650. llvm::Value *ItaniumCXXABI::EmitVirtualDestructorCall(
  1651. CodeGenFunction &CGF, const CXXDestructorDecl *Dtor, CXXDtorType DtorType,
  1652. Address This, DeleteOrMemberCallExpr E) {
  1653. auto *CE = E.dyn_cast<const CXXMemberCallExpr *>();
  1654. auto *D = E.dyn_cast<const CXXDeleteExpr *>();
  1655. assert((CE != nullptr) ^ (D != nullptr));
  1656. assert(CE == nullptr || CE->arg_begin() == CE->arg_end());
  1657. assert(DtorType == Dtor_Deleting || DtorType == Dtor_Complete);
  1658. GlobalDecl GD(Dtor, DtorType);
  1659. const CGFunctionInfo *FInfo =
  1660. &CGM.getTypes().arrangeCXXStructorDeclaration(GD);
  1661. llvm::FunctionType *Ty = CGF.CGM.getTypes().GetFunctionType(*FInfo);
  1662. CGCallee Callee = CGCallee::forVirtual(CE, GD, This, Ty);
  1663. QualType ThisTy;
  1664. if (CE) {
  1665. ThisTy = CE->getObjectType();
  1666. } else {
  1667. ThisTy = D->getDestroyedType();
  1668. }
  1669. CGF.EmitCXXDestructorCall(GD, Callee, This.getPointer(), ThisTy, nullptr,
  1670. QualType(), nullptr);
  1671. return nullptr;
  1672. }
  1673. void ItaniumCXXABI::emitVirtualInheritanceTables(const CXXRecordDecl *RD) {
  1674. CodeGenVTables &VTables = CGM.getVTables();
  1675. llvm::GlobalVariable *VTT = VTables.GetAddrOfVTT(RD);
  1676. VTables.EmitVTTDefinition(VTT, CGM.getVTableLinkage(RD), RD);
  1677. }
  1678. bool ItaniumCXXABI::canSpeculativelyEmitVTableAsBaseClass(
  1679. const CXXRecordDecl *RD) const {
  1680. // We don't emit available_externally vtables if we are in -fapple-kext mode
  1681. // because kext mode does not permit devirtualization.
  1682. if (CGM.getLangOpts().AppleKext)
  1683. return false;
  1684. // If the vtable is hidden then it is not safe to emit an available_externally
  1685. // copy of vtable.
  1686. if (isVTableHidden(RD))
  1687. return false;
  1688. if (CGM.getCodeGenOpts().ForceEmitVTables)
  1689. return true;
  1690. // If we don't have any not emitted inline virtual function then we are safe
  1691. // to emit an available_externally copy of vtable.
  1692. // FIXME we can still emit a copy of the vtable if we
  1693. // can emit definition of the inline functions.
  1694. if (hasAnyUnusedVirtualInlineFunction(RD))
  1695. return false;
  1696. // For a class with virtual bases, we must also be able to speculatively
  1697. // emit the VTT, because CodeGen doesn't have separate notions of "can emit
  1698. // the vtable" and "can emit the VTT". For a base subobject, this means we
  1699. // need to be able to emit non-virtual base vtables.
  1700. if (RD->getNumVBases()) {
  1701. for (const auto &B : RD->bases()) {
  1702. auto *BRD = B.getType()->getAsCXXRecordDecl();
  1703. assert(BRD && "no class for base specifier");
  1704. if (B.isVirtual() || !BRD->isDynamicClass())
  1705. continue;
  1706. if (!canSpeculativelyEmitVTableAsBaseClass(BRD))
  1707. return false;
  1708. }
  1709. }
  1710. return true;
  1711. }
  1712. bool ItaniumCXXABI::canSpeculativelyEmitVTable(const CXXRecordDecl *RD) const {
  1713. if (!canSpeculativelyEmitVTableAsBaseClass(RD))
  1714. return false;
  1715. // For a complete-object vtable (or more specifically, for the VTT), we need
  1716. // to be able to speculatively emit the vtables of all dynamic virtual bases.
  1717. for (const auto &B : RD->vbases()) {
  1718. auto *BRD = B.getType()->getAsCXXRecordDecl();
  1719. assert(BRD && "no class for base specifier");
  1720. if (!BRD->isDynamicClass())
  1721. continue;
  1722. if (!canSpeculativelyEmitVTableAsBaseClass(BRD))
  1723. return false;
  1724. }
  1725. return true;
  1726. }
/// Apply a this/return pointer adjustment consisting of an optional
/// non-virtual byte offset and an optional virtual offset read out of the
/// object's vtable.
///
/// Ordering matters: for a 'this' adjustment (base-to-derived,
/// IsReturnAdjustment = false) the non-virtual part is applied before the
/// virtual part; for a return adjustment (derived-to-base) it is applied
/// after.  The result is cast back to InitialPtr's pointer type.
static llvm::Value *performTypeAdjustment(CodeGenFunction &CGF,
                                          Address InitialPtr,
                                          int64_t NonVirtualAdjustment,
                                          int64_t VirtualAdjustment,
                                          bool IsReturnAdjustment) {
  // Fast path: nothing to adjust.
  if (!NonVirtualAdjustment && !VirtualAdjustment)
    return InitialPtr.getPointer();

  // Work in i8* so byte GEPs are straightforward.
  Address V = CGF.Builder.CreateElementBitCast(InitialPtr, CGF.Int8Ty);

  // In a base-to-derived cast, the non-virtual adjustment is applied first.
  if (NonVirtualAdjustment && !IsReturnAdjustment) {
    V = CGF.Builder.CreateConstInBoundsByteGEP(V,
                              CharUnits::fromQuantity(NonVirtualAdjustment));
  }

  // Perform the virtual adjustment if we have one.
  llvm::Value *ResultPtr;
  if (VirtualAdjustment) {
    // Load the vtable pointer, then the adjustment offset stored at
    // VirtualAdjustment bytes into the vtable.
    Address VTablePtrPtr = CGF.Builder.CreateElementBitCast(V, CGF.Int8PtrTy);
    llvm::Value *VTablePtr = CGF.Builder.CreateLoad(VTablePtrPtr);

    llvm::Value *Offset;
    llvm::Value *OffsetPtr = CGF.Builder.CreateConstInBoundsGEP1_64(
        CGF.Int8Ty, VTablePtr, VirtualAdjustment);
    if (CGF.CGM.getItaniumVTableContext().isRelativeLayout()) {
      // Load the adjustment offset from the vtable as a 32-bit int.
      OffsetPtr =
          CGF.Builder.CreateBitCast(OffsetPtr, CGF.Int32Ty->getPointerTo());
      Offset =
          CGF.Builder.CreateAlignedLoad(CGF.Int32Ty, OffsetPtr,
                                        CharUnits::fromQuantity(4));
    } else {
      // Classic layout stores the offset as a ptrdiff_t.
      llvm::Type *PtrDiffTy =
          CGF.ConvertType(CGF.getContext().getPointerDiffType());

      OffsetPtr =
          CGF.Builder.CreateBitCast(OffsetPtr, PtrDiffTy->getPointerTo());

      // Load the adjustment offset from the vtable.
      Offset = CGF.Builder.CreateAlignedLoad(PtrDiffTy, OffsetPtr,
                                             CGF.getPointerAlign());
    }
    // Adjust our pointer.
    ResultPtr = CGF.Builder.CreateInBoundsGEP(
        V.getElementType(), V.getPointer(), Offset);
  } else {
    ResultPtr = V.getPointer();
  }

  // In a derived-to-base conversion, the non-virtual adjustment is
  // applied second.
  if (NonVirtualAdjustment && IsReturnAdjustment) {
    ResultPtr = CGF.Builder.CreateConstInBoundsGEP1_64(CGF.Int8Ty, ResultPtr,
                                                       NonVirtualAdjustment);
  }

  // Cast back to the original type.
  return CGF.Builder.CreateBitCast(ResultPtr, InitialPtr.getType());
}
  1779. llvm::Value *ItaniumCXXABI::performThisAdjustment(CodeGenFunction &CGF,
  1780. Address This,
  1781. const ThisAdjustment &TA) {
  1782. return performTypeAdjustment(CGF, This, TA.NonVirtual,
  1783. TA.Virtual.Itanium.VCallOffsetOffset,
  1784. /*IsReturnAdjustment=*/false);
  1785. }
  1786. llvm::Value *
  1787. ItaniumCXXABI::performReturnAdjustment(CodeGenFunction &CGF, Address Ret,
  1788. const ReturnAdjustment &RA) {
  1789. return performTypeAdjustment(CGF, Ret, RA.NonVirtual,
  1790. RA.Virtual.Itanium.VBaseOffsetOffset,
  1791. /*IsReturnAdjustment=*/true);
  1792. }
  1793. void ARMCXXABI::EmitReturnFromThunk(CodeGenFunction &CGF,
  1794. RValue RV, QualType ResultType) {
  1795. if (!isa<CXXDestructorDecl>(CGF.CurGD.getDecl()))
  1796. return ItaniumCXXABI::EmitReturnFromThunk(CGF, RV, ResultType);
  1797. // Destructor thunks in the ARM ABI have indeterminate results.
  1798. llvm::Type *T = CGF.ReturnValue.getElementType();
  1799. RValue Undef = RValue::get(llvm::UndefValue::get(T));
  1800. return ItaniumCXXABI::EmitReturnFromThunk(CGF, Undef, ResultType);
  1801. }
  1802. /************************** Array allocation cookies **************************/
  1803. CharUnits ItaniumCXXABI::getArrayCookieSizeImpl(QualType elementType) {
  1804. // The array cookie is a size_t; pad that up to the element alignment.
  1805. // The cookie is actually right-justified in that space.
  1806. return std::max(CharUnits::fromQuantity(CGM.SizeSizeInBytes),
  1807. CGM.getContext().getPreferredTypeAlignInChars(elementType));
  1808. }
/// Write the array-new cookie (the element count, right-justified in an
/// alignment-padded slot) at the start of the allocation and return the
/// address of the first array element.  Under ASan the cookie may also be
/// poisoned via the runtime.
Address ItaniumCXXABI::InitializeArrayCookie(CodeGenFunction &CGF,
                                             Address NewPtr,
                                             llvm::Value *NumElements,
                                             const CXXNewExpr *expr,
                                             QualType ElementType) {
  assert(requiresArrayCookie(expr));

  unsigned AS = NewPtr.getAddressSpace();

  ASTContext &Ctx = getContext();
  CharUnits SizeSize = CGF.getSizeSize();

  // The size of the cookie.
  CharUnits CookieSize =
      std::max(SizeSize, Ctx.getPreferredTypeAlignInChars(ElementType));
  // Must agree with what getArrayCookieSizeImpl reported to the allocator.
  assert(CookieSize == getArrayCookieSizeImpl(ElementType));

  // Compute an offset to the cookie.
  Address CookiePtr = NewPtr;
  CharUnits CookieOffset = CookieSize - SizeSize;
  if (!CookieOffset.isZero())
    CookiePtr = CGF.Builder.CreateConstInBoundsByteGEP(CookiePtr, CookieOffset);

  // Write the number of elements into the appropriate slot.
  Address NumElementsPtr =
      CGF.Builder.CreateElementBitCast(CookiePtr, CGF.SizeTy);
  llvm::Instruction *SI = CGF.Builder.CreateStore(NumElements, NumElementsPtr);

  // Handle the array cookie specially in ASan.
  // Poisoning applies only in address space 0 and when either the allocation
  // uses a replaceable global operator new or custom-cookie poisoning was
  // explicitly requested.
  if (CGM.getLangOpts().Sanitize.has(SanitizerKind::Address) && AS == 0 &&
      (expr->getOperatorNew()->isReplaceableGlobalAllocationFunction() ||
       CGM.getCodeGenOpts().SanitizeAddressPoisonCustomArrayCookie)) {
    // The store to the CookiePtr does not need to be instrumented.
    CGM.getSanitizerMetadata()->disableSanitizerForInstruction(SI);
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(CGM.VoidTy, NumElementsPtr.getType(), false);
    llvm::FunctionCallee F =
        CGM.CreateRuntimeFunction(FTy, "__asan_poison_cxx_array_cookie");
    CGF.Builder.CreateCall(F, NumElementsPtr.getPointer());
  }

  // Finally, compute a pointer to the actual data buffer by skipping
  // over the cookie completely.
  return CGF.Builder.CreateConstInBoundsByteGEP(NewPtr, CookieSize);
}
  1847. llvm::Value *ItaniumCXXABI::readArrayCookieImpl(CodeGenFunction &CGF,
  1848. Address allocPtr,
  1849. CharUnits cookieSize) {
  1850. // The element size is right-justified in the cookie.
  1851. Address numElementsPtr = allocPtr;
  1852. CharUnits numElementsOffset = cookieSize - CGF.getSizeSize();
  1853. if (!numElementsOffset.isZero())
  1854. numElementsPtr =
  1855. CGF.Builder.CreateConstInBoundsByteGEP(numElementsPtr, numElementsOffset);
  1856. unsigned AS = allocPtr.getAddressSpace();
  1857. numElementsPtr = CGF.Builder.CreateElementBitCast(numElementsPtr, CGF.SizeTy);
  1858. if (!CGM.getLangOpts().Sanitize.has(SanitizerKind::Address) || AS != 0)
  1859. return CGF.Builder.CreateLoad(numElementsPtr);
  1860. // In asan mode emit a function call instead of a regular load and let the
  1861. // run-time deal with it: if the shadow is properly poisoned return the
  1862. // cookie, otherwise return 0 to avoid an infinite loop calling DTORs.
  1863. // We can't simply ignore this load using nosanitize metadata because
  1864. // the metadata may be lost.
  1865. llvm::FunctionType *FTy =
  1866. llvm::FunctionType::get(CGF.SizeTy, CGF.SizeTy->getPointerTo(0), false);
  1867. llvm::FunctionCallee F =
  1868. CGM.CreateRuntimeFunction(FTy, "__asan_load_cxx_array_cookie");
  1869. return CGF.Builder.CreateCall(F, numElementsPtr.getPointer());
  1870. }
  1871. CharUnits ARMCXXABI::getArrayCookieSizeImpl(QualType elementType) {
  1872. // ARM says that the cookie is always:
  1873. // struct array_cookie {
  1874. // std::size_t element_size; // element_size != 0
  1875. // std::size_t element_count;
  1876. // };
  1877. // But the base ABI doesn't give anything an alignment greater than
  1878. // 8, so we can dismiss this as typical ABI-author blindness to
  1879. // actual language complexity and round up to the element alignment.
  1880. return std::max(CharUnits::fromQuantity(2 * CGM.SizeSizeInBytes),
  1881. CGM.getContext().getTypeAlignInChars(elementType));
  1882. }
  1883. Address ARMCXXABI::InitializeArrayCookie(CodeGenFunction &CGF,
  1884. Address newPtr,
  1885. llvm::Value *numElements,
  1886. const CXXNewExpr *expr,
  1887. QualType elementType) {
  1888. assert(requiresArrayCookie(expr));
  1889. // The cookie is always at the start of the buffer.
  1890. Address cookie = newPtr;
  1891. // The first element is the element size.
  1892. cookie = CGF.Builder.CreateElementBitCast(cookie, CGF.SizeTy);
  1893. llvm::Value *elementSize = llvm::ConstantInt::get(CGF.SizeTy,
  1894. getContext().getTypeSizeInChars(elementType).getQuantity());
  1895. CGF.Builder.CreateStore(elementSize, cookie);
  1896. // The second element is the element count.
  1897. cookie = CGF.Builder.CreateConstInBoundsGEP(cookie, 1);
  1898. CGF.Builder.CreateStore(numElements, cookie);
  1899. // Finally, compute a pointer to the actual data buffer by skipping
  1900. // over the cookie completely.
  1901. CharUnits cookieSize = ARMCXXABI::getArrayCookieSizeImpl(elementType);
  1902. return CGF.Builder.CreateConstInBoundsByteGEP(newPtr, cookieSize);
  1903. }
  1904. llvm::Value *ARMCXXABI::readArrayCookieImpl(CodeGenFunction &CGF,
  1905. Address allocPtr,
  1906. CharUnits cookieSize) {
  1907. // The number of elements is at offset sizeof(size_t) relative to
  1908. // the allocated pointer.
  1909. Address numElementsPtr
  1910. = CGF.Builder.CreateConstInBoundsByteGEP(allocPtr, CGF.getSizeSize());
  1911. numElementsPtr = CGF.Builder.CreateElementBitCast(numElementsPtr, CGF.SizeTy);
  1912. return CGF.Builder.CreateLoad(numElementsPtr);
  1913. }
  1914. /*********************** Static local initialization **************************/
  1915. static llvm::FunctionCallee getGuardAcquireFn(CodeGenModule &CGM,
  1916. llvm::PointerType *GuardPtrTy) {
  1917. // int __cxa_guard_acquire(__guard *guard_object);
  1918. llvm::FunctionType *FTy =
  1919. llvm::FunctionType::get(CGM.getTypes().ConvertType(CGM.getContext().IntTy),
  1920. GuardPtrTy, /*isVarArg=*/false);
  1921. return CGM.CreateRuntimeFunction(
  1922. FTy, "__cxa_guard_acquire",
  1923. llvm::AttributeList::get(CGM.getLLVMContext(),
  1924. llvm::AttributeList::FunctionIndex,
  1925. llvm::Attribute::NoUnwind));
  1926. }
  1927. static llvm::FunctionCallee getGuardReleaseFn(CodeGenModule &CGM,
  1928. llvm::PointerType *GuardPtrTy) {
  1929. // void __cxa_guard_release(__guard *guard_object);
  1930. llvm::FunctionType *FTy =
  1931. llvm::FunctionType::get(CGM.VoidTy, GuardPtrTy, /*isVarArg=*/false);
  1932. return CGM.CreateRuntimeFunction(
  1933. FTy, "__cxa_guard_release",
  1934. llvm::AttributeList::get(CGM.getLLVMContext(),
  1935. llvm::AttributeList::FunctionIndex,
  1936. llvm::Attribute::NoUnwind));
  1937. }
  1938. static llvm::FunctionCallee getGuardAbortFn(CodeGenModule &CGM,
  1939. llvm::PointerType *GuardPtrTy) {
  1940. // void __cxa_guard_abort(__guard *guard_object);
  1941. llvm::FunctionType *FTy =
  1942. llvm::FunctionType::get(CGM.VoidTy, GuardPtrTy, /*isVarArg=*/false);
  1943. return CGM.CreateRuntimeFunction(
  1944. FTy, "__cxa_guard_abort",
  1945. llvm::AttributeList::get(CGM.getLLVMContext(),
  1946. llvm::AttributeList::FunctionIndex,
  1947. llvm::Attribute::NoUnwind));
  1948. }
namespace {
  /// An EH cleanup that calls __cxa_guard_abort on the given guard variable
  /// along the exceptional edge of a guarded static initialization, so the
  /// guard is released and another thread may retry the initialization.
  struct CallGuardAbort final : EHScopeStack::Cleanup {
    llvm::GlobalVariable *Guard;
    CallGuardAbort(llvm::GlobalVariable *Guard) : Guard(Guard) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitNounwindRuntimeCall(getGuardAbortFn(CGF.CGM, Guard->getType()),
                                  Guard);
    }
  };
}
/// The ARM code here follows the Itanium code closely enough that we
/// just special-case it at particular places.
///
/// Emits the guarded lazy initialization of a static local or inline
/// variable: create (or reuse) the mangled guard variable, test its first
/// byte (first bit on ARM), and in the uninitialized case run the
/// initializer — wrapped in __cxa_guard_acquire/release with a
/// __cxa_guard_abort EH cleanup when thread-safe statics are in use.
void ItaniumCXXABI::EmitGuardedInit(CodeGenFunction &CGF,
                                    const VarDecl &D,
                                    llvm::GlobalVariable *var,
                                    bool shouldPerformInit) {
  CGBuilderTy &Builder = CGF.Builder;

  // Inline variables that weren't instantiated from variable templates have
  // partially-ordered initialization within their translation unit.
  bool NonTemplateInline =
      D.isInline() &&
      !isTemplateInstantiation(D.getTemplateSpecializationKind());

  // We only need to use thread-safe statics for local non-TLS variables and
  // inline variables; other global initialization is always single-threaded
  // or (through lazy dynamic loading in multiple threads) unsequenced.
  bool threadsafe = getContext().getLangOpts().ThreadsafeStatics &&
                    (D.isLocalVarDecl() || NonTemplateInline) &&
                    !D.getTLSKind();

  // If we have a global variable with internal linkage and thread-safe statics
  // are disabled, we can just let the guard variable be of type i8.
  bool useInt8GuardVariable = !threadsafe && var->hasInternalLinkage();

  llvm::IntegerType *guardTy;
  CharUnits guardAlignment;
  if (useInt8GuardVariable) {
    guardTy = CGF.Int8Ty;
    guardAlignment = CharUnits::One();
  } else {
    // Guard variables are 64 bits in the generic ABI and size width on ARM
    // (i.e. 32-bit on AArch32, 64-bit on AArch64).
    if (UseARMGuardVarABI) {
      guardTy = CGF.SizeTy;
      guardAlignment = CGF.getSizeAlign();
    } else {
      guardTy = CGF.Int64Ty;
      guardAlignment = CharUnits::fromQuantity(
          CGM.getDataLayout().getABITypeAlignment(guardTy));
    }
  }
  llvm::PointerType *guardPtrTy = guardTy->getPointerTo(
      CGF.CGM.getDataLayout().getDefaultGlobalsAddressSpace());

  // Create the guard variable if we don't already have it (as we
  // might if we're double-emitting this function body).
  llvm::GlobalVariable *guard = CGM.getStaticLocalDeclGuardAddress(&D);
  if (!guard) {
    // Mangle the name for the guard.
    SmallString<256> guardName;
    {
      llvm::raw_svector_ostream out(guardName);
      getMangleContext().mangleStaticGuardVariable(&D, out);
    }

    // Create the guard variable with a zero-initializer.
    // Just absorb linkage and visibility from the guarded variable.
    guard = new llvm::GlobalVariable(CGM.getModule(), guardTy,
                                     false, var->getLinkage(),
                                     llvm::ConstantInt::get(guardTy, 0),
                                     guardName.str());
    guard->setDSOLocal(var->isDSOLocal());
    guard->setVisibility(var->getVisibility());
    // If the variable is thread-local, so is its guard variable.
    guard->setThreadLocalMode(var->getThreadLocalMode());
    guard->setAlignment(guardAlignment.getAsAlign());

    // The ABI says: "It is suggested that it be emitted in the same COMDAT
    // group as the associated data object." In practice, this doesn't work for
    // non-ELF and non-Wasm object formats, so only do it for ELF and Wasm.
    llvm::Comdat *C = var->getComdat();
    if (!D.isLocalVarDecl() && C &&
        (CGM.getTarget().getTriple().isOSBinFormatELF() ||
         CGM.getTarget().getTriple().isOSBinFormatWasm())) {
      guard->setComdat(C);
    } else if (CGM.supportsCOMDAT() && guard->isWeakForLinker()) {
      guard->setComdat(CGM.getModule().getOrInsertComdat(guard->getName()));
    }

    CGM.setStaticLocalDeclGuardAddress(&D, guard);
  }

  Address guardAddr = Address(guard, guard->getValueType(), guardAlignment);

  // Test whether the variable has completed initialization.
  //
  // Itanium C++ ABI 3.3.2:
  //   The following is pseudo-code showing how these functions can be used:
  //     if (obj_guard.first_byte == 0) {
  //       if ( __cxa_guard_acquire (&obj_guard) ) {
  //         try {
  //           ... initialize the object ...;
  //         } catch (...) {
  //            __cxa_guard_abort (&obj_guard);
  //            throw;
  //         }
  //         ... queue object destructor with __cxa_atexit() ...;
  //         __cxa_guard_release (&obj_guard);
  //       }
  //     }

  // Load the first byte of the guard variable.
  llvm::LoadInst *LI =
      Builder.CreateLoad(Builder.CreateElementBitCast(guardAddr, CGM.Int8Ty));

  // Itanium ABI:
  //   An implementation supporting thread-safety on multiprocessor
  //   systems must also guarantee that references to the initialized
  //   object do not occur before the load of the initialization flag.
  //
  // In LLVM, we do this by marking the load Acquire.
  if (threadsafe)
    LI->setAtomic(llvm::AtomicOrdering::Acquire);

  // For ARM, we should only check the first bit, rather than the entire byte:
  //
  // ARM C++ ABI 3.2.3.1:
  //   To support the potential use of initialization guard variables
  //   as semaphores that are the target of ARM SWP and LDREX/STREX
  //   synchronizing instructions we define a static initialization
  //   guard variable to be a 4-byte aligned, 4-byte word with the
  //   following inline access protocol.
  //     #define INITIALIZED 1
  //     if ((obj_guard & INITIALIZED) != INITIALIZED) {
  //       if (__cxa_guard_acquire(&obj_guard))
  //         ...
  //     }
  //
  // and similarly for ARM64:
  //
  // ARM64 C++ ABI 3.2.2:
  //   This ABI instead only specifies the value bit 0 of the static guard
  //   variable; all other bits are platform defined. Bit 0 shall be 0 when the
  //   variable is not initialized and 1 when it is.
  llvm::Value *V =
      (UseARMGuardVarABI && !useInt8GuardVariable)
          ? Builder.CreateAnd(LI, llvm::ConstantInt::get(CGM.Int8Ty, 1))
          : LI;
  llvm::Value *NeedsInit = Builder.CreateIsNull(V, "guard.uninitialized");

  llvm::BasicBlock *InitCheckBlock = CGF.createBasicBlock("init.check");
  llvm::BasicBlock *EndBlock = CGF.createBasicBlock("init.end");

  // Check if the first byte of the guard variable is zero.
  CGF.EmitCXXGuardedInitBranch(NeedsInit, InitCheckBlock, EndBlock,
                               CodeGenFunction::GuardKind::VariableGuard, &D);

  CGF.EmitBlock(InitCheckBlock);

  // Variables used when coping with thread-safe statics and exceptions.
  if (threadsafe) {
    // Call __cxa_guard_acquire.
    llvm::Value *V
      = CGF.EmitNounwindRuntimeCall(getGuardAcquireFn(CGM, guardPtrTy), guard);

    llvm::BasicBlock *InitBlock = CGF.createBasicBlock("init");

    Builder.CreateCondBr(Builder.CreateIsNotNull(V, "tobool"),
                         InitBlock, EndBlock);

    // Call __cxa_guard_abort along the exceptional edge.
    CGF.EHStack.pushCleanup<CallGuardAbort>(EHCleanup, guard);

    CGF.EmitBlock(InitBlock);
  }

  // Emit the initializer and add a global destructor if appropriate.
  CGF.EmitCXXGlobalVarDeclInit(D, var, shouldPerformInit);

  if (threadsafe) {
    // Pop the guard-abort cleanup if we pushed one.
    CGF.PopCleanupBlock();

    // Call __cxa_guard_release.  This cannot throw.
    CGF.EmitNounwindRuntimeCall(getGuardReleaseFn(CGM, guardPtrTy),
                                guardAddr.getPointer());
  } else {
    // Store 1 into the first byte of the guard variable after initialization is
    // complete.
    Builder.CreateStore(llvm::ConstantInt::get(CGM.Int8Ty, 1),
                        Builder.CreateElementBitCast(guardAddr, CGM.Int8Ty));
  }

  CGF.EmitBlock(EndBlock);
}
/// Register a global destructor using __cxa_atexit.
///
/// For TLS variables the call goes to __cxa_thread_atexit (or _tlv_atexit
/// on Darwin) instead.  `addr` may be null when registering a dtor declared
/// with __attribute__((destructor)); in that case a null i8* is passed,
/// which is merely echoed back to the destructor function.
static void emitGlobalDtorWithCXAAtExit(CodeGenFunction &CGF,
                                        llvm::FunctionCallee dtor,
                                        llvm::Constant *addr, bool TLS) {
  assert(!CGF.getTarget().getTriple().isOSAIX() &&
         "unexpected call to emitGlobalDtorWithCXAAtExit");
  assert((TLS || CGF.getTypes().getCodeGenOpts().CXAAtExit) &&
         "__cxa_atexit is disabled");
  const char *Name = "__cxa_atexit";
  if (TLS) {
    const llvm::Triple &T = CGF.getTarget().getTriple();
    Name = T.isOSDarwin() ?  "_tlv_atexit" : "__cxa_thread_atexit";
  }

  // We're assuming that the destructor function is something we can
  // reasonably call with the default CC.  Go ahead and cast it to the
  // right prototype.
  llvm::Type *dtorTy =
      llvm::FunctionType::get(CGF.VoidTy, CGF.Int8PtrTy, false)->getPointerTo();

  // Preserve address space of addr.
  auto AddrAS = addr ? addr->getType()->getPointerAddressSpace() : 0;
  auto AddrInt8PtrTy =
      AddrAS ? CGF.Int8Ty->getPointerTo(AddrAS) : CGF.Int8PtrTy;

  // Create a variable that binds the atexit to this shared object.
  llvm::Constant *handle =
      CGF.CGM.CreateRuntimeVariable(CGF.Int8Ty, "__dso_handle");
  auto *GV = cast<llvm::GlobalValue>(handle->stripPointerCasts());
  GV->setVisibility(llvm::GlobalValue::HiddenVisibility);

  // extern "C" int __cxa_atexit(void (*f)(void *), void *p, void *d);
  llvm::Type *paramTys[] = {dtorTy, AddrInt8PtrTy, handle->getType()};
  llvm::FunctionType *atexitTy =
      llvm::FunctionType::get(CGF.IntTy, paramTys, false);

  // Fetch the actual function.
  llvm::FunctionCallee atexit = CGF.CGM.CreateRuntimeFunction(atexitTy, Name);
  if (llvm::Function *fn = dyn_cast<llvm::Function>(atexit.getCallee()))
    fn->setDoesNotThrow();

  if (!addr)
    // addr is null when we are trying to register a dtor annotated with
    // __attribute__((destructor)) in a constructor function. Using null here is
    // okay because this argument is just passed back to the destructor
    // function.
    addr = llvm::Constant::getNullValue(CGF.Int8PtrTy);

  llvm::Value *args[] = {llvm::ConstantExpr::getBitCast(
                             cast<llvm::Constant>(dtor.getCallee()), dtorTy),
                         llvm::ConstantExpr::getBitCast(addr, AddrInt8PtrTy),
                         handle};
  CGF.EmitNounwindRuntimeCall(atexit, args);
}
  2167. static llvm::Function *createGlobalInitOrCleanupFn(CodeGen::CodeGenModule &CGM,
  2168. StringRef FnName) {
  2169. // Create a function that registers/unregisters destructors that have the same
  2170. // priority.
  2171. llvm::FunctionType *FTy = llvm::FunctionType::get(CGM.VoidTy, false);
  2172. llvm::Function *GlobalInitOrCleanupFn = CGM.CreateGlobalInitOrCleanUpFunction(
  2173. FTy, FnName, CGM.getTypes().arrangeNullaryFunction(), SourceLocation());
  2174. return GlobalInitOrCleanupFn;
  2175. }
/// For each priority in DtorsUsingAtExit, build a __GLOBAL_cleanup_<prio>
/// function that calls unatexit on every registered destructor (in reverse
/// registration order) and directly invokes each destructor whose
/// unatexit call reports it had not yet run.  The cleanup function itself
/// is registered as a global dtor at the same priority.
void CodeGenModule::unregisterGlobalDtorsWithUnAtExit() {
  for (const auto &I : DtorsUsingAtExit) {
    int Priority = I.first;
    std::string GlobalCleanupFnName =
        std::string("__GLOBAL_cleanup_") + llvm::to_string(Priority);

    llvm::Function *GlobalCleanupFn =
        createGlobalInitOrCleanupFn(*this, GlobalCleanupFnName);

    CodeGenFunction CGF(*this);
    CGF.StartFunction(GlobalDecl(), getContext().VoidTy, GlobalCleanupFn,
                      getTypes().arrangeNullaryFunction(), FunctionArgList(),
                      SourceLocation(), SourceLocation());
    auto AL = ApplyDebugLocation::CreateArtificial(CGF);

    // Get the destructor function type, void(*)(void).
    llvm::FunctionType *dtorFuncTy = llvm::FunctionType::get(CGF.VoidTy, false);
    llvm::Type *dtorTy = dtorFuncTy->getPointerTo();

    // Destructor functions are run/unregistered in non-ascending
    // order of their priorities.
    const llvm::TinyPtrVector<llvm::Function *> &Dtors = I.second;
    auto itv = Dtors.rbegin();
    while (itv != Dtors.rend()) {
      llvm::Function *Dtor = *itv;

      // We're assuming that the destructor function is something we can
      // reasonably call with the correct CC.  Go ahead and cast it to the
      // right prototype.
      llvm::Constant *dtor = llvm::ConstantExpr::getBitCast(Dtor, dtorTy);
      // unatexit returns 0 when the dtor was still registered (i.e. it has
      // not run yet and we must run it ourselves).
      llvm::Value *V = CGF.unregisterGlobalDtorWithUnAtExit(dtor);
      llvm::Value *NeedsDestruct =
          CGF.Builder.CreateIsNull(V, "needs_destruct");

      llvm::BasicBlock *DestructCallBlock =
          CGF.createBasicBlock("destruct.call");
      // The fall-through block either starts the next dtor's unatexit call
      // or ends the function, depending on whether more dtors remain.
      llvm::BasicBlock *EndBlock = CGF.createBasicBlock(
          (itv + 1) != Dtors.rend() ? "unatexit.call" : "destruct.end");
      // Check if unatexit returns a value of 0. If it does, jump to
      // DestructCallBlock, otherwise jump to EndBlock directly.
      CGF.Builder.CreateCondBr(NeedsDestruct, DestructCallBlock, EndBlock);

      CGF.EmitBlock(DestructCallBlock);

      // Emit the call to casted Dtor.
      llvm::CallInst *CI = CGF.Builder.CreateCall(dtorFuncTy, dtor);
      // Make sure the call and the callee agree on calling convention.
      CI->setCallingConv(Dtor->getCallingConv());

      CGF.EmitBlock(EndBlock);

      itv++;
    }

    CGF.FinishFunction();
    AddGlobalDtor(GlobalCleanupFn, Priority);
  }
}
/// For each priority in DtorsUsingAtExit, build a __GLOBAL_init_<prio>
/// function that registers every destructor of that priority via
/// __cxa_atexit (or plain atexit as a fallback), then add it as a global
/// ctor.  On sinit/sterm targets (e.g. AIX) the matching unregistering
/// cleanup functions are emitted as well.
void CodeGenModule::registerGlobalDtorsWithAtExit() {
  for (const auto &I : DtorsUsingAtExit) {
    int Priority = I.first;
    std::string GlobalInitFnName =
        std::string("__GLOBAL_init_") + llvm::to_string(Priority);
    llvm::Function *GlobalInitFn =
        createGlobalInitOrCleanupFn(*this, GlobalInitFnName);

    CodeGenFunction CGF(*this);
    CGF.StartFunction(GlobalDecl(), getContext().VoidTy, GlobalInitFn,
                      getTypes().arrangeNullaryFunction(), FunctionArgList(),
                      SourceLocation(), SourceLocation());
    auto AL = ApplyDebugLocation::CreateArtificial(CGF);

    // Since constructor functions are run in non-descending order of their
    // priorities, destructors are registered in non-descending order of their
    // priorities, and since destructor functions are run in the reverse order
    // of their registration, destructor functions are run in non-ascending
    // order of their priorities.
    const llvm::TinyPtrVector<llvm::Function *> &Dtors = I.second;
    for (auto *Dtor : Dtors) {
      // Register the destructor function calling __cxa_atexit if it is
      // available. Otherwise fall back on calling atexit.
      if (getCodeGenOpts().CXAAtExit) {
        emitGlobalDtorWithCXAAtExit(CGF, Dtor, nullptr, false);
      } else {
        // Get the destructor function type, void(*)(void).
        llvm::Type *dtorTy =
            llvm::FunctionType::get(CGF.VoidTy, false)->getPointerTo();

        // We're assuming that the destructor function is something we can
        // reasonably call with the correct CC.  Go ahead and cast it to the
        // right prototype.
        CGF.registerGlobalDtorWithAtExit(
            llvm::ConstantExpr::getBitCast(Dtor, dtorTy));
      }
    }

    CGF.FinishFunction();
    AddGlobalCtor(GlobalInitFn, Priority, nullptr);
  }

  if (getCXXABI().useSinitAndSterm())
    unregisterGlobalDtorsWithUnAtExit();
}
  2263. /// Register a global destructor as best as we know how.
  2264. void ItaniumCXXABI::registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
  2265. llvm::FunctionCallee dtor,
  2266. llvm::Constant *addr) {
  2267. if (D.isNoDestroy(CGM.getContext()))
  2268. return;
  2269. // emitGlobalDtorWithCXAAtExit will emit a call to either __cxa_thread_atexit
  2270. // or __cxa_atexit depending on whether this VarDecl is a thread-local storage
  2271. // or not. CXAAtExit controls only __cxa_atexit, so use it if it is enabled.
  2272. // We can always use __cxa_thread_atexit.
  2273. if (CGM.getCodeGenOpts().CXAAtExit || D.getTLSKind())
  2274. return emitGlobalDtorWithCXAAtExit(CGF, dtor, addr, D.getTLSKind());
  2275. // In Apple kexts, we want to add a global destructor entry.
  2276. // FIXME: shouldn't this be guarded by some variable?
  2277. if (CGM.getLangOpts().AppleKext) {
  2278. // Generate a global destructor entry.
  2279. return CGM.AddCXXDtorEntry(dtor, addr);
  2280. }
  2281. CGF.registerGlobalDtorWithAtExit(D, dtor, addr);
  2282. }
  2283. static bool isThreadWrapperReplaceable(const VarDecl *VD,
  2284. CodeGen::CodeGenModule &CGM) {
  2285. assert(!VD->isStaticLocal() && "static local VarDecls don't need wrappers!");
  2286. // Darwin prefers to have references to thread local variables to go through
  2287. // the thread wrapper instead of directly referencing the backing variable.
  2288. return VD->getTLSKind() == VarDecl::TLS_Dynamic &&
  2289. CGM.getTarget().getTriple().isOSDarwin();
  2290. }
  2291. /// Get the appropriate linkage for the wrapper function. This is essentially
  2292. /// the weak form of the variable's linkage; every translation unit which needs
  2293. /// the wrapper emits a copy, and we want the linker to merge them.
  2294. static llvm::GlobalValue::LinkageTypes
  2295. getThreadLocalWrapperLinkage(const VarDecl *VD, CodeGen::CodeGenModule &CGM) {
  2296. llvm::GlobalValue::LinkageTypes VarLinkage =
  2297. CGM.getLLVMLinkageVarDefinition(VD, /*IsConstant=*/false);
  2298. // For internal linkage variables, we don't need an external or weak wrapper.
  2299. if (llvm::GlobalValue::isLocalLinkage(VarLinkage))
  2300. return VarLinkage;
  2301. // If the thread wrapper is replaceable, give it appropriate linkage.
  2302. if (isThreadWrapperReplaceable(VD, CGM))
  2303. if (!llvm::GlobalVariable::isLinkOnceLinkage(VarLinkage) &&
  2304. !llvm::GlobalVariable::isWeakODRLinkage(VarLinkage))
  2305. return VarLinkage;
  2306. return llvm::GlobalValue::WeakODRLinkage;
  2307. }
/// Get — creating and registering it on first use — the wrapper function
/// through which accesses to the thread_local variable \p VD go. The wrapper
/// returns a pointer to the variable's value (for references, a pointer to
/// the referenced object).
llvm::Function *
ItaniumCXXABI::getOrCreateThreadLocalWrapper(const VarDecl *VD,
                                             llvm::Value *Val) {
  // Mangle the name for the thread_local wrapper function.
  SmallString<256> WrapperName;
  {
    llvm::raw_svector_ostream Out(WrapperName);
    getMangleContext().mangleItaniumThreadLocalWrapper(VD, Out);
  }

  // FIXME: If VD is a definition, we should regenerate the function attributes
  // before returning.
  if (llvm::Value *V = CGM.getModule().getNamedValue(WrapperName))
    return cast<llvm::Function>(V);

  // The wrapper returns a pointer to the (non-reference) variable type.
  QualType RetQT = VD->getType();
  if (RetQT->isReferenceType())
    RetQT = RetQT.getNonReferenceType();

  const CGFunctionInfo &FI = CGM.getTypes().arrangeBuiltinFunctionDeclaration(
      getContext().getPointerType(RetQT), FunctionArgList());

  llvm::FunctionType *FnTy = CGM.getTypes().GetFunctionType(FI);
  llvm::Function *Wrapper =
      llvm::Function::Create(FnTy, getThreadLocalWrapperLinkage(VD, CGM),
                             WrapperName.str(), &CGM.getModule());

  // Weak wrappers are placed in a COMDAT so the linker keeps one copy.
  if (CGM.supportsCOMDAT() && Wrapper->isWeakForLinker())
    Wrapper->setComdat(CGM.getModule().getOrInsertComdat(Wrapper->getName()));

  CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI, Wrapper, /*IsThunk=*/false);

  // Always resolve references to the wrapper at link time.
  if (!Wrapper->hasLocalLinkage())
    if (!isThreadWrapperReplaceable(VD, CGM) ||
        llvm::GlobalVariable::isLinkOnceLinkage(Wrapper->getLinkage()) ||
        llvm::GlobalVariable::isWeakODRLinkage(Wrapper->getLinkage()) ||
        VD->getVisibility() == HiddenVisibility)
      Wrapper->setVisibility(llvm::GlobalValue::HiddenVisibility);

  // Replaceable wrappers use the CXX_FAST_TLS calling convention and are
  // marked non-unwinding.
  if (isThreadWrapperReplaceable(VD, CGM)) {
    Wrapper->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
    Wrapper->addFnAttr(llvm::Attribute::NoUnwind);
  }

  // Record the wrapper so EmitThreadLocalInitFuncs can emit its body later.
  ThreadWrappers.push_back({VD, Wrapper});

  return Wrapper;
}
/// Emit the __tls_init function (running all ordered dynamic TLS
/// initializers behind a per-thread guard), the per-variable init
/// functions/aliases, and the body of every referenced thread wrapper for
/// this translation unit's thread_local variables.
void ItaniumCXXABI::EmitThreadLocalInitFuncs(
    CodeGenModule &CGM, ArrayRef<const VarDecl *> CXXThreadLocals,
    ArrayRef<llvm::Function *> CXXThreadLocalInits,
    ArrayRef<const VarDecl *> CXXThreadLocalInitVars) {
  llvm::Function *InitFunc = nullptr;

  // Separate initializers into those with ordered (or partially-ordered)
  // initialization and those with unordered initialization.
  llvm::SmallVector<llvm::Function *, 8> OrderedInits;
  llvm::SmallDenseMap<const VarDecl *, llvm::Function *> UnorderedInits;
  for (unsigned I = 0; I != CXXThreadLocalInits.size(); ++I) {
    // Template instantiations are unordered; everything else keeps its
    // declaration order.
    if (isTemplateInstantiation(
            CXXThreadLocalInitVars[I]->getTemplateSpecializationKind()))
      UnorderedInits[CXXThreadLocalInitVars[I]->getCanonicalDecl()] =
          CXXThreadLocalInits[I];
    else
      OrderedInits.push_back(CXXThreadLocalInits[I]);
  }

  if (!OrderedInits.empty()) {
    // Generate a guarded initialization function.
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
    const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
    InitFunc = CGM.CreateGlobalInitOrCleanUpFunction(FTy, "__tls_init", FI,
                                                     SourceLocation(),
                                                     /*TLS=*/true);
    // Per-thread guard byte; zero-initialized, flipped once __tls_init runs.
    llvm::GlobalVariable *Guard = new llvm::GlobalVariable(
        CGM.getModule(), CGM.Int8Ty, /*isConstant=*/false,
        llvm::GlobalVariable::InternalLinkage,
        llvm::ConstantInt::get(CGM.Int8Ty, 0), "__tls_guard");
    Guard->setThreadLocal(true);
    Guard->setThreadLocalMode(CGM.GetDefaultLLVMTLSModel());

    CharUnits GuardAlign = CharUnits::One();
    Guard->setAlignment(GuardAlign.getAsAlign());

    CodeGenFunction(CGM).GenerateCXXGlobalInitFunc(
        InitFunc, OrderedInits, ConstantAddress(Guard, CGM.Int8Ty, GuardAlign));
    // On Darwin platforms, use CXX_FAST_TLS calling convention.
    if (CGM.getTarget().getTriple().isOSDarwin()) {
      InitFunc->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
      InitFunc->addFnAttr(llvm::Attribute::NoUnwind);
    }
  }

  // Create declarations for thread wrappers for all thread-local variables
  // with non-discardable definitions in this translation unit.
  for (const VarDecl *VD : CXXThreadLocals) {
    if (VD->hasDefinition() &&
        !isDiscardableGVALinkage(getContext().GetGVALinkageForVariable(VD))) {
      llvm::GlobalValue *GV = CGM.GetGlobalValue(CGM.getMangledName(VD));
      getOrCreateThreadLocalWrapper(VD, GV);
    }
  }

  // Emit all referenced thread wrappers.
  for (auto VDAndWrapper : ThreadWrappers) {
    const VarDecl *VD = VDAndWrapper.first;
    llvm::GlobalVariable *Var =
        cast<llvm::GlobalVariable>(CGM.GetGlobalValue(CGM.getMangledName(VD)));
    llvm::Function *Wrapper = VDAndWrapper.second;

    // Some targets require that all access to thread local variables go
    // through the thread wrapper. This means that we cannot attempt to create
    // a thread wrapper or a thread helper.
    if (!VD->hasDefinition()) {
      if (isThreadWrapperReplaceable(VD, CGM)) {
        Wrapper->setLinkage(llvm::Function::ExternalLinkage);
        continue;
      }

      // If this isn't a TU in which this variable is defined, the thread
      // wrapper is discardable.
      if (Wrapper->getLinkage() == llvm::Function::WeakODRLinkage)
        Wrapper->setLinkage(llvm::Function::LinkOnceODRLinkage);
    }

    CGM.SetLLVMFunctionAttributesForDefinition(nullptr, Wrapper);

    // Mangle the name for the thread_local initialization function.
    SmallString<256> InitFnName;
    {
      llvm::raw_svector_ostream Out(InitFnName);
      getMangleContext().mangleItaniumThreadLocalInit(VD, Out);
    }

    llvm::FunctionType *InitFnTy = llvm::FunctionType::get(CGM.VoidTy, false);

    // If we have a definition for the variable, emit the initialization
    // function as an alias to the global Init function (if any). Otherwise,
    // produce a declaration of the initialization function.
    llvm::GlobalValue *Init = nullptr;
    bool InitIsInitFunc = false;
    bool HasConstantInitialization = false;
    if (!usesThreadWrapperFunction(VD)) {
      HasConstantInitialization = true;
    } else if (VD->hasDefinition()) {
      InitIsInitFunc = true;
      llvm::Function *InitFuncToUse = InitFunc;
      // Unordered (template-instantiated) variables use their per-VD init
      // function rather than the shared __tls_init.
      if (isTemplateInstantiation(VD->getTemplateSpecializationKind()))
        InitFuncToUse = UnorderedInits.lookup(VD->getCanonicalDecl());
      if (InitFuncToUse)
        Init = llvm::GlobalAlias::create(Var->getLinkage(), InitFnName.str(),
                                         InitFuncToUse);
    } else {
      // Emit a weak global function referring to the initialization function.
      // This function will not exist if the TU defining the thread_local
      // variable in question does not need any dynamic initialization for
      // its thread_local variables.
      Init = llvm::Function::Create(InitFnTy,
                                    llvm::GlobalVariable::ExternalWeakLinkage,
                                    InitFnName.str(), &CGM.getModule());
      const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
      CGM.SetLLVMFunctionAttributes(
          GlobalDecl(), FI, cast<llvm::Function>(Init), /*IsThunk=*/false);
    }

    if (Init) {
      Init->setVisibility(Var->getVisibility());
      // Don't mark an extern_weak function DSO local on windows.
      if (!CGM.getTriple().isOSWindows() || !Init->hasExternalWeakLinkage())
        Init->setDSOLocal(Var->isDSOLocal());
    }

    llvm::LLVMContext &Context = CGM.getModule().getContext();

    // The linker on AIX is not happy with missing weak symbols. However,
    // other TUs will not know whether the initialization routine exists
    // so create an empty, init function to satisfy the linker.
    // This is needed whenever a thread wrapper function is not used, and
    // also when the symbol is weak.
    if (CGM.getTriple().isOSAIX() && VD->hasDefinition() &&
        isEmittedWithConstantInitializer(VD, true) &&
        !mayNeedDestruction(VD)) {
      // Init should be null. If it were non-null, then the logic above would
      // either be defining the function to be an alias or declaring the
      // function with the expectation that the definition of the variable
      // is elsewhere.
      assert(Init == nullptr && "Expected Init to be null.");

      llvm::Function *Func = llvm::Function::Create(
          InitFnTy, Var->getLinkage(), InitFnName.str(), &CGM.getModule());
      const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
      CGM.SetLLVMFunctionAttributes(GlobalDecl(), FI,
                                    cast<llvm::Function>(Func),
                                    /*IsThunk=*/false);
      // Create a function body that just returns
      llvm::BasicBlock *Entry = llvm::BasicBlock::Create(Context, "", Func);
      CGBuilderTy Builder(CGM, Entry);
      Builder.CreateRetVoid();
    }

    // Now build the wrapper body itself: run the initializer (if needed),
    // then return the variable's address.
    llvm::BasicBlock *Entry = llvm::BasicBlock::Create(Context, "", Wrapper);
    CGBuilderTy Builder(CGM, Entry);
    if (HasConstantInitialization) {
      // No dynamic initialization to invoke.
    } else if (InitIsInitFunc) {
      if (Init) {
        llvm::CallInst *CallVal = Builder.CreateCall(InitFnTy, Init);
        if (isThreadWrapperReplaceable(VD, CGM)) {
          CallVal->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
          llvm::Function *Fn =
              cast<llvm::Function>(cast<llvm::GlobalAlias>(Init)->getAliasee());
          Fn->setCallingConv(llvm::CallingConv::CXX_FAST_TLS);
        }
      }
    } else if (CGM.getTriple().isOSAIX()) {
      // On AIX, except if constinit and also neither of class type or of
      // (possibly multi-dimensional) array of class type, thread_local vars
      // will have init routines regardless of whether they are
      // const-initialized. Since the routine is guaranteed to exist, we can
      // unconditionally call it without testing for its existence. This
      // avoids potentially unresolved weak symbols which the AIX linker
      // isn't happy with.
      Builder.CreateCall(InitFnTy, Init);
    } else {
      // Don't know whether we have an init function. Call it if it exists.
      llvm::Value *Have = Builder.CreateIsNotNull(Init);
      llvm::BasicBlock *InitBB = llvm::BasicBlock::Create(Context, "", Wrapper);
      llvm::BasicBlock *ExitBB = llvm::BasicBlock::Create(Context, "", Wrapper);
      Builder.CreateCondBr(Have, InitBB, ExitBB);

      Builder.SetInsertPoint(InitBB);
      Builder.CreateCall(InitFnTy, Init);
      Builder.CreateBr(ExitBB);

      Builder.SetInsertPoint(ExitBB);
    }

    // For a reference, the result of the wrapper function is a pointer to
    // the referenced object.
    llvm::Value *Val = Var;
    if (VD->getType()->isReferenceType()) {
      CharUnits Align = CGM.getContext().getDeclAlign(VD);
      Val = Builder.CreateAlignedLoad(Var->getValueType(), Var, Align);
    }
    if (Val->getType() != Wrapper->getReturnType())
      Val = Builder.CreatePointerBitCastOrAddrSpaceCast(
          Val, Wrapper->getReturnType(), "");

    Builder.CreateRet(Val);
  }
}
  2530. LValue ItaniumCXXABI::EmitThreadLocalVarDeclLValue(CodeGenFunction &CGF,
  2531. const VarDecl *VD,
  2532. QualType LValType) {
  2533. llvm::Value *Val = CGF.CGM.GetAddrOfGlobalVar(VD);
  2534. llvm::Function *Wrapper = getOrCreateThreadLocalWrapper(VD, Val);
  2535. llvm::CallInst *CallVal = CGF.Builder.CreateCall(Wrapper);
  2536. CallVal->setCallingConv(Wrapper->getCallingConv());
  2537. LValue LV;
  2538. if (VD->getType()->isReferenceType())
  2539. LV = CGF.MakeNaturalAlignAddrLValue(CallVal, LValType);
  2540. else
  2541. LV = CGF.MakeAddrLValue(CallVal, LValType,
  2542. CGF.getContext().getDeclAlign(VD));
  2543. // FIXME: need setObjCGCLValueClass?
  2544. return LV;
  2545. }
  2546. /// Return whether the given global decl needs a VTT parameter, which it does
  2547. /// if it's a base constructor or destructor with virtual bases.
  2548. bool ItaniumCXXABI::NeedsVTTParameter(GlobalDecl GD) {
  2549. const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  2550. // We don't have any virtual bases, just return early.
  2551. if (!MD->getParent()->getNumVBases())
  2552. return false;
  2553. // Check if we have a base constructor.
  2554. if (isa<CXXConstructorDecl>(MD) && GD.getCtorType() == Ctor_Base)
  2555. return true;
  2556. // Check if we have a base destructor.
  2557. if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
  2558. return true;
  2559. return false;
  2560. }
namespace {
/// ItaniumRTTIBuilder - Builds Itanium-ABI RTTI descriptors, accumulating the
/// descriptor's fields in \c Fields before emitting them.
class ItaniumRTTIBuilder {
  CodeGenModule &CGM;  // Per-module state.
  llvm::LLVMContext &VMContext;
  const ItaniumCXXABI &CXXABI;  // Per-module state.

  /// Fields - The fields of the RTTI descriptor currently being built.
  SmallVector<llvm::Constant *, 16> Fields;

  /// GetAddrOfTypeName - Returns the mangled type name of the given type.
  llvm::GlobalVariable *
  GetAddrOfTypeName(QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage);

  /// GetAddrOfExternalRTTIDescriptor - Returns the constant for the RTTI
  /// descriptor of the given type.
  llvm::Constant *GetAddrOfExternalRTTIDescriptor(QualType Ty);

  /// BuildVTablePointer - Build the vtable pointer for the given type.
  void BuildVTablePointer(const Type *Ty);

  /// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for
  /// single inheritance, according to the Itanium C++ ABI, 2.9.5p6b.
  void BuildSIClassTypeInfo(const CXXRecordDecl *RD);

  /// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
  /// classes with bases that do not satisfy the abi::__si_class_type_info
  /// constraints, according to the Itanium C++ ABI, 2.9.5p5c.
  void BuildVMIClassTypeInfo(const CXXRecordDecl *RD);

  /// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct, used
  /// for pointer types.
  void BuildPointerTypeInfo(QualType PointeeTy);

  /// BuildObjCObjectTypeInfo - Build the appropriate kind of
  /// type_info for an object type.
  void BuildObjCObjectTypeInfo(const ObjCObjectType *Ty);

  /// BuildPointerToMemberTypeInfo - Build an
  /// abi::__pointer_to_member_type_info struct, used for member pointer
  /// types.
  void BuildPointerToMemberTypeInfo(const MemberPointerType *Ty);

public:
  ItaniumRTTIBuilder(const ItaniumCXXABI &ABI)
      : CGM(ABI.CGM), VMContext(CGM.getModule().getContext()), CXXABI(ABI) {}

  // Pointer type info flags.
  enum {
    /// PTI_Const - Type has const qualifier.
    PTI_Const = 0x1,

    /// PTI_Volatile - Type has volatile qualifier.
    PTI_Volatile = 0x2,

    /// PTI_Restrict - Type has restrict qualifier.
    PTI_Restrict = 0x4,

    /// PTI_Incomplete - Type is incomplete.
    PTI_Incomplete = 0x8,

    /// PTI_ContainingClassIncomplete - Containing class is incomplete.
    /// (in pointer to member).
    PTI_ContainingClassIncomplete = 0x10,

    /// PTI_TransactionSafe - Pointee is transaction_safe function (C++ TM TS).
    //PTI_TransactionSafe = 0x20,

    /// PTI_Noexcept - Pointee is noexcept function (C++1z).
    PTI_Noexcept = 0x40,
  };

  // VMI type info flags.
  enum {
    /// VMI_NonDiamondRepeat - Class has non-diamond repeated inheritance.
    VMI_NonDiamondRepeat = 0x1,

    /// VMI_DiamondShaped - Class is diamond shaped.
    VMI_DiamondShaped = 0x2
  };

  // Base class type info flags.
  enum {
    /// BCTI_Virtual - Base class is virtual.
    BCTI_Virtual = 0x1,

    /// BCTI_Public - Base class is public.
    BCTI_Public = 0x2
  };

  /// BuildTypeInfo - Build the RTTI type info struct for the given type, or
  /// link to an existing RTTI descriptor if one already exists.
  llvm::Constant *BuildTypeInfo(QualType Ty);

  /// BuildTypeInfo - Build the RTTI type info struct for the given type.
  llvm::Constant *BuildTypeInfo(
      QualType Ty,
      llvm::GlobalVariable::LinkageTypes Linkage,
      llvm::GlobalValue::VisibilityTypes Visibility,
      llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass);
};
}
  2638. llvm::GlobalVariable *ItaniumRTTIBuilder::GetAddrOfTypeName(
  2639. QualType Ty, llvm::GlobalVariable::LinkageTypes Linkage) {
  2640. SmallString<256> Name;
  2641. llvm::raw_svector_ostream Out(Name);
  2642. CGM.getCXXABI().getMangleContext().mangleCXXRTTIName(Ty, Out);
  2643. // We know that the mangled name of the type starts at index 4 of the
  2644. // mangled name of the typename, so we can just index into it in order to
  2645. // get the mangled name of the type.
  2646. llvm::Constant *Init = llvm::ConstantDataArray::getString(VMContext,
  2647. Name.substr(4));
  2648. auto Align = CGM.getContext().getTypeAlignInChars(CGM.getContext().CharTy);
  2649. llvm::GlobalVariable *GV = CGM.CreateOrReplaceCXXRuntimeVariable(
  2650. Name, Init->getType(), Linkage, Align.getQuantity());
  2651. GV->setInitializer(Init);
  2652. return GV;
  2653. }
  2654. llvm::Constant *
  2655. ItaniumRTTIBuilder::GetAddrOfExternalRTTIDescriptor(QualType Ty) {
  2656. // Mangle the RTTI name.
  2657. SmallString<256> Name;
  2658. llvm::raw_svector_ostream Out(Name);
  2659. CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  2660. // Look for an existing global.
  2661. llvm::GlobalVariable *GV = CGM.getModule().getNamedGlobal(Name);
  2662. if (!GV) {
  2663. // Create a new global variable.
  2664. // Note for the future: If we would ever like to do deferred emission of
  2665. // RTTI, check if emitting vtables opportunistically need any adjustment.
  2666. GV = new llvm::GlobalVariable(CGM.getModule(), CGM.Int8PtrTy,
  2667. /*isConstant=*/true,
  2668. llvm::GlobalValue::ExternalLinkage, nullptr,
  2669. Name);
  2670. const CXXRecordDecl *RD = Ty->getAsCXXRecordDecl();
  2671. CGM.setGVProperties(GV, RD);
  2672. // Import the typeinfo symbol when all non-inline virtual methods are
  2673. // imported.
  2674. if (CGM.getTarget().hasPS4DLLImportExport()) {
  2675. if (RD && CXXRecordAllNonInlineVirtualsHaveAttr<DLLImportAttr>(RD)) {
  2676. GV->setDLLStorageClass(llvm::GlobalVariable::DLLImportStorageClass);
  2677. CGM.setDSOLocal(GV);
  2678. }
  2679. }
  2680. }
  2681. return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
  2682. }
  2683. /// TypeInfoIsInStandardLibrary - Given a builtin type, returns whether the type
  2684. /// info for that type is defined in the standard library.
  2685. static bool TypeInfoIsInStandardLibrary(const BuiltinType *Ty) {
  2686. // Itanium C++ ABI 2.9.2:
  2687. // Basic type information (e.g. for "int", "bool", etc.) will be kept in
  2688. // the run-time support library. Specifically, the run-time support
  2689. // library should contain type_info objects for the types X, X* and
  2690. // X const*, for every X in: void, std::nullptr_t, bool, wchar_t, char,
  2691. // unsigned char, signed char, short, unsigned short, int, unsigned int,
  2692. // long, unsigned long, long long, unsigned long long, float, double,
  2693. // long double, char16_t, char32_t, and the IEEE 754r decimal and
  2694. // half-precision floating point types.
  2695. //
  2696. // GCC also emits RTTI for __int128.
  2697. // FIXME: We do not emit RTTI information for decimal types here.
  2698. // Types added here must also be added to EmitFundamentalRTTIDescriptors.
  2699. switch (Ty->getKind()) {
  2700. case BuiltinType::Void:
  2701. case BuiltinType::NullPtr:
  2702. case BuiltinType::Bool:
  2703. case BuiltinType::WChar_S:
  2704. case BuiltinType::WChar_U:
  2705. case BuiltinType::Char_U:
  2706. case BuiltinType::Char_S:
  2707. case BuiltinType::UChar:
  2708. case BuiltinType::SChar:
  2709. case BuiltinType::Short:
  2710. case BuiltinType::UShort:
  2711. case BuiltinType::Int:
  2712. case BuiltinType::UInt:
  2713. case BuiltinType::Long:
  2714. case BuiltinType::ULong:
  2715. case BuiltinType::LongLong:
  2716. case BuiltinType::ULongLong:
  2717. case BuiltinType::Half:
  2718. case BuiltinType::Float:
  2719. case BuiltinType::Double:
  2720. case BuiltinType::LongDouble:
  2721. case BuiltinType::Float16:
  2722. case BuiltinType::Float128:
  2723. case BuiltinType::Ibm128:
  2724. case BuiltinType::Char8:
  2725. case BuiltinType::Char16:
  2726. case BuiltinType::Char32:
  2727. case BuiltinType::Int128:
  2728. case BuiltinType::UInt128:
  2729. return true;
  2730. #define IMAGE_TYPE(ImgType, Id, SingletonId, Access, Suffix) \
  2731. case BuiltinType::Id:
  2732. #include "clang/Basic/OpenCLImageTypes.def"
  2733. #define EXT_OPAQUE_TYPE(ExtType, Id, Ext) \
  2734. case BuiltinType::Id:
  2735. #include "clang/Basic/OpenCLExtensionTypes.def"
  2736. case BuiltinType::OCLSampler:
  2737. case BuiltinType::OCLEvent:
  2738. case BuiltinType::OCLClkEvent:
  2739. case BuiltinType::OCLQueue:
  2740. case BuiltinType::OCLReserveID:
  2741. #define SVE_TYPE(Name, Id, SingletonId) \
  2742. case BuiltinType::Id:
  2743. #include "clang/Basic/AArch64SVEACLETypes.def"
  2744. #define PPC_VECTOR_TYPE(Name, Id, Size) \
  2745. case BuiltinType::Id:
  2746. #include "clang/Basic/PPCTypes.def"
  2747. #define RVV_TYPE(Name, Id, SingletonId) case BuiltinType::Id:
  2748. #include "clang/Basic/RISCVVTypes.def"
  2749. case BuiltinType::ShortAccum:
  2750. case BuiltinType::Accum:
  2751. case BuiltinType::LongAccum:
  2752. case BuiltinType::UShortAccum:
  2753. case BuiltinType::UAccum:
  2754. case BuiltinType::ULongAccum:
  2755. case BuiltinType::ShortFract:
  2756. case BuiltinType::Fract:
  2757. case BuiltinType::LongFract:
  2758. case BuiltinType::UShortFract:
  2759. case BuiltinType::UFract:
  2760. case BuiltinType::ULongFract:
  2761. case BuiltinType::SatShortAccum:
  2762. case BuiltinType::SatAccum:
  2763. case BuiltinType::SatLongAccum:
  2764. case BuiltinType::SatUShortAccum:
  2765. case BuiltinType::SatUAccum:
  2766. case BuiltinType::SatULongAccum:
  2767. case BuiltinType::SatShortFract:
  2768. case BuiltinType::SatFract:
  2769. case BuiltinType::SatLongFract:
  2770. case BuiltinType::SatUShortFract:
  2771. case BuiltinType::SatUFract:
  2772. case BuiltinType::SatULongFract:
  2773. case BuiltinType::BFloat16:
  2774. return false;
  2775. case BuiltinType::Dependent:
  2776. #define BUILTIN_TYPE(Id, SingletonId)
  2777. #define PLACEHOLDER_TYPE(Id, SingletonId) \
  2778. case BuiltinType::Id:
  2779. #include "clang/AST/BuiltinTypes.def"
  2780. llvm_unreachable("asking for RRTI for a placeholder type!");
  2781. case BuiltinType::ObjCId:
  2782. case BuiltinType::ObjCClass:
  2783. case BuiltinType::ObjCSel:
  2784. llvm_unreachable("FIXME: Objective-C types are unsupported!");
  2785. }
  2786. llvm_unreachable("Invalid BuiltinType Kind!");
  2787. }
  2788. static bool TypeInfoIsInStandardLibrary(const PointerType *PointerTy) {
  2789. QualType PointeeTy = PointerTy->getPointeeType();
  2790. const BuiltinType *BuiltinTy = dyn_cast<BuiltinType>(PointeeTy);
  2791. if (!BuiltinTy)
  2792. return false;
  2793. // Check the qualifiers.
  2794. Qualifiers Quals = PointeeTy.getQualifiers();
  2795. Quals.removeConst();
  2796. if (!Quals.empty())
  2797. return false;
  2798. return TypeInfoIsInStandardLibrary(BuiltinTy);
  2799. }
  2800. /// IsStandardLibraryRTTIDescriptor - Returns whether the type
  2801. /// information for the given type exists in the standard library.
  2802. static bool IsStandardLibraryRTTIDescriptor(QualType Ty) {
  2803. // Type info for builtin types is defined in the standard library.
  2804. if (const BuiltinType *BuiltinTy = dyn_cast<BuiltinType>(Ty))
  2805. return TypeInfoIsInStandardLibrary(BuiltinTy);
  2806. // Type info for some pointer types to builtin types is defined in the
  2807. // standard library.
  2808. if (const PointerType *PointerTy = dyn_cast<PointerType>(Ty))
  2809. return TypeInfoIsInStandardLibrary(PointerTy);
  2810. return false;
  2811. }
  2812. /// ShouldUseExternalRTTIDescriptor - Returns whether the type information for
  2813. /// the given type exists somewhere else, and that we should not emit the type
  2814. /// information in this translation unit. Assumes that it is not a
  2815. /// standard-library type.
  2816. static bool ShouldUseExternalRTTIDescriptor(CodeGenModule &CGM,
  2817. QualType Ty) {
  2818. ASTContext &Context = CGM.getContext();
  2819. // If RTTI is disabled, assume it might be disabled in the
  2820. // translation unit that defines any potential key function, too.
  2821. if (!Context.getLangOpts().RTTI) return false;
  2822. if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
  2823. const CXXRecordDecl *RD = cast<CXXRecordDecl>(RecordTy->getDecl());
  2824. if (!RD->hasDefinition())
  2825. return false;
  2826. if (!RD->isDynamicClass())
  2827. return false;
  2828. // FIXME: this may need to be reconsidered if the key function
  2829. // changes.
  2830. // N.B. We must always emit the RTTI data ourselves if there exists a key
  2831. // function.
  2832. bool IsDLLImport = RD->hasAttr<DLLImportAttr>();
  2833. // Don't import the RTTI but emit it locally.
  2834. if (CGM.getTriple().isWindowsGNUEnvironment())
  2835. return false;
  2836. if (CGM.getVTables().isVTableExternal(RD)) {
  2837. if (CGM.getTarget().hasPS4DLLImportExport())
  2838. return true;
  2839. return IsDLLImport && !CGM.getTriple().isWindowsItaniumEnvironment()
  2840. ? false
  2841. : true;
  2842. }
  2843. if (IsDLLImport)
  2844. return true;
  2845. }
  2846. return false;
  2847. }
  2848. /// IsIncompleteClassType - Returns whether the given record type is incomplete.
  2849. static bool IsIncompleteClassType(const RecordType *RecordTy) {
  2850. return !RecordTy->getDecl()->isCompleteDefinition();
  2851. }
  2852. /// ContainsIncompleteClassType - Returns whether the given type contains an
  2853. /// incomplete class type. This is true if
  2854. ///
  2855. /// * The given type is an incomplete class type.
  2856. /// * The given type is a pointer type whose pointee type contains an
  2857. /// incomplete class type.
  2858. /// * The given type is a member pointer type whose class is an incomplete
  2859. /// class type.
  2860. /// * The given type is a member pointer type whoise pointee type contains an
  2861. /// incomplete class type.
  2862. /// is an indirect or direct pointer to an incomplete class type.
  2863. static bool ContainsIncompleteClassType(QualType Ty) {
  2864. if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
  2865. if (IsIncompleteClassType(RecordTy))
  2866. return true;
  2867. }
  2868. if (const PointerType *PointerTy = dyn_cast<PointerType>(Ty))
  2869. return ContainsIncompleteClassType(PointerTy->getPointeeType());
  2870. if (const MemberPointerType *MemberPointerTy =
  2871. dyn_cast<MemberPointerType>(Ty)) {
  2872. // Check if the class type is incomplete.
  2873. const RecordType *ClassType = cast<RecordType>(MemberPointerTy->getClass());
  2874. if (IsIncompleteClassType(ClassType))
  2875. return true;
  2876. return ContainsIncompleteClassType(MemberPointerTy->getPointeeType());
  2877. }
  2878. return false;
  2879. }
  2880. // CanUseSingleInheritance - Return whether the given record decl has a "single,
  2881. // public, non-virtual base at offset zero (i.e. the derived class is dynamic
  2882. // iff the base is)", according to Itanium C++ ABI, 2.95p6b.
  2883. static bool CanUseSingleInheritance(const CXXRecordDecl *RD) {
  2884. // Check the number of bases.
  2885. if (RD->getNumBases() != 1)
  2886. return false;
  2887. // Get the base.
  2888. CXXRecordDecl::base_class_const_iterator Base = RD->bases_begin();
  2889. // Check that the base is not virtual.
  2890. if (Base->isVirtual())
  2891. return false;
  2892. // Check that the base is public.
  2893. if (Base->getAccessSpecifier() != AS_public)
  2894. return false;
  2895. // Check that the class is dynamic iff the base is.
  2896. auto *BaseDecl =
  2897. cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());
  2898. if (!BaseDecl->isEmpty() &&
  2899. BaseDecl->isDynamicClass() != RD->isDynamicClass())
  2900. return false;
  2901. return true;
  2902. }
/// BuildVTablePointer - Emit the vtable-pointer field of a type_info object:
/// a pointer to the address point of the vtable of the libc++abi/libsupc++
/// RTTI class matching Ty's type class (e.g. abi::__class_type_info for class
/// types with no bases).
void ItaniumRTTIBuilder::BuildVTablePointer(const Type *Ty) {
  // abi::__class_type_info.
  static const char * const ClassTypeInfo =
    "_ZTVN10__cxxabiv117__class_type_infoE";
  // abi::__si_class_type_info.
  static const char * const SIClassTypeInfo =
    "_ZTVN10__cxxabiv120__si_class_type_infoE";
  // abi::__vmi_class_type_info.
  static const char * const VMIClassTypeInfo =
    "_ZTVN10__cxxabiv121__vmi_class_type_infoE";

  const char *VTableName = nullptr;

  switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.inc"
    llvm_unreachable("Non-canonical and dependent types shouldn't get here");

  case Type::LValueReference:
  case Type::RValueReference:
    llvm_unreachable("References shouldn't get here");

  case Type::Auto:
  case Type::DeducedTemplateSpecialization:
    llvm_unreachable("Undeduced type shouldn't get here");

  case Type::Pipe:
    llvm_unreachable("Pipe types shouldn't get here");

  case Type::Builtin:
  case Type::BitInt:
  // GCC treats vector and complex types as fundamental types.
  case Type::Vector:
  case Type::ExtVector:
  case Type::ConstantMatrix:
  case Type::Complex:
  case Type::Atomic:
  // FIXME: GCC treats block pointers as fundamental types?!
  case Type::BlockPointer:
    // abi::__fundamental_type_info.
    VTableName = "_ZTVN10__cxxabiv123__fundamental_type_infoE";
    break;

  case Type::ConstantArray:
  case Type::IncompleteArray:
  case Type::VariableArray:
    // abi::__array_type_info.
    VTableName = "_ZTVN10__cxxabiv117__array_type_infoE";
    break;

  case Type::FunctionNoProto:
  case Type::FunctionProto:
    // abi::__function_type_info.
    VTableName = "_ZTVN10__cxxabiv120__function_type_infoE";
    break;

  case Type::Enum:
    // abi::__enum_type_info.
    VTableName = "_ZTVN10__cxxabiv116__enum_type_infoE";
    break;

  case Type::Record: {
    // Class types: plain, single-inheritance, or virtual/multiple-inheritance
    // RTTI class depending on the base structure.
    const CXXRecordDecl *RD =
      cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());

    if (!RD->hasDefinition() || !RD->getNumBases()) {
      VTableName = ClassTypeInfo;
    } else if (CanUseSingleInheritance(RD)) {
      VTableName = SIClassTypeInfo;
    } else {
      VTableName = VMIClassTypeInfo;
    }

    break;
  }

  case Type::ObjCObject:
    // Ignore protocol qualifiers.
    Ty = cast<ObjCObjectType>(Ty)->getBaseType().getTypePtr();

    // Handle id and Class.
    if (isa<BuiltinType>(Ty)) {
      VTableName = ClassTypeInfo;
      break;
    }

    assert(isa<ObjCInterfaceType>(Ty));
    LLVM_FALLTHROUGH;

  case Type::ObjCInterface:
    // An interface with a superclass looks like single inheritance.
    if (cast<ObjCInterfaceType>(Ty)->getDecl()->getSuperClass()) {
      VTableName = SIClassTypeInfo;
    } else {
      VTableName = ClassTypeInfo;
    }
    break;

  case Type::ObjCObjectPointer:
  case Type::Pointer:
    // abi::__pointer_type_info.
    VTableName = "_ZTVN10__cxxabiv119__pointer_type_infoE";
    break;

  case Type::MemberPointer:
    // abi::__pointer_to_member_type_info.
    VTableName = "_ZTVN10__cxxabiv129__pointer_to_member_type_infoE";
    break;
  }

  llvm::Constant *VTable = nullptr;

  // Check if the alias exists. If it doesn't, then get or create the global.
  if (CGM.getItaniumVTableContext().isRelativeLayout())
    VTable = CGM.getModule().getNamedAlias(VTableName);
  if (!VTable)
    VTable = CGM.getModule().getOrInsertGlobal(VTableName, CGM.Int8PtrTy);

  CGM.setDSOLocal(cast<llvm::GlobalValue>(VTable->stripPointerCasts()));

  llvm::Type *PtrDiffTy =
      CGM.getTypes().ConvertType(CGM.getContext().getPointerDiffType());

  // The vtable address point is 2.
  if (CGM.getItaniumVTableContext().isRelativeLayout()) {
    // The vtable address point is 8 bytes after its start:
    // 4 for the offset to top + 4 for the relative offset to rtti.
    llvm::Constant *Eight = llvm::ConstantInt::get(CGM.Int32Ty, 8);
    VTable = llvm::ConstantExpr::getBitCast(VTable, CGM.Int8PtrTy);
    VTable =
        llvm::ConstantExpr::getInBoundsGetElementPtr(CGM.Int8Ty, VTable, Eight);
  } else {
    llvm::Constant *Two = llvm::ConstantInt::get(PtrDiffTy, 2);
    VTable = llvm::ConstantExpr::getInBoundsGetElementPtr(CGM.Int8PtrTy, VTable,
                                                          Two);
  }
  VTable = llvm::ConstantExpr::getBitCast(VTable, CGM.Int8PtrTy);

  Fields.push_back(VTable);
}
/// Return the linkage that the type info and type info name constants
/// should have for the given type.
static llvm::GlobalVariable::LinkageTypes getTypeInfoLinkage(CodeGenModule &CGM,
                                                             QualType Ty) {
  // Itanium C++ ABI 2.9.5p7:
  //   In addition, it and all of the intermediate abi::__pointer_type_info
  //   structs in the chain down to the abi::__class_type_info for the
  //   incomplete class type must be prevented from resolving to the
  //   corresponding type_info structs for the complete class type, possibly
  //   by making them local static objects. Finally, a dummy class RTTI is
  //   generated for the incomplete type that will not resolve to the final
  //   complete class RTTI (because the latter need not exist), possibly by
  //   making it a local static object.
  if (ContainsIncompleteClassType(Ty))
    return llvm::GlobalValue::InternalLinkage;

  switch (Ty->getLinkage()) {
  case NoLinkage:
  case InternalLinkage:
  case UniqueExternalLinkage:
    return llvm::GlobalValue::InternalLinkage;

  case VisibleNoLinkage:
  case ModuleInternalLinkage:
  case ModuleLinkage:
  case ExternalLinkage:
    // RTTI is not enabled, which means that this type info struct is going
    // to be used for exception handling. Give it linkonce_odr linkage.
    if (!CGM.getLangOpts().RTTI)
      return llvm::GlobalValue::LinkOnceODRLinkage;

    if (const RecordType *Record = dyn_cast<RecordType>(Ty)) {
      const CXXRecordDecl *RD = cast<CXXRecordDecl>(Record->getDecl());
      // An explicit [[gnu::weak]] on the class forces weak_odr typeinfo.
      if (RD->hasAttr<WeakAttr>())
        return llvm::GlobalValue::WeakODRLinkage;
      // On Windows Itanium, dllimported classes with external descriptors
      // reference the imported typeinfo rather than emitting one.
      if (CGM.getTriple().isWindowsItaniumEnvironment())
        if (RD->hasAttr<DLLImportAttr>() &&
            ShouldUseExternalRTTIDescriptor(CGM, Ty))
          return llvm::GlobalValue::ExternalLinkage;
      // MinGW always uses LinkOnceODRLinkage for type info.
      if (RD->isDynamicClass() &&
          !CGM.getContext()
               .getTargetInfo()
               .getTriple()
               .isWindowsGNUEnvironment())
        // Dynamic classes key their typeinfo linkage off the vtable's.
        return CGM.getVTableLinkage(RD);
    }

    return llvm::GlobalValue::LinkOnceODRLinkage;
  }

  llvm_unreachable("Invalid linkage!");
}
  3070. llvm::Constant *ItaniumRTTIBuilder::BuildTypeInfo(QualType Ty) {
  3071. // We want to operate on the canonical type.
  3072. Ty = Ty.getCanonicalType();
  3073. // Check if we've already emitted an RTTI descriptor for this type.
  3074. SmallString<256> Name;
  3075. llvm::raw_svector_ostream Out(Name);
  3076. CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  3077. llvm::GlobalVariable *OldGV = CGM.getModule().getNamedGlobal(Name);
  3078. if (OldGV && !OldGV->isDeclaration()) {
  3079. assert(!OldGV->hasAvailableExternallyLinkage() &&
  3080. "available_externally typeinfos not yet implemented");
  3081. return llvm::ConstantExpr::getBitCast(OldGV, CGM.Int8PtrTy);
  3082. }
  3083. // Check if there is already an external RTTI descriptor for this type.
  3084. if (IsStandardLibraryRTTIDescriptor(Ty) ||
  3085. ShouldUseExternalRTTIDescriptor(CGM, Ty))
  3086. return GetAddrOfExternalRTTIDescriptor(Ty);
  3087. // Emit the standard library with external linkage.
  3088. llvm::GlobalVariable::LinkageTypes Linkage = getTypeInfoLinkage(CGM, Ty);
  3089. // Give the type_info object and name the formal visibility of the
  3090. // type itself.
  3091. llvm::GlobalValue::VisibilityTypes llvmVisibility;
  3092. if (llvm::GlobalValue::isLocalLinkage(Linkage))
  3093. // If the linkage is local, only default visibility makes sense.
  3094. llvmVisibility = llvm::GlobalValue::DefaultVisibility;
  3095. else if (CXXABI.classifyRTTIUniqueness(Ty, Linkage) ==
  3096. ItaniumCXXABI::RUK_NonUniqueHidden)
  3097. llvmVisibility = llvm::GlobalValue::HiddenVisibility;
  3098. else
  3099. llvmVisibility = CodeGenModule::GetLLVMVisibility(Ty->getVisibility());
  3100. llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass =
  3101. llvm::GlobalValue::DefaultStorageClass;
  3102. if (CGM.getTriple().isWindowsItaniumEnvironment()) {
  3103. auto RD = Ty->getAsCXXRecordDecl();
  3104. if (RD && RD->hasAttr<DLLExportAttr>())
  3105. DLLStorageClass = llvm::GlobalValue::DLLExportStorageClass;
  3106. }
  3107. return BuildTypeInfo(Ty, Linkage, llvmVisibility, DLLStorageClass);
  3108. }
/// Emit the type_info object for \p Ty, with linkage, visibility and DLL
/// storage class already decided by the caller. Appends the vtable pointer,
/// name pointer, and any class/pointer-specific fields, then creates the
/// global (replacing any prior declaration of the same name).
llvm::Constant *ItaniumRTTIBuilder::BuildTypeInfo(
      QualType Ty,
      llvm::GlobalVariable::LinkageTypes Linkage,
      llvm::GlobalValue::VisibilityTypes Visibility,
      llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass) {
  // Add the vtable pointer.
  BuildVTablePointer(cast<Type>(Ty));

  // And the name.
  llvm::GlobalVariable *TypeName = GetAddrOfTypeName(Ty, Linkage);
  llvm::Constant *TypeNameField;

  // If we're supposed to demote the visibility, be sure to set a flag
  // to use a string comparison for type_info comparisons.
  ItaniumCXXABI::RTTIUniquenessKind RTTIUniqueness =
      CXXABI.classifyRTTIUniqueness(Ty, Linkage);
  if (RTTIUniqueness != ItaniumCXXABI::RUK_Unique) {
    // The flag is the sign bit, which on ARM64 is defined to be clear
    // for global pointers. This is very ARM64-specific.
    TypeNameField = llvm::ConstantExpr::getPtrToInt(TypeName, CGM.Int64Ty);
    llvm::Constant *flag =
        llvm::ConstantInt::get(CGM.Int64Ty, ((uint64_t)1) << 63);
    TypeNameField = llvm::ConstantExpr::getAdd(TypeNameField, flag);
    TypeNameField =
        llvm::ConstantExpr::getIntToPtr(TypeNameField, CGM.Int8PtrTy);
  } else {
    TypeNameField = llvm::ConstantExpr::getBitCast(TypeName, CGM.Int8PtrTy);
  }
  Fields.push_back(TypeNameField);

  switch (Ty->getTypeClass()) {
#define TYPE(Class, Base)
#define ABSTRACT_TYPE(Class, Base)
#define NON_CANONICAL_UNLESS_DEPENDENT_TYPE(Class, Base) case Type::Class:
#define NON_CANONICAL_TYPE(Class, Base) case Type::Class:
#define DEPENDENT_TYPE(Class, Base) case Type::Class:
#include "clang/AST/TypeNodes.inc"
    llvm_unreachable("Non-canonical and dependent types shouldn't get here");

  // GCC treats vector types as fundamental types.
  case Type::Builtin:
  case Type::Vector:
  case Type::ExtVector:
  case Type::ConstantMatrix:
  case Type::Complex:
  case Type::BlockPointer:
    // Itanium C++ ABI 2.9.5p4:
    //   abi::__fundamental_type_info adds no data members to std::type_info.
    break;

  case Type::LValueReference:
  case Type::RValueReference:
    llvm_unreachable("References shouldn't get here");

  case Type::Auto:
  case Type::DeducedTemplateSpecialization:
    llvm_unreachable("Undeduced type shouldn't get here");

  case Type::Pipe:
    break;

  case Type::BitInt:
    break;

  case Type::ConstantArray:
  case Type::IncompleteArray:
  case Type::VariableArray:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__array_type_info adds no data members to std::type_info.
    break;

  case Type::FunctionNoProto:
  case Type::FunctionProto:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__function_type_info adds no data members to std::type_info.
    break;

  case Type::Enum:
    // Itanium C++ ABI 2.9.5p5:
    //   abi::__enum_type_info adds no data members to std::type_info.
    break;

  case Type::Record: {
    const CXXRecordDecl *RD =
        cast<CXXRecordDecl>(cast<RecordType>(Ty)->getDecl());
    if (!RD->hasDefinition() || !RD->getNumBases()) {
      // We don't need to emit any fields.
      break;
    }

    // Single public non-virtual base: __si_class_type_info; anything more
    // complex: __vmi_class_type_info.
    if (CanUseSingleInheritance(RD))
      BuildSIClassTypeInfo(RD);
    else
      BuildVMIClassTypeInfo(RD);

    break;
  }

  case Type::ObjCObject:
  case Type::ObjCInterface:
    BuildObjCObjectTypeInfo(cast<ObjCObjectType>(Ty));
    break;

  case Type::ObjCObjectPointer:
    BuildPointerTypeInfo(cast<ObjCObjectPointerType>(Ty)->getPointeeType());
    break;

  case Type::Pointer:
    BuildPointerTypeInfo(cast<PointerType>(Ty)->getPointeeType());
    break;

  case Type::MemberPointer:
    BuildPointerToMemberTypeInfo(cast<MemberPointerType>(Ty));
    break;

  case Type::Atomic:
    // No fields, at least for the moment.
    break;
  }

  llvm::Constant *Init = llvm::ConstantStruct::getAnon(Fields);

  SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  CGM.getCXXABI().getMangleContext().mangleCXXRTTI(Ty, Out);
  llvm::Module &M = CGM.getModule();
  // Look up any existing (declaration-only) global BEFORE creating the new
  // one, so we can RAUW it below.
  llvm::GlobalVariable *OldGV = M.getNamedGlobal(Name);
  llvm::GlobalVariable *GV =
      new llvm::GlobalVariable(M, Init->getType(),
                               /*isConstant=*/true, Linkage, Init, Name);

  // Export the typeinfo in the same circumstances as the vtable is exported.
  auto GVDLLStorageClass = DLLStorageClass;
  if (CGM.getTarget().hasPS4DLLImportExport()) {
    if (const RecordType *RecordTy = dyn_cast<RecordType>(Ty)) {
      const CXXRecordDecl *RD = cast<CXXRecordDecl>(RecordTy->getDecl());
      if (RD->hasAttr<DLLExportAttr>() ||
          CXXRecordAllNonInlineVirtualsHaveAttr<DLLExportAttr>(RD)) {
        GVDLLStorageClass = llvm::GlobalVariable::DLLExportStorageClass;
      }
    }
  }

  // If there's already an old global variable, replace it with the new one.
  if (OldGV) {
    GV->takeName(OldGV);
    llvm::Constant *NewPtr =
      llvm::ConstantExpr::getBitCast(GV, OldGV->getType());
    OldGV->replaceAllUsesWith(NewPtr);
    OldGV->eraseFromParent();
  }

  if (CGM.supportsCOMDAT() && GV->isWeakForLinker())
    GV->setComdat(M.getOrInsertComdat(GV->getName()));

  CharUnits Align =
      CGM.getContext().toCharUnitsFromBits(CGM.getTarget().getPointerAlign(0));
  GV->setAlignment(Align.getAsAlign());

  // The Itanium ABI specifies that type_info objects must be globally
  // unique, with one exception: if the type is an incomplete class
  // type or a (possibly indirect) pointer to one. That exception
  // affects the general case of comparing type_info objects produced
  // by the typeid operator, which is why the comparison operators on
  // std::type_info generally use the type_info name pointers instead
  // of the object addresses. However, the language's built-in uses
  // of RTTI generally require class types to be complete, even when
  // manipulating pointers to those class types. This allows the
  // implementation of dynamic_cast to rely on address equality tests,
  // which is much faster.

  // All of this is to say that it's important that both the type_info
  // object and the type_info name be uniqued when weakly emitted.

  TypeName->setVisibility(Visibility);
  CGM.setDSOLocal(TypeName);

  GV->setVisibility(Visibility);
  CGM.setDSOLocal(GV);

  TypeName->setDLLStorageClass(DLLStorageClass);
  GV->setDLLStorageClass(CGM.getTarget().hasPS4DLLImportExport()
                             ? GVDLLStorageClass
                             : DLLStorageClass);

  TypeName->setPartition(CGM.getCodeGenOpts().SymbolPartition);
  GV->setPartition(CGM.getCodeGenOpts().SymbolPartition);

  return llvm::ConstantExpr::getBitCast(GV, CGM.Int8PtrTy);
}
  3267. /// BuildObjCObjectTypeInfo - Build the appropriate kind of type_info
  3268. /// for the given Objective-C object type.
  3269. void ItaniumRTTIBuilder::BuildObjCObjectTypeInfo(const ObjCObjectType *OT) {
  3270. // Drop qualifiers.
  3271. const Type *T = OT->getBaseType().getTypePtr();
  3272. assert(isa<BuiltinType>(T) || isa<ObjCInterfaceType>(T));
  3273. // The builtin types are abi::__class_type_infos and don't require
  3274. // extra fields.
  3275. if (isa<BuiltinType>(T)) return;
  3276. ObjCInterfaceDecl *Class = cast<ObjCInterfaceType>(T)->getDecl();
  3277. ObjCInterfaceDecl *Super = Class->getSuperClass();
  3278. // Root classes are also __class_type_info.
  3279. if (!Super) return;
  3280. QualType SuperTy = CGM.getContext().getObjCInterfaceType(Super);
  3281. // Everything else is single inheritance.
  3282. llvm::Constant *BaseTypeInfo =
  3283. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(SuperTy);
  3284. Fields.push_back(BaseTypeInfo);
  3285. }
  3286. /// BuildSIClassTypeInfo - Build an abi::__si_class_type_info, used for single
  3287. /// inheritance, according to the Itanium C++ ABI, 2.95p6b.
  3288. void ItaniumRTTIBuilder::BuildSIClassTypeInfo(const CXXRecordDecl *RD) {
  3289. // Itanium C++ ABI 2.9.5p6b:
  3290. // It adds to abi::__class_type_info a single member pointing to the
  3291. // type_info structure for the base type,
  3292. llvm::Constant *BaseTypeInfo =
  3293. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(RD->bases_begin()->getType());
  3294. Fields.push_back(BaseTypeInfo);
  3295. }
namespace {
  /// SeenBases - Contains virtual and non-virtual bases seen when traversing
  /// a class hierarchy. Used to detect repeated and diamond-shaped
  /// inheritance for the __vmi_class_type_info flags.
  struct SeenBases {
    // Non-virtual bases encountered so far (direct or indirect).
    llvm::SmallPtrSet<const CXXRecordDecl *, 16> NonVirtualBases;
    // Virtual bases encountered so far (direct or indirect).
    llvm::SmallPtrSet<const CXXRecordDecl *, 16> VirtualBases;
  };
}
  3304. /// ComputeVMIClassTypeInfoFlags - Compute the value of the flags member in
  3305. /// abi::__vmi_class_type_info.
  3306. ///
  3307. static unsigned ComputeVMIClassTypeInfoFlags(const CXXBaseSpecifier *Base,
  3308. SeenBases &Bases) {
  3309. unsigned Flags = 0;
  3310. auto *BaseDecl =
  3311. cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());
  3312. if (Base->isVirtual()) {
  3313. // Mark the virtual base as seen.
  3314. if (!Bases.VirtualBases.insert(BaseDecl).second) {
  3315. // If this virtual base has been seen before, then the class is diamond
  3316. // shaped.
  3317. Flags |= ItaniumRTTIBuilder::VMI_DiamondShaped;
  3318. } else {
  3319. if (Bases.NonVirtualBases.count(BaseDecl))
  3320. Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat;
  3321. }
  3322. } else {
  3323. // Mark the non-virtual base as seen.
  3324. if (!Bases.NonVirtualBases.insert(BaseDecl).second) {
  3325. // If this non-virtual base has been seen before, then the class has non-
  3326. // diamond shaped repeated inheritance.
  3327. Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat;
  3328. } else {
  3329. if (Bases.VirtualBases.count(BaseDecl))
  3330. Flags |= ItaniumRTTIBuilder::VMI_NonDiamondRepeat;
  3331. }
  3332. }
  3333. // Walk all bases.
  3334. for (const auto &I : BaseDecl->bases())
  3335. Flags |= ComputeVMIClassTypeInfoFlags(&I, Bases);
  3336. return Flags;
  3337. }
  3338. static unsigned ComputeVMIClassTypeInfoFlags(const CXXRecordDecl *RD) {
  3339. unsigned Flags = 0;
  3340. SeenBases Bases;
  3341. // Walk all bases.
  3342. for (const auto &I : RD->bases())
  3343. Flags |= ComputeVMIClassTypeInfoFlags(&I, Bases);
  3344. return Flags;
  3345. }
  3346. /// BuildVMIClassTypeInfo - Build an abi::__vmi_class_type_info, used for
  3347. /// classes with bases that do not satisfy the abi::__si_class_type_info
  3348. /// constraints, according ti the Itanium C++ ABI, 2.9.5p5c.
  3349. void ItaniumRTTIBuilder::BuildVMIClassTypeInfo(const CXXRecordDecl *RD) {
  3350. llvm::Type *UnsignedIntLTy =
  3351. CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  3352. // Itanium C++ ABI 2.9.5p6c:
  3353. // __flags is a word with flags describing details about the class
  3354. // structure, which may be referenced by using the __flags_masks
  3355. // enumeration. These flags refer to both direct and indirect bases.
  3356. unsigned Flags = ComputeVMIClassTypeInfoFlags(RD);
  3357. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));
  3358. // Itanium C++ ABI 2.9.5p6c:
  3359. // __base_count is a word with the number of direct proper base class
  3360. // descriptions that follow.
  3361. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, RD->getNumBases()));
  3362. if (!RD->getNumBases())
  3363. return;
  3364. // Now add the base class descriptions.
  3365. // Itanium C++ ABI 2.9.5p6c:
  3366. // __base_info[] is an array of base class descriptions -- one for every
  3367. // direct proper base. Each description is of the type:
  3368. //
  3369. // struct abi::__base_class_type_info {
  3370. // public:
  3371. // const __class_type_info *__base_type;
  3372. // long __offset_flags;
  3373. //
  3374. // enum __offset_flags_masks {
  3375. // __virtual_mask = 0x1,
  3376. // __public_mask = 0x2,
  3377. // __offset_shift = 8
  3378. // };
  3379. // };
  3380. // If we're in mingw and 'long' isn't wide enough for a pointer, use 'long
  3381. // long' instead of 'long' for __offset_flags. libstdc++abi uses long long on
  3382. // LLP64 platforms.
  3383. // FIXME: Consider updating libc++abi to match, and extend this logic to all
  3384. // LLP64 platforms.
  3385. QualType OffsetFlagsTy = CGM.getContext().LongTy;
  3386. const TargetInfo &TI = CGM.getContext().getTargetInfo();
  3387. if (TI.getTriple().isOSCygMing() && TI.getPointerWidth(0) > TI.getLongWidth())
  3388. OffsetFlagsTy = CGM.getContext().LongLongTy;
  3389. llvm::Type *OffsetFlagsLTy =
  3390. CGM.getTypes().ConvertType(OffsetFlagsTy);
  3391. for (const auto &Base : RD->bases()) {
  3392. // The __base_type member points to the RTTI for the base type.
  3393. Fields.push_back(ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(Base.getType()));
  3394. auto *BaseDecl =
  3395. cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl());
  3396. int64_t OffsetFlags = 0;
  3397. // All but the lower 8 bits of __offset_flags are a signed offset.
  3398. // For a non-virtual base, this is the offset in the object of the base
  3399. // subobject. For a virtual base, this is the offset in the virtual table of
  3400. // the virtual base offset for the virtual base referenced (negative).
  3401. CharUnits Offset;
  3402. if (Base.isVirtual())
  3403. Offset =
  3404. CGM.getItaniumVTableContext().getVirtualBaseOffsetOffset(RD, BaseDecl);
  3405. else {
  3406. const ASTRecordLayout &Layout = CGM.getContext().getASTRecordLayout(RD);
  3407. Offset = Layout.getBaseClassOffset(BaseDecl);
  3408. };
  3409. OffsetFlags = uint64_t(Offset.getQuantity()) << 8;
  3410. // The low-order byte of __offset_flags contains flags, as given by the
  3411. // masks from the enumeration __offset_flags_masks.
  3412. if (Base.isVirtual())
  3413. OffsetFlags |= BCTI_Virtual;
  3414. if (Base.getAccessSpecifier() == AS_public)
  3415. OffsetFlags |= BCTI_Public;
  3416. Fields.push_back(llvm::ConstantInt::get(OffsetFlagsLTy, OffsetFlags));
  3417. }
  3418. }
  3419. /// Compute the flags for a __pbase_type_info, and remove the corresponding
  3420. /// pieces from \p Type.
  3421. static unsigned extractPBaseFlags(ASTContext &Ctx, QualType &Type) {
  3422. unsigned Flags = 0;
  3423. if (Type.isConstQualified())
  3424. Flags |= ItaniumRTTIBuilder::PTI_Const;
  3425. if (Type.isVolatileQualified())
  3426. Flags |= ItaniumRTTIBuilder::PTI_Volatile;
  3427. if (Type.isRestrictQualified())
  3428. Flags |= ItaniumRTTIBuilder::PTI_Restrict;
  3429. Type = Type.getUnqualifiedType();
  3430. // Itanium C++ ABI 2.9.5p7:
  3431. // When the abi::__pbase_type_info is for a direct or indirect pointer to an
  3432. // incomplete class type, the incomplete target type flag is set.
  3433. if (ContainsIncompleteClassType(Type))
  3434. Flags |= ItaniumRTTIBuilder::PTI_Incomplete;
  3435. if (auto *Proto = Type->getAs<FunctionProtoType>()) {
  3436. if (Proto->isNothrow()) {
  3437. Flags |= ItaniumRTTIBuilder::PTI_Noexcept;
  3438. Type = Ctx.getFunctionTypeWithExceptionSpec(Type, EST_None);
  3439. }
  3440. }
  3441. return Flags;
  3442. }
  3443. /// BuildPointerTypeInfo - Build an abi::__pointer_type_info struct,
  3444. /// used for pointer types.
  3445. void ItaniumRTTIBuilder::BuildPointerTypeInfo(QualType PointeeTy) {
  3446. // Itanium C++ ABI 2.9.5p7:
  3447. // __flags is a flag word describing the cv-qualification and other
  3448. // attributes of the type pointed to
  3449. unsigned Flags = extractPBaseFlags(CGM.getContext(), PointeeTy);
  3450. llvm::Type *UnsignedIntLTy =
  3451. CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  3452. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));
  3453. // Itanium C++ ABI 2.9.5p7:
  3454. // __pointee is a pointer to the std::type_info derivation for the
  3455. // unqualified type being pointed to.
  3456. llvm::Constant *PointeeTypeInfo =
  3457. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(PointeeTy);
  3458. Fields.push_back(PointeeTypeInfo);
  3459. }
  3460. /// BuildPointerToMemberTypeInfo - Build an abi::__pointer_to_member_type_info
  3461. /// struct, used for member pointer types.
  3462. void
  3463. ItaniumRTTIBuilder::BuildPointerToMemberTypeInfo(const MemberPointerType *Ty) {
  3464. QualType PointeeTy = Ty->getPointeeType();
  3465. // Itanium C++ ABI 2.9.5p7:
  3466. // __flags is a flag word describing the cv-qualification and other
  3467. // attributes of the type pointed to.
  3468. unsigned Flags = extractPBaseFlags(CGM.getContext(), PointeeTy);
  3469. const RecordType *ClassType = cast<RecordType>(Ty->getClass());
  3470. if (IsIncompleteClassType(ClassType))
  3471. Flags |= PTI_ContainingClassIncomplete;
  3472. llvm::Type *UnsignedIntLTy =
  3473. CGM.getTypes().ConvertType(CGM.getContext().UnsignedIntTy);
  3474. Fields.push_back(llvm::ConstantInt::get(UnsignedIntLTy, Flags));
  3475. // Itanium C++ ABI 2.9.5p7:
  3476. // __pointee is a pointer to the std::type_info derivation for the
  3477. // unqualified type being pointed to.
  3478. llvm::Constant *PointeeTypeInfo =
  3479. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(PointeeTy);
  3480. Fields.push_back(PointeeTypeInfo);
  3481. // Itanium C++ ABI 2.9.5p9:
  3482. // __context is a pointer to an abi::__class_type_info corresponding to the
  3483. // class type containing the member pointed to
  3484. // (e.g., the "A" in "int A::*").
  3485. Fields.push_back(
  3486. ItaniumRTTIBuilder(CXXABI).BuildTypeInfo(QualType(ClassType, 0)));
  3487. }
  3488. llvm::Constant *ItaniumCXXABI::getAddrOfRTTIDescriptor(QualType Ty) {
  3489. return ItaniumRTTIBuilder(*this).BuildTypeInfo(Ty);
  3490. }
  3491. void ItaniumCXXABI::EmitFundamentalRTTIDescriptors(const CXXRecordDecl *RD) {
  3492. // Types added here must also be added to TypeInfoIsInStandardLibrary.
  3493. QualType FundamentalTypes[] = {
  3494. getContext().VoidTy, getContext().NullPtrTy,
  3495. getContext().BoolTy, getContext().WCharTy,
  3496. getContext().CharTy, getContext().UnsignedCharTy,
  3497. getContext().SignedCharTy, getContext().ShortTy,
  3498. getContext().UnsignedShortTy, getContext().IntTy,
  3499. getContext().UnsignedIntTy, getContext().LongTy,
  3500. getContext().UnsignedLongTy, getContext().LongLongTy,
  3501. getContext().UnsignedLongLongTy, getContext().Int128Ty,
  3502. getContext().UnsignedInt128Ty, getContext().HalfTy,
  3503. getContext().FloatTy, getContext().DoubleTy,
  3504. getContext().LongDoubleTy, getContext().Float128Ty,
  3505. getContext().Char8Ty, getContext().Char16Ty,
  3506. getContext().Char32Ty
  3507. };
  3508. llvm::GlobalValue::DLLStorageClassTypes DLLStorageClass =
  3509. RD->hasAttr<DLLExportAttr>()
  3510. ? llvm::GlobalValue::DLLExportStorageClass
  3511. : llvm::GlobalValue::DefaultStorageClass;
  3512. llvm::GlobalValue::VisibilityTypes Visibility =
  3513. CodeGenModule::GetLLVMVisibility(RD->getVisibility());
  3514. for (const QualType &FundamentalType : FundamentalTypes) {
  3515. QualType PointerType = getContext().getPointerType(FundamentalType);
  3516. QualType PointerTypeConst = getContext().getPointerType(
  3517. FundamentalType.withConst());
  3518. for (QualType Type : {FundamentalType, PointerType, PointerTypeConst})
  3519. ItaniumRTTIBuilder(*this).BuildTypeInfo(
  3520. Type, llvm::GlobalValue::ExternalLinkage,
  3521. Visibility, DLLStorageClass);
  3522. }
  3523. }
  3524. /// What sort of uniqueness rules should we use for the RTTI for the
  3525. /// given type?
  3526. ItaniumCXXABI::RTTIUniquenessKind ItaniumCXXABI::classifyRTTIUniqueness(
  3527. QualType CanTy, llvm::GlobalValue::LinkageTypes Linkage) const {
  3528. if (shouldRTTIBeUnique())
  3529. return RUK_Unique;
  3530. // It's only necessary for linkonce_odr or weak_odr linkage.
  3531. if (Linkage != llvm::GlobalValue::LinkOnceODRLinkage &&
  3532. Linkage != llvm::GlobalValue::WeakODRLinkage)
  3533. return RUK_Unique;
  3534. // It's only necessary with default visibility.
  3535. if (CanTy->getVisibility() != DefaultVisibility)
  3536. return RUK_Unique;
  3537. // If we're not required to publish this symbol, hide it.
  3538. if (Linkage == llvm::GlobalValue::LinkOnceODRLinkage)
  3539. return RUK_NonUniqueHidden;
  3540. // If we're required to publish this symbol, as we might be under an
  3541. // explicit instantiation, leave it with default visibility but
  3542. // enable string-comparisons.
  3543. assert(Linkage == llvm::GlobalValue::WeakODRLinkage);
  3544. return RUK_NonUniqueVisible;
  3545. }
// Find out how to codegen the complete destructor and constructor
namespace {
// How a complete constructor/destructor is produced relative to its base
// variant: emit a separate body (Emit), replace uses of a prior declaration
// (RAUW), emit an alias of the base variant (Alias), or emit an alias that
// shares a COMDAT with the base variant (COMDAT).
enum class StructorCodegen { Emit, RAUW, Alias, COMDAT };
}
  3550. static StructorCodegen getCodegenToUse(CodeGenModule &CGM,
  3551. const CXXMethodDecl *MD) {
  3552. if (!CGM.getCodeGenOpts().CXXCtorDtorAliases)
  3553. return StructorCodegen::Emit;
  3554. // The complete and base structors are not equivalent if there are any virtual
  3555. // bases, so emit separate functions.
  3556. if (MD->getParent()->getNumVBases())
  3557. return StructorCodegen::Emit;
  3558. GlobalDecl AliasDecl;
  3559. if (const auto *DD = dyn_cast<CXXDestructorDecl>(MD)) {
  3560. AliasDecl = GlobalDecl(DD, Dtor_Complete);
  3561. } else {
  3562. const auto *CD = cast<CXXConstructorDecl>(MD);
  3563. AliasDecl = GlobalDecl(CD, Ctor_Complete);
  3564. }
  3565. llvm::GlobalValue::LinkageTypes Linkage = CGM.getFunctionLinkage(AliasDecl);
  3566. if (llvm::GlobalValue::isDiscardableIfUnused(Linkage))
  3567. return StructorCodegen::RAUW;
  3568. // FIXME: Should we allow available_externally aliases?
  3569. if (!llvm::GlobalAlias::isValidLinkage(Linkage))
  3570. return StructorCodegen::RAUW;
  3571. if (llvm::GlobalValue::isWeakForLinker(Linkage)) {
  3572. // Only ELF and wasm support COMDATs with arbitrary names (C5/D5).
  3573. if (CGM.getTarget().getTriple().isOSBinFormatELF() ||
  3574. CGM.getTarget().getTriple().isOSBinFormatWasm())
  3575. return StructorCodegen::COMDAT;
  3576. return StructorCodegen::Emit;
  3577. }
  3578. return StructorCodegen::Alias;
  3579. }
  3580. static void emitConstructorDestructorAlias(CodeGenModule &CGM,
  3581. GlobalDecl AliasDecl,
  3582. GlobalDecl TargetDecl) {
  3583. llvm::GlobalValue::LinkageTypes Linkage = CGM.getFunctionLinkage(AliasDecl);
  3584. StringRef MangledName = CGM.getMangledName(AliasDecl);
  3585. llvm::GlobalValue *Entry = CGM.GetGlobalValue(MangledName);
  3586. if (Entry && !Entry->isDeclaration())
  3587. return;
  3588. auto *Aliasee = cast<llvm::GlobalValue>(CGM.GetAddrOfGlobal(TargetDecl));
  3589. // Create the alias with no name.
  3590. auto *Alias = llvm::GlobalAlias::create(Linkage, "", Aliasee);
  3591. // Constructors and destructors are always unnamed_addr.
  3592. Alias->setUnnamedAddr(llvm::GlobalValue::UnnamedAddr::Global);
  3593. // Switch any previous uses to the alias.
  3594. if (Entry) {
  3595. assert(Entry->getType() == Aliasee->getType() &&
  3596. "declaration exists with different type");
  3597. Alias->takeName(Entry);
  3598. Entry->replaceAllUsesWith(Alias);
  3599. Entry->eraseFromParent();
  3600. } else {
  3601. Alias->setName(MangledName);
  3602. }
  3603. // Finally, set up the alias with its proper name and attributes.
  3604. CGM.SetCommonAttributes(AliasDecl, Alias);
  3605. }
// Emit one variant of a constructor or destructor, choosing between a real
// definition, an alias/comdat with the base variant, or a pure RAUW
// replacement (see StructorCodegen above).
void ItaniumCXXABI::emitCXXStructor(GlobalDecl GD) {
  auto *MD = cast<CXXMethodDecl>(GD.getDecl());
  auto *CD = dyn_cast<CXXConstructorDecl>(MD);
  // Exactly one of CD / DD is non-null.
  const CXXDestructorDecl *DD = CD ? nullptr : cast<CXXDestructorDecl>(MD);

  StructorCodegen CGType = getCodegenToUse(CGM, MD);

  // For complete-object variants, try to avoid emitting a second body by
  // forwarding to the corresponding base variant.
  if (CD ? GD.getCtorType() == Ctor_Complete
         : GD.getDtorType() == Dtor_Complete) {
    GlobalDecl BaseDecl;
    if (CD)
      BaseDecl = GD.getWithCtorType(Ctor_Base);
    else
      BaseDecl = GD.getWithDtorType(Dtor_Base);

    // Alias/COMDAT: the complete variant is an alias of the base variant
    // (for COMDAT the base body itself is emitted later, in a shared
    // comdat group; see below).
    if (CGType == StructorCodegen::Alias || CGType == StructorCodegen::COMDAT) {
      emitConstructorDestructorAlias(CGM, GD, BaseDecl);
      return;
    }

    // RAUW: record that references to this mangled name should be
    // replaced with the base variant's address.
    if (CGType == StructorCodegen::RAUW) {
      StringRef MangledName = CGM.getMangledName(GD);
      auto *Aliasee = CGM.GetAddrOfGlobal(BaseDecl);
      CGM.addReplacement(MangledName, Aliasee);
      return;
    }
  }

  // The base destructor is equivalent to the base destructor of its
  // base class if there is exactly one non-virtual base class with a
  // non-trivial destructor, there are no fields with a non-trivial
  // destructor, and the body of the destructor is trivial.
  // (TryEmitBaseDestructorAsAlias returns false when it succeeded, in
  // which case no body needs to be emitted here.)
  if (DD && GD.getDtorType() == Dtor_Base &&
      CGType != StructorCodegen::COMDAT &&
      !CGM.TryEmitBaseDestructorAsAlias(DD))
    return;

  // FIXME: The deleting destructor is equivalent to the selected operator
  // delete if:
  //  * either the delete is a destroying operator delete or the destructor
  //    would be trivial if it weren't virtual,
  //  * the conversion from the 'this' parameter to the first parameter of the
  //    destructor is equivalent to a bitcast,
  //  * the destructor does not have an implicit "this" return, and
  //  * the operator delete has the same calling convention and IR function type
  //    as the destructor.
  // In such cases we should try to emit the deleting dtor as an alias to the
  // selected 'operator delete'.

  // Emit the actual function body.
  llvm::Function *Fn = CGM.codegenCXXStructor(GD);

  if (CGType == StructorCodegen::COMDAT) {
    // Put the body in a comdat group named after the C5/D5 (grouped
    // structor) mangling so all variants fold together at link time.
    SmallString<256> Buffer;
    llvm::raw_svector_ostream Out(Buffer);
    if (DD)
      getMangleContext().mangleCXXDtorComdat(DD, Out);
    else
      getMangleContext().mangleCXXCtorComdat(CD, Out);
    llvm::Comdat *C = CGM.getModule().getOrInsertComdat(Out.str());
    Fn->setComdat(C);
  } else {
    CGM.maybeSetTrivialComdat(*MD, *Fn);
  }
}
  3662. static llvm::FunctionCallee getBeginCatchFn(CodeGenModule &CGM) {
  3663. // void *__cxa_begin_catch(void*);
  3664. llvm::FunctionType *FTy = llvm::FunctionType::get(
  3665. CGM.Int8PtrTy, CGM.Int8PtrTy, /*isVarArg=*/false);
  3666. return CGM.CreateRuntimeFunction(FTy, "__cxa_begin_catch");
  3667. }
  3668. static llvm::FunctionCallee getEndCatchFn(CodeGenModule &CGM) {
  3669. // void __cxa_end_catch();
  3670. llvm::FunctionType *FTy =
  3671. llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
  3672. return CGM.CreateRuntimeFunction(FTy, "__cxa_end_catch");
  3673. }
  3674. static llvm::FunctionCallee getGetExceptionPtrFn(CodeGenModule &CGM) {
  3675. // void *__cxa_get_exception_ptr(void*);
  3676. llvm::FunctionType *FTy = llvm::FunctionType::get(
  3677. CGM.Int8PtrTy, CGM.Int8PtrTy, /*isVarArg=*/false);
  3678. return CGM.CreateRuntimeFunction(FTy, "__cxa_get_exception_ptr");
  3679. }
  3680. namespace {
  3681. /// A cleanup to call __cxa_end_catch. In many cases, the caught
  3682. /// exception type lets us state definitively that the thrown exception
  3683. /// type does not have a destructor. In particular:
  3684. /// - Catch-alls tell us nothing, so we have to conservatively
  3685. /// assume that the thrown exception might have a destructor.
  3686. /// - Catches by reference behave according to their base types.
  3687. /// - Catches of non-record types will only trigger for exceptions
  3688. /// of non-record types, which never have destructors.
  3689. /// - Catches of record types can trigger for arbitrary subclasses
  3690. /// of the caught type, so we have to assume the actual thrown
  3691. /// exception type might have a throwing destructor, even if the
  3692. /// caught type's destructor is trivial or nothrow.
  3693. struct CallEndCatch final : EHScopeStack::Cleanup {
  3694. CallEndCatch(bool MightThrow) : MightThrow(MightThrow) {}
  3695. bool MightThrow;
  3696. void Emit(CodeGenFunction &CGF, Flags flags) override {
  3697. if (!MightThrow) {
  3698. CGF.EmitNounwindRuntimeCall(getEndCatchFn(CGF.CGM));
  3699. return;
  3700. }
  3701. CGF.EmitRuntimeCallOrInvoke(getEndCatchFn(CGF.CGM));
  3702. }
  3703. };
  3704. }
  3705. /// Emits a call to __cxa_begin_catch and enters a cleanup to call
  3706. /// __cxa_end_catch.
  3707. ///
  3708. /// \param EndMightThrow - true if __cxa_end_catch might throw
  3709. static llvm::Value *CallBeginCatch(CodeGenFunction &CGF,
  3710. llvm::Value *Exn,
  3711. bool EndMightThrow) {
  3712. llvm::CallInst *call =
  3713. CGF.EmitNounwindRuntimeCall(getBeginCatchFn(CGF.CGM), Exn);
  3714. CGF.EHStack.pushCleanup<CallEndCatch>(NormalAndEHCleanup, EndMightThrow);
  3715. return call;
  3716. }
/// A "special initializer" callback for initializing a catch
/// parameter during catch initialization.
///
/// Initializes \p CatchParam at \p ParamAddr from the current exception,
/// calling __cxa_begin_catch (and entering the matching __cxa_end_catch
/// cleanup) at the point appropriate for the catch type:
///   - references: bind directly to the adjusted exception object;
///   - scalars/complexes: load through the adjusted pointer;
///   - records: copy-construct, using __cxa_get_exception_ptr so the copy
///     happens *before* the exception is marked as handled.
static void InitCatchParam(CodeGenFunction &CGF,
                           const VarDecl &CatchParam,
                           Address ParamAddr,
                           SourceLocation Loc) {
  // Load the exception from where the landing pad saved it.
  llvm::Value *Exn = CGF.getExceptionFromSlot();

  CanQualType CatchType =
      CGF.CGM.getContext().getCanonicalType(CatchParam.getType());
  llvm::Type *LLVMCatchTy = CGF.ConvertTypeForMem(CatchType);

  // If we're catching by reference, we can just cast the object
  // pointer to the appropriate pointer.
  if (isa<ReferenceType>(CatchType)) {
    QualType CaughtType = cast<ReferenceType>(CatchType)->getPointeeType();
    // Only record types can have a (possibly throwing) destructor here.
    bool EndCatchMightThrow = CaughtType->isRecordType();

    // __cxa_begin_catch returns the adjusted object pointer.
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, EndCatchMightThrow);

    // We have no way to tell the personality function that we're
    // catching by reference, so if we're catching a pointer,
    // __cxa_begin_catch will actually return that pointer by value.
    if (const PointerType *PT = dyn_cast<PointerType>(CaughtType)) {
      QualType PointeeType = PT->getPointeeType();

      // When catching by reference, generally we should just ignore
      // this by-value pointer and use the exception object instead.
      if (!PointeeType->isRecordType()) {

        // Exn points to the struct _Unwind_Exception header, which
        // we have to skip past in order to reach the exception data.
        unsigned HeaderSize =
            CGF.CGM.getTargetCodeGenInfo().getSizeOfUnwindException();
        AdjustedExn =
            CGF.Builder.CreateConstGEP1_32(CGF.Int8Ty, Exn, HeaderSize);

        // However, if we're catching a pointer-to-record type that won't
        // work, because the personality function might have adjusted
        // the pointer. There's actually no way for us to fully satisfy
        // the language/ABI contract here: we can't use Exn because it
        // might have the wrong adjustment, but we can't use the by-value
        // pointer because it's off by a level of abstraction.
        //
        // The current solution is to dump the adjusted pointer into an
        // alloca, which breaks language semantics (because changing the
        // pointer doesn't change the exception) but at least works.
        // The better solution would be to filter out non-exact matches
        // and rethrow them, but this is tricky because the rethrow
        // really needs to be catchable by other sites at this landing
        // pad. The best solution is to fix the personality function.
      } else {
        // Pull the pointer for the reference type off.
        llvm::Type *PtrTy = LLVMCatchTy->getPointerElementType();

        // Create the temporary and write the adjusted pointer into it.
        Address ExnPtrTmp =
            CGF.CreateTempAlloca(PtrTy, CGF.getPointerAlign(), "exn.byref.tmp");
        llvm::Value *Casted = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);
        CGF.Builder.CreateStore(Casted, ExnPtrTmp);

        // Bind the reference to the temporary.
        AdjustedExn = ExnPtrTmp.getPointer();
      }
    }

    // Store the (possibly adjusted) object pointer into the reference slot.
    llvm::Value *ExnCast =
        CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.byref");
    CGF.Builder.CreateStore(ExnCast, ParamAddr);
    return;
  }

  // Scalars and complexes.
  TypeEvaluationKind TEK = CGF.getEvaluationKind(CatchType);
  if (TEK != TEK_Aggregate) {
    // Non-record types never have destructors, so __cxa_end_catch
    // cannot throw here.
    llvm::Value *AdjustedExn = CallBeginCatch(CGF, Exn, false);

    // If the catch type is a pointer type, __cxa_begin_catch returns
    // the pointer by value.
    if (CatchType->hasPointerRepresentation()) {
      llvm::Value *CastExn =
          CGF.Builder.CreateBitCast(AdjustedExn, LLVMCatchTy, "exn.casted");

      // Honor ObjC ARC ownership of the catch parameter, if any.
      switch (CatchType.getQualifiers().getObjCLifetime()) {
      case Qualifiers::OCL_Strong:
        CastExn = CGF.EmitARCRetainNonBlock(CastExn);
        LLVM_FALLTHROUGH;

      case Qualifiers::OCL_None:
      case Qualifiers::OCL_ExplicitNone:
      case Qualifiers::OCL_Autoreleasing:
        CGF.Builder.CreateStore(CastExn, ParamAddr);
        return;

      case Qualifiers::OCL_Weak:
        CGF.EmitARCInitWeak(ParamAddr, CastExn);
        return;
      }
      llvm_unreachable("bad ownership qualifier!");
    }

    // Otherwise, it returns a pointer into the exception object.
    llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok
    llvm::Value *Cast = CGF.Builder.CreateBitCast(AdjustedExn, PtrTy);

    // Copy the value out of the exception object into the parameter.
    LValue srcLV = CGF.MakeNaturalAlignAddrLValue(Cast, CatchType);
    LValue destLV = CGF.MakeAddrLValue(ParamAddr, CatchType);
    switch (TEK) {
    case TEK_Complex:
      CGF.EmitStoreOfComplex(CGF.EmitLoadOfComplex(srcLV, Loc), destLV,
                             /*init*/ true);
      return;
    case TEK_Scalar: {
      llvm::Value *ExnLoad = CGF.EmitLoadOfScalar(srcLV, Loc);
      CGF.EmitStoreOfScalar(ExnLoad, destLV, /*init*/ true);
      return;
    }
    case TEK_Aggregate:
      llvm_unreachable("evaluation kind filtered out!");
    }
    llvm_unreachable("bad evaluation kind");
  }

  // Aggregate (record) catch types from here on.
  assert(isa<RecordType>(CatchType) && "unexpected catch type!");
  auto catchRD = CatchType->getAsCXXRecordDecl();
  CharUnits caughtExnAlignment = CGF.CGM.getClassPointerAlignment(catchRD);

  llvm::Type *PtrTy = LLVMCatchTy->getPointerTo(0); // addrspace 0 ok

  // Check for a copy expression.  If we don't have a copy expression,
  // that means a trivial copy is okay.
  const Expr *copyExpr = CatchParam.getInit();
  if (!copyExpr) {
    llvm::Value *rawAdjustedExn = CallBeginCatch(CGF, Exn, true);
    Address adjustedExn(CGF.Builder.CreateBitCast(rawAdjustedExn, PtrTy),
                        caughtExnAlignment);
    LValue Dest = CGF.MakeAddrLValue(ParamAddr, CatchType);
    LValue Src = CGF.MakeAddrLValue(adjustedExn, CatchType);
    CGF.EmitAggregateCopy(Dest, Src, CatchType, AggValueSlot::DoesNotOverlap);
    return;
  }

  // We have to call __cxa_get_exception_ptr to get the adjusted
  // pointer before copying.
  llvm::CallInst *rawAdjustedExn =
      CGF.EmitNounwindRuntimeCall(getGetExceptionPtrFn(CGF.CGM), Exn);

  // Cast that to the appropriate type.
  Address adjustedExn(CGF.Builder.CreateBitCast(rawAdjustedExn, PtrTy),
                      caughtExnAlignment);

  // The copy expression is defined in terms of an OpaqueValueExpr.
  // Find it and map it to the adjusted expression.
  CodeGenFunction::OpaqueValueMapping
    opaque(CGF, OpaqueValueExpr::findInCopyConstruct(copyExpr),
           CGF.MakeAddrLValue(adjustedExn, CatchParam.getType()));

  // Call the copy ctor in a terminate scope.
  CGF.EHStack.pushTerminate();

  // Perform the copy construction.
  CGF.EmitAggExpr(copyExpr,
                  AggValueSlot::forAddr(ParamAddr, Qualifiers(),
                                        AggValueSlot::IsNotDestructed,
                                        AggValueSlot::DoesNotNeedGCBarriers,
                                        AggValueSlot::IsNotAliased,
                                        AggValueSlot::DoesNotOverlap));

  // Leave the terminate scope.
  CGF.EHStack.popTerminate();

  // Undo the opaque value mapping.
  opaque.pop();

  // Finally we can call __cxa_begin_catch.
  CallBeginCatch(CGF, Exn, true);
}
  3868. /// Begins a catch statement by initializing the catch variable and
  3869. /// calling __cxa_begin_catch.
  3870. void ItaniumCXXABI::emitBeginCatch(CodeGenFunction &CGF,
  3871. const CXXCatchStmt *S) {
  3872. // We have to be very careful with the ordering of cleanups here:
  3873. // C++ [except.throw]p4:
  3874. // The destruction [of the exception temporary] occurs
  3875. // immediately after the destruction of the object declared in
  3876. // the exception-declaration in the handler.
  3877. //
  3878. // So the precise ordering is:
  3879. // 1. Construct catch variable.
  3880. // 2. __cxa_begin_catch
  3881. // 3. Enter __cxa_end_catch cleanup
  3882. // 4. Enter dtor cleanup
  3883. //
  3884. // We do this by using a slightly abnormal initialization process.
  3885. // Delegation sequence:
  3886. // - ExitCXXTryStmt opens a RunCleanupsScope
  3887. // - EmitAutoVarAlloca creates the variable and debug info
  3888. // - InitCatchParam initializes the variable from the exception
  3889. // - CallBeginCatch calls __cxa_begin_catch
  3890. // - CallBeginCatch enters the __cxa_end_catch cleanup
  3891. // - EmitAutoVarCleanups enters the variable destructor cleanup
  3892. // - EmitCXXTryStmt emits the code for the catch body
  3893. // - EmitCXXTryStmt close the RunCleanupsScope
  3894. VarDecl *CatchParam = S->getExceptionDecl();
  3895. if (!CatchParam) {
  3896. llvm::Value *Exn = CGF.getExceptionFromSlot();
  3897. CallBeginCatch(CGF, Exn, true);
  3898. return;
  3899. }
  3900. // Emit the local.
  3901. CodeGenFunction::AutoVarEmission var = CGF.EmitAutoVarAlloca(*CatchParam);
  3902. InitCatchParam(CGF, *CatchParam, var.getObjectAddress(CGF), S->getBeginLoc());
  3903. CGF.EmitAutoVarCleanups(var);
  3904. }
  3905. /// Get or define the following function:
  3906. /// void @__clang_call_terminate(i8* %exn) nounwind noreturn
  3907. /// This code is used only in C++.
  3908. static llvm::FunctionCallee getClangCallTerminateFn(CodeGenModule &CGM) {
  3909. llvm::FunctionType *fnTy =
  3910. llvm::FunctionType::get(CGM.VoidTy, CGM.Int8PtrTy, /*isVarArg=*/false);
  3911. llvm::FunctionCallee fnRef = CGM.CreateRuntimeFunction(
  3912. fnTy, "__clang_call_terminate", llvm::AttributeList(), /*Local=*/true);
  3913. llvm::Function *fn =
  3914. cast<llvm::Function>(fnRef.getCallee()->stripPointerCasts());
  3915. if (fn->empty()) {
  3916. fn->setDoesNotThrow();
  3917. fn->setDoesNotReturn();
  3918. // What we really want is to massively penalize inlining without
  3919. // forbidding it completely. The difference between that and
  3920. // 'noinline' is negligible.
  3921. fn->addFnAttr(llvm::Attribute::NoInline);
  3922. // Allow this function to be shared across translation units, but
  3923. // we don't want it to turn into an exported symbol.
  3924. fn->setLinkage(llvm::Function::LinkOnceODRLinkage);
  3925. fn->setVisibility(llvm::Function::HiddenVisibility);
  3926. if (CGM.supportsCOMDAT())
  3927. fn->setComdat(CGM.getModule().getOrInsertComdat(fn->getName()));
  3928. // Set up the function.
  3929. llvm::BasicBlock *entry =
  3930. llvm::BasicBlock::Create(CGM.getLLVMContext(), "", fn);
  3931. CGBuilderTy builder(CGM, entry);
  3932. // Pull the exception pointer out of the parameter list.
  3933. llvm::Value *exn = &*fn->arg_begin();
  3934. // Call __cxa_begin_catch(exn).
  3935. llvm::CallInst *catchCall = builder.CreateCall(getBeginCatchFn(CGM), exn);
  3936. catchCall->setDoesNotThrow();
  3937. catchCall->setCallingConv(CGM.getRuntimeCC());
  3938. // Call std::terminate().
  3939. llvm::CallInst *termCall = builder.CreateCall(CGM.getTerminateFn());
  3940. termCall->setDoesNotThrow();
  3941. termCall->setDoesNotReturn();
  3942. termCall->setCallingConv(CGM.getRuntimeCC());
  3943. // std::terminate cannot return.
  3944. builder.CreateUnreachable();
  3945. }
  3946. return fnRef;
  3947. }
  3948. llvm::CallInst *
  3949. ItaniumCXXABI::emitTerminateForUnexpectedException(CodeGenFunction &CGF,
  3950. llvm::Value *Exn) {
  3951. // In C++, we want to call __cxa_begin_catch() before terminating.
  3952. if (Exn) {
  3953. assert(CGF.CGM.getLangOpts().CPlusPlus);
  3954. return CGF.EmitNounwindRuntimeCall(getClangCallTerminateFn(CGF.CGM), Exn);
  3955. }
  3956. return CGF.EmitNounwindRuntimeCall(CGF.CGM.getTerminateFn());
  3957. }
  3958. std::pair<llvm::Value *, const CXXRecordDecl *>
  3959. ItaniumCXXABI::LoadVTablePtr(CodeGenFunction &CGF, Address This,
  3960. const CXXRecordDecl *RD) {
  3961. return {CGF.GetVTablePtr(This, CGM.Int8PtrTy, RD), RD};
  3962. }
  3963. void WebAssemblyCXXABI::emitBeginCatch(CodeGenFunction &CGF,
  3964. const CXXCatchStmt *C) {
  3965. if (CGF.getTarget().hasFeature("exception-handling"))
  3966. CGF.EHStack.pushCleanup<CatchRetScope>(
  3967. NormalCleanup, cast<llvm::CatchPadInst>(CGF.CurrentFuncletPad));
  3968. ItaniumCXXABI::emitBeginCatch(CGF, C);
  3969. }
  3970. llvm::CallInst *
  3971. WebAssemblyCXXABI::emitTerminateForUnexpectedException(CodeGenFunction &CGF,
  3972. llvm::Value *Exn) {
  3973. // Itanium ABI calls __clang_call_terminate(), which __cxa_begin_catch() on
  3974. // the violating exception to mark it handled, but it is currently hard to do
  3975. // with wasm EH instruction structure with catch/catch_all, we just call
  3976. // std::terminate and ignore the violating exception as in CGCXXABI.
  3977. // TODO Consider code transformation that makes calling __clang_call_terminate
  3978. // possible.
  3979. return CGCXXABI::emitTerminateForUnexpectedException(CGF, Exn);
  3980. }
  3981. /// Register a global destructor as best as we know how.
  3982. void XLCXXABI::registerGlobalDtor(CodeGenFunction &CGF, const VarDecl &D,
  3983. llvm::FunctionCallee Dtor,
  3984. llvm::Constant *Addr) {
  3985. if (D.getTLSKind() != VarDecl::TLS_None) {
  3986. // atexit routine expects "int(*)(int,...)"
  3987. llvm::FunctionType *FTy =
  3988. llvm::FunctionType::get(CGM.IntTy, CGM.IntTy, true);
  3989. llvm::PointerType *FpTy = FTy->getPointerTo();
  3990. // extern "C" int __pt_atexit_np(int flags, int(*)(int,...), ...);
  3991. llvm::FunctionType *AtExitTy =
  3992. llvm::FunctionType::get(CGM.IntTy, {CGM.IntTy, FpTy}, true);
  3993. // Fetch the actual function.
  3994. llvm::FunctionCallee AtExit =
  3995. CGM.CreateRuntimeFunction(AtExitTy, "__pt_atexit_np");
  3996. // Create __dtor function for the var decl.
  3997. llvm::Function *DtorStub = CGF.createTLSAtExitStub(D, Dtor, Addr, AtExit);
  3998. // Register above __dtor with atexit().
  3999. // First param is flags and must be 0, second param is function ptr
  4000. llvm::Value *NV = llvm::Constant::getNullValue(CGM.IntTy);
  4001. CGF.EmitNounwindRuntimeCall(AtExit, {NV, DtorStub});
  4002. // Cannot unregister TLS __dtor so done
  4003. return;
  4004. }
  4005. // Create __dtor function for the var decl.
  4006. llvm::Function *DtorStub = CGF.createAtExitStub(D, Dtor, Addr);
  4007. // Register above __dtor with atexit().
  4008. CGF.registerGlobalDtorWithAtExit(DtorStub);
  4009. // Emit __finalize function to unregister __dtor and (as appropriate) call
  4010. // __dtor.
  4011. emitCXXStermFinalizer(D, DtorStub, Addr);
  4012. }
// Emit the __finalize (sterm finalizer) function for variable D: it calls
// unatexit() to unregister dtorStub and, if the cleanup was still pending,
// runs dtorStub itself.  The finalizer is then recorded with CGM according
// to the variable's init priority / linkage.
void XLCXXABI::emitCXXStermFinalizer(const VarDecl &D, llvm::Function *dtorStub,
                                     llvm::Constant *addr) {
  llvm::FunctionType *FTy = llvm::FunctionType::get(CGM.VoidTy, false);

  // Mangle the finalizer's name from the variable's declaration.
  SmallString<256> FnName;
  {
    llvm::raw_svector_ostream Out(FnName);
    getMangleContext().mangleDynamicStermFinalizer(&D, Out);
  }

  // Create the finalization action associated with a variable.
  const CGFunctionInfo &FI = CGM.getTypes().arrangeNullaryFunction();
  llvm::Function *StermFinalizer = CGM.CreateGlobalInitOrCleanUpFunction(
      FTy, FnName.str(), FI, D.getLocation());

  CodeGenFunction CGF(CGM);

  CGF.StartFunction(GlobalDecl(), CGM.getContext().VoidTy, StermFinalizer, FI,
                    FunctionArgList(), D.getLocation(),
                    D.getInit()->getExprLoc());

  // The unatexit subroutine unregisters __dtor functions that were previously
  // registered by the atexit subroutine. If the referenced function is found,
  // the unatexit returns a value of 0, meaning that the cleanup is still
  // pending (and we should call the __dtor function).
  llvm::Value *V = CGF.unregisterGlobalDtorWithUnAtExit(dtorStub);

  llvm::Value *NeedsDestruct = CGF.Builder.CreateIsNull(V, "needs_destruct");

  llvm::BasicBlock *DestructCallBlock = CGF.createBasicBlock("destruct.call");
  llvm::BasicBlock *EndBlock = CGF.createBasicBlock("destruct.end");

  // Check if unatexit returns a value of 0. If it does, jump to
  // DestructCallBlock, otherwise jump to EndBlock directly.
  CGF.Builder.CreateCondBr(NeedsDestruct, DestructCallBlock, EndBlock);

  CGF.EmitBlock(DestructCallBlock);

  // Emit the call to dtorStub.
  llvm::CallInst *CI = CGF.Builder.CreateCall(dtorStub);

  // Make sure the call and the callee agree on calling convention.
  CI->setCallingConv(dtorStub->getCallingConv());

  CGF.EmitBlock(EndBlock);

  CGF.FinishFunction();

  // Decide how the finalizer is scheduled relative to other finalizers.
  if (auto *IPA = D.getAttr<InitPriorityAttr>()) {
    // Explicit init_priority attribute: use a prioritized entry.
    CGM.AddCXXPrioritizedStermFinalizerEntry(StermFinalizer,
                                             IPA->getPriority());
  } else if (isTemplateInstantiation(D.getTemplateSpecializationKind()) ||
             getContext().GetGVALinkageForVariable(&D) == GVA_DiscardableODR) {
    // According to C++ [basic.start.init]p2, class template static data
    // members (i.e., implicitly or explicitly instantiated specializations)
    // have unordered initialization. As a consequence, we can put them into
    // their own llvm.global_dtors entry.
    CGM.AddCXXStermFinalizerToGlobalDtor(StermFinalizer, 65535);
  } else {
    // Ordinary case: append to the module's sterm finalizer list.
    CGM.AddCXXStermFinalizerEntry(StermFinalizer);
  }
}