//===--- CGClass.cpp - Emit LLVM Code for C++ classes -----------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of classes
//
//===----------------------------------------------------------------------===//

#include "CGBlocks.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGRecordLayout.h"
#include "CodeGenFunction.h"
#include "TargetInfo.h"
#include "clang/AST/Attr.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/CharUnits.h"
#include "clang/AST/DeclTemplate.h"
#include "clang/AST/EvaluatedExprVisitor.h"
#include "clang/AST/RecordLayout.h"
#include "clang/AST/StmtCXX.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/Basic/TargetBuiltins.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/Metadata.h"
#include "llvm/Transforms/Utils/SanitizerStats.h"

using namespace clang;
using namespace CodeGen;

/// Return the best known alignment for an unknown pointer to a
/// particular class.
CharUnits CodeGenModule::getClassPointerAlignment(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One(); // Hopefully won't be used anywhere.

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->isEffectivelyFinal())
    return layout.getAlignment();

  // Otherwise, we have to assume it could be a subclass.
  return layout.getNonVirtualAlignment();
}
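// Illustrative example (not from the original source): given
//
//   struct B { ... };                        // non-final
//   struct alignas(16) D final : B { ... }; // final
//
// a 'D *' is known to address a complete D, so its full (16-byte) alignment
// applies; a 'B *' may really point at the B subobject inside some derived
// object, so only B's non-virtual alignment can be assumed.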
/// Return the smallest possible amount of storage that might be allocated
/// starting from the beginning of an object of a particular class.
///
/// This may be smaller than sizeof(RD) if RD has virtual base classes.
CharUnits CodeGenModule::getMinimumClassObjectSize(const CXXRecordDecl *RD) {
  if (!RD->hasDefinition())
    return CharUnits::One();

  auto &layout = getContext().getASTRecordLayout(RD);

  // If the class is final, then we know that the pointer points to an
  // object of that type and can use the full alignment.
  if (RD->isEffectivelyFinal())
    return layout.getSize();

  // Otherwise, we have to assume it could be a subclass.
  return std::max(layout.getNonVirtualSize(), CharUnits::One());
}
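// Illustrative example (not from the original source): with
//
//   struct A { virtual ~A(); int x; };
//   struct B : virtual A {};
//
// the storage guaranteed to start at a 'B *' is only B's non-virtual part
// (roughly the vtable pointer), since the virtual A subobject of a further
// derived class may be laid out elsewhere; sizeof(B) would overstate it.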
/// Return the best known alignment for a pointer to a virtual base,
/// given the alignment of a pointer to the derived class.
CharUnits CodeGenModule::getVBaseAlignment(CharUnits actualDerivedAlign,
                                           const CXXRecordDecl *derivedClass,
                                           const CXXRecordDecl *vbaseClass) {
  // The basic idea here is that an underaligned derived pointer might
  // indicate an underaligned base pointer.
  assert(vbaseClass->isCompleteDefinition());
  auto &baseLayout = getContext().getASTRecordLayout(vbaseClass);
  CharUnits expectedVBaseAlign = baseLayout.getNonVirtualAlignment();

  return getDynamicOffsetAlignment(actualDerivedAlign, derivedClass,
                                   expectedVBaseAlign);
}

CharUnits
CodeGenModule::getDynamicOffsetAlignment(CharUnits actualBaseAlign,
                                         const CXXRecordDecl *baseDecl,
                                         CharUnits expectedTargetAlign) {
  // If the base is an incomplete type (which is, alas, possible with
  // member pointers), be pessimistic.
  if (!baseDecl->isCompleteDefinition())
    return std::min(actualBaseAlign, expectedTargetAlign);

  auto &baseLayout = getContext().getASTRecordLayout(baseDecl);
  CharUnits expectedBaseAlign = baseLayout.getNonVirtualAlignment();

  // If the class is properly aligned, assume the target offset is, too.
  //
  // This actually isn't necessarily the right thing to do --- if the
  // class is a complete object, but it's only properly aligned for a
  // base subobject, then the alignments of things relative to it are
  // probably off as well.  (Note that this requires the alignment of
  // the target to be greater than the NV alignment of the derived
  // class.)
  //
  // However, our approach to this kind of under-alignment can only
  // ever be best effort; after all, we're never going to propagate
  // alignments through variables or parameters.  Note, in particular,
  // that constructing a polymorphic type in an address that's less
  // than pointer-aligned will generally trap in the constructor,
  // unless we someday add some sort of attribute to change the
  // assumed alignment of 'this'.  So our goal here is pretty much
  // just to allow the user to explicitly say that a pointer is
  // under-aligned and then safely access its fields and vtables.
  if (actualBaseAlign >= expectedBaseAlign) {
    return expectedTargetAlign;
  }

  // Otherwise, we might be offset by an arbitrary multiple of the
  // actual alignment.  The correct adjustment is to take the min of
  // the two alignments.
  return std::min(actualBaseAlign, expectedTargetAlign);
}
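// Worked example (illustrative, not from the original source): if a base is
// normally 8-byte aligned but the caller only knows a 2-byte alignment for
// it, the actual pointer could sit at any multiple of 2, so an offset to a
// nominally 16-byte-aligned target can only be trusted to min(2, 16) = 2
// bytes. If instead the caller's known alignment meets or exceeds the base's
// expected alignment, the target's expected alignment is used unchanged.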
Address CodeGenFunction::LoadCXXThisAddress() {
  assert(CurFuncDecl && "loading 'this' without a func declaration?");
  auto *MD = cast<CXXMethodDecl>(CurFuncDecl);

  // Lazily compute CXXThisAlignment.
  if (CXXThisAlignment.isZero()) {
    // Just use the best known alignment for the parent.
    // TODO: if we're currently emitting a complete-object ctor/dtor,
    // we can always use the complete-object alignment.
    CXXThisAlignment = CGM.getClassPointerAlignment(MD->getParent());
  }

  llvm::Type *Ty = ConvertType(MD->getThisType()->getPointeeType());
  return Address(LoadCXXThis(), Ty, CXXThisAlignment);
}

/// Emit the address of a field using a member data pointer.
///
/// \param E Only used for emergency diagnostics
Address
CodeGenFunction::EmitCXXMemberDataPointerAddress(const Expr *E, Address base,
                                                 llvm::Value *memberPtr,
                                      const MemberPointerType *memberPtrType,
                                                 LValueBaseInfo *BaseInfo,
                                                 TBAAAccessInfo *TBAAInfo) {
  // Ask the ABI to compute the actual address.
  llvm::Value *ptr =
    CGM.getCXXABI().EmitMemberDataPointerAddress(*this, E, base,
                                                 memberPtr, memberPtrType);

  QualType memberType = memberPtrType->getPointeeType();
  CharUnits memberAlign =
      CGM.getNaturalTypeAlignment(memberType, BaseInfo, TBAAInfo);
  memberAlign =
    CGM.getDynamicOffsetAlignment(base.getAlignment(),
                            memberPtrType->getClass()->getAsCXXRecordDecl(),
                                  memberAlign);
  return Address(ptr, ConvertTypeForMem(memberPtrType->getPointeeType()),
                 memberAlign);
}

CharUnits CodeGenModule::computeNonVirtualBaseClassOffset(
    const CXXRecordDecl *DerivedClass, CastExpr::path_const_iterator Start,
    CastExpr::path_const_iterator End) {
  CharUnits Offset = CharUnits::Zero();

  const ASTContext &Context = getContext();
  const CXXRecordDecl *RD = DerivedClass;

  for (CastExpr::path_const_iterator I = Start; I != End; ++I) {
    const CXXBaseSpecifier *Base = *I;
    assert(!Base->isVirtual() && "Should not see virtual bases here!");

    // Get the layout.
    const ASTRecordLayout &Layout = Context.getASTRecordLayout(RD);

    const auto *BaseDecl =
        cast<CXXRecordDecl>(Base->getType()->castAs<RecordType>()->getDecl());

    // Add the offset.
    Offset += Layout.getBaseClassOffset(BaseDecl);

    RD = BaseDecl;
  }

  return Offset;
}

llvm::Constant *
CodeGenModule::GetNonVirtualBaseClassOffset(const CXXRecordDecl *ClassDecl,
                                   CastExpr::path_const_iterator PathBegin,
                                   CastExpr::path_const_iterator PathEnd) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CharUnits Offset =
      computeNonVirtualBaseClassOffset(ClassDecl, PathBegin, PathEnd);
  if (Offset.isZero())
    return nullptr;

  llvm::Type *PtrDiffTy =
      Types.ConvertType(getContext().getPointerDiffType());

  return llvm::ConstantInt::get(PtrDiffTy, Offset.getQuantity());
}

/// Gets the address of a direct base class within a complete object.
/// This should only be used for (1) non-virtual bases or (2) virtual bases
/// when the type is known to be complete (e.g. in complete destructors).
///
/// The object pointed to by 'This' is assumed to be non-null.
Address
CodeGenFunction::GetAddressOfDirectBaseInCompleteClass(Address This,
                                                 const CXXRecordDecl *Derived,
                                                 const CXXRecordDecl *Base,
                                                 bool BaseIsVirtual) {
  // 'this' must be a pointer (in some address space) to Derived.
  assert(This.getElementType() == ConvertType(Derived));

  // Compute the offset of the virtual base.
  CharUnits Offset;
  const ASTRecordLayout &Layout = getContext().getASTRecordLayout(Derived);
  if (BaseIsVirtual)
    Offset = Layout.getVBaseClassOffset(Base);
  else
    Offset = Layout.getBaseClassOffset(Base);

  // Shift and cast down to the base type.
  // TODO: for complete types, this should be possible with a GEP.
  Address V = This;
  if (!Offset.isZero()) {
    V = Builder.CreateElementBitCast(V, Int8Ty);
    V = Builder.CreateConstInBoundsByteGEP(V, Offset);
  }
  V = Builder.CreateElementBitCast(V, ConvertType(Base));

  return V;
}

static Address
ApplyNonVirtualAndVirtualOffset(CodeGenFunction &CGF, Address addr,
                                CharUnits nonVirtualOffset,
                                llvm::Value *virtualOffset,
                                const CXXRecordDecl *derivedClass,
                                const CXXRecordDecl *nearestVBase) {
  // Assert that we have something to do.
  assert(!nonVirtualOffset.isZero() || virtualOffset != nullptr);

  // Compute the offset from the static and dynamic components.
  llvm::Value *baseOffset;
  if (!nonVirtualOffset.isZero()) {
    llvm::Type *OffsetType =
        (CGF.CGM.getTarget().getCXXABI().isItaniumFamily() &&
         CGF.CGM.getItaniumVTableContext().isRelativeLayout())
            ? CGF.Int32Ty
            : CGF.PtrDiffTy;
    baseOffset =
        llvm::ConstantInt::get(OffsetType, nonVirtualOffset.getQuantity());
    if (virtualOffset) {
      baseOffset = CGF.Builder.CreateAdd(virtualOffset, baseOffset);
    }
  } else {
    baseOffset = virtualOffset;
  }

  // Apply the base offset.
  llvm::Value *ptr = addr.getPointer();
  unsigned AddrSpace = ptr->getType()->getPointerAddressSpace();
  ptr = CGF.Builder.CreateBitCast(ptr, CGF.Int8Ty->getPointerTo(AddrSpace));
  ptr = CGF.Builder.CreateInBoundsGEP(CGF.Int8Ty, ptr, baseOffset, "add.ptr");

  // If we have a virtual component, the alignment of the result will
  // be relative only to the known alignment of that vbase.
  CharUnits alignment;
  if (virtualOffset) {
    assert(nearestVBase && "virtual offset without vbase?");
    alignment = CGF.CGM.getVBaseAlignment(addr.getAlignment(),
                                          derivedClass, nearestVBase);
  } else {
    alignment = addr.getAlignment();
  }
  alignment = alignment.alignmentAtOffset(nonVirtualOffset);

  return Address(ptr, CGF.Int8Ty, alignment);
}

Address CodeGenFunction::GetAddressOfBaseClass(
    Address Value, const CXXRecordDecl *Derived,
    CastExpr::path_const_iterator PathBegin,
    CastExpr::path_const_iterator PathEnd, bool NullCheckValue,
    SourceLocation Loc) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  CastExpr::path_const_iterator Start = PathBegin;
  const CXXRecordDecl *VBase = nullptr;

  // Sema has done some convenient canonicalization here: if the
  // access path involved any virtual steps, the conversion path will
  // *start* with a step down to the correct virtual base subobject,
  // and hence will not require any further steps.
  if ((*Start)->isVirtual()) {
    VBase = cast<CXXRecordDecl>(
        (*Start)->getType()->castAs<RecordType>()->getDecl());
    ++Start;
  }

  // Compute the static offset of the ultimate destination within its
  // allocating subobject (the virtual base, if there is one, or else
  // the "complete" object that we see).
  CharUnits NonVirtualOffset = CGM.computeNonVirtualBaseClassOffset(
      VBase ? VBase : Derived, Start, PathEnd);

  // If there's a virtual step, we can sometimes "devirtualize" it.
  // For now, that's limited to when the derived type is final.
  // TODO: "devirtualize" this for accesses to known-complete objects.
  if (VBase && Derived->hasAttr<FinalAttr>()) {
    const ASTRecordLayout &layout = getContext().getASTRecordLayout(Derived);
    CharUnits vBaseOffset = layout.getVBaseClassOffset(VBase);
    NonVirtualOffset += vBaseOffset;
    VBase = nullptr; // we no longer have a virtual step
  }

  // Get the base pointer type.
  llvm::Type *BaseValueTy = ConvertType((PathEnd[-1])->getType());
  llvm::Type *BasePtrTy =
      BaseValueTy->getPointerTo(Value.getType()->getPointerAddressSpace());

  QualType DerivedTy = getContext().getRecordType(Derived);
  CharUnits DerivedAlign = CGM.getClassPointerAlignment(Derived);

  // If the static offset is zero and we don't have a virtual step,
  // just do a bitcast; null checks are unnecessary.
  if (NonVirtualOffset.isZero() && !VBase) {
    if (sanitizePerformTypeCheck()) {
      SanitizerSet SkippedChecks;
      SkippedChecks.set(SanitizerKind::Null, !NullCheckValue);
      EmitTypeCheck(TCK_Upcast, Loc, Value.getPointer(),
                    DerivedTy, DerivedAlign, SkippedChecks);
    }
    return Builder.CreateElementBitCast(Value, BaseValueTy);
  }

  llvm::BasicBlock *origBB = nullptr;
  llvm::BasicBlock *endBB = nullptr;

  // Skip over the offset (and the vtable load) if we're supposed to
  // null-check the pointer.
  if (NullCheckValue) {
    origBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("cast.notnull");
    endBB = createBasicBlock("cast.end");

    llvm::Value *isNull = Builder.CreateIsNull(Value.getPointer());
    Builder.CreateCondBr(isNull, endBB, notNullBB);
    EmitBlock(notNullBB);
  }

  if (sanitizePerformTypeCheck()) {
    SanitizerSet SkippedChecks;
    SkippedChecks.set(SanitizerKind::Null, true);
    EmitTypeCheck(VBase ? TCK_UpcastToVirtualBase : TCK_Upcast, Loc,
                  Value.getPointer(), DerivedTy, DerivedAlign, SkippedChecks);
  }

  // Compute the virtual offset.
  llvm::Value *VirtualOffset = nullptr;
  if (VBase) {
    VirtualOffset =
      CGM.getCXXABI().GetVirtualBaseClassOffset(*this, Value, Derived, VBase);
  }

  // Apply both offsets.
  Value = ApplyNonVirtualAndVirtualOffset(*this, Value, NonVirtualOffset,
                                          VirtualOffset, Derived, VBase);

  // Cast to the destination type.
  Value = Builder.CreateElementBitCast(Value, BaseValueTy);

  // Build a phi if we needed a null check.
  if (NullCheckValue) {
    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    Builder.CreateBr(endBB);
    EmitBlock(endBB);

    llvm::PHINode *PHI = Builder.CreatePHI(BasePtrTy, 2, "cast.result");
    PHI->addIncoming(Value.getPointer(), notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(BasePtrTy), origBB);
    Value = Value.withPointer(PHI);
  }

  return Value;
}
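// Illustrative example (not from the original source): for
//
//   struct B { int b; };
//   struct D final : virtual B {};
//
// an upcast from 'D *' to 'B *' nominally needs a vtable lookup to locate
// the virtual base, but because D is final the object is known to be a
// complete D, so the vbase offset can be read from D's static layout and
// folded into NonVirtualOffset, avoiding the load.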
Address
CodeGenFunction::GetAddressOfDerivedClass(Address BaseAddr,
                                          const CXXRecordDecl *Derived,
                                        CastExpr::path_const_iterator PathBegin,
                                          CastExpr::path_const_iterator PathEnd,
                                          bool NullCheckValue) {
  assert(PathBegin != PathEnd && "Base path should not be empty!");

  QualType DerivedTy =
    getContext().getCanonicalType(getContext().getTagDeclType(Derived));
  unsigned AddrSpace = BaseAddr.getAddressSpace();
  llvm::Type *DerivedValueTy = ConvertType(DerivedTy);
  llvm::Type *DerivedPtrTy = DerivedValueTy->getPointerTo(AddrSpace);

  llvm::Value *NonVirtualOffset =
    CGM.GetNonVirtualBaseClassOffset(Derived, PathBegin, PathEnd);

  if (!NonVirtualOffset) {
    // No offset, we can just cast back.
    return Builder.CreateElementBitCast(BaseAddr, DerivedValueTy);
  }

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = nullptr;

  if (NullCheckValue) {
    CastNull = createBasicBlock("cast.null");
    CastNotNull = createBasicBlock("cast.notnull");
    CastEnd = createBasicBlock("cast.end");

    llvm::Value *IsNull = Builder.CreateIsNull(BaseAddr.getPointer());
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  // Apply the offset.
  llvm::Value *Value = Builder.CreateBitCast(BaseAddr.getPointer(), Int8PtrTy);
  Value = Builder.CreateInBoundsGEP(
      Int8Ty, Value, Builder.CreateNeg(NonVirtualOffset), "sub.ptr");

  // Just cast.
  Value = Builder.CreateBitCast(Value, DerivedPtrTy);

  // Produce a PHI if we had a null-check.
  if (NullCheckValue) {
    Builder.CreateBr(CastEnd);
    EmitBlock(CastNull);
    Builder.CreateBr(CastEnd);
    EmitBlock(CastEnd);

    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    Value = PHI;
  }

  return Address(Value, DerivedValueTy, CGM.getClassPointerAlignment(Derived));
}

llvm::Value *CodeGenFunction::GetVTTParameter(GlobalDecl GD,
                                              bool ForVirtualBase,
                                              bool Delegating) {
  if (!CGM.getCXXABI().NeedsVTTParameter(GD)) {
    // This constructor/destructor does not need a VTT parameter.
    return nullptr;
  }

  const CXXRecordDecl *RD = cast<CXXMethodDecl>(CurCodeDecl)->getParent();
  const CXXRecordDecl *Base = cast<CXXMethodDecl>(GD.getDecl())->getParent();

  uint64_t SubVTTIndex;

  if (Delegating) {
    // If this is a delegating constructor call, just load the VTT.
    return LoadCXXVTT();
  } else if (RD == Base) {
    // If the record matches the base, this is the complete ctor/dtor
    // variant calling the base variant in a class with virtual bases.
    assert(!CGM.getCXXABI().NeedsVTTParameter(CurGD) &&
           "doing no-op VTT offset in base dtor/ctor?");
    assert(!ForVirtualBase && "Can't have same class as virtual base!");
    SubVTTIndex = 0;
  } else {
    const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);
    CharUnits BaseOffset = ForVirtualBase ?
      Layout.getVBaseClassOffset(Base) :
      Layout.getBaseClassOffset(Base);

    SubVTTIndex =
      CGM.getVTables().getSubVTTIndex(RD, BaseSubobject(Base, BaseOffset));
    assert(SubVTTIndex != 0 && "Sub-VTT index must be greater than zero!");
  }

  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    // A VTT parameter was passed to the constructor, use it.
    llvm::Value *VTT = LoadCXXVTT();
    return Builder.CreateConstInBoundsGEP1_64(VoidPtrTy, VTT, SubVTTIndex);
  } else {
    // We're the complete constructor, so get the VTT by name.
    llvm::GlobalValue *VTT = CGM.getVTables().GetAddrOfVTT(RD);
    return Builder.CreateConstInBoundsGEP2_64(
        VTT->getValueType(), VTT, 0, SubVTTIndex);
  }
}
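// Illustrative note (not from the original source): in the Itanium ABI a VTT
// ("virtual table table") is emitted for hierarchies like
//
//   struct A { virtual void f(); };
//   struct B : virtual A {};
//   struct C : B {};
//
// so that B's base-subobject constructor, when run as part of constructing a
// C, can install the C-appropriate vtable pointers; the VTT parameter
// computed here tells that base constructor which sub-VTT slice to use.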
namespace {
  /// Call the destructor for a direct base class.
  struct CallBaseDtor final : EHScopeStack::Cleanup {
    const CXXRecordDecl *BaseClass;
    bool BaseIsVirtual;
    CallBaseDtor(const CXXRecordDecl *Base, bool BaseIsVirtual)
      : BaseClass(Base), BaseIsVirtual(BaseIsVirtual) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXRecordDecl *DerivedClass =
        cast<CXXMethodDecl>(CGF.CurCodeDecl)->getParent();

      const CXXDestructorDecl *D = BaseClass->getDestructor();
      // We are already inside a destructor, so presumably the object being
      // destroyed should have the expected type.
      QualType ThisTy = D->getThisObjectType();
      Address Addr =
        CGF.GetAddressOfDirectBaseInCompleteClass(CGF.LoadCXXThisAddress(),
                                                  DerivedClass, BaseClass,
                                                  BaseIsVirtual);
      CGF.EmitCXXDestructorCall(D, Dtor_Base, BaseIsVirtual,
                                /*Delegating=*/false, Addr, ThisTy);
    }
  };

  /// A visitor which checks whether an initializer uses 'this' in a
  /// way which requires the vtable to be properly set.
  struct DynamicThisUseChecker
      : ConstEvaluatedExprVisitor<DynamicThisUseChecker> {
    typedef ConstEvaluatedExprVisitor<DynamicThisUseChecker> super;

    bool UsesThis;

    DynamicThisUseChecker(const ASTContext &C) : super(C), UsesThis(false) {}

    // Black-list all explicit and implicit references to 'this'.
    //
    // Do we need to worry about external references to 'this' derived
    // from arbitrary code?  If so, then anything which runs arbitrary
    // external code might potentially access the vtable.
    void VisitCXXThisExpr(const CXXThisExpr *E) { UsesThis = true; }
  };
} // end anonymous namespace

static bool BaseInitializerUsesThis(ASTContext &C, const Expr *Init) {
  DynamicThisUseChecker Checker(C);
  Checker.Visit(Init);
  return Checker.UsesThis;
}
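// Illustrative example (not from the original source): in
//
//   struct Derived : Base {
//     Derived() : Base(this) {}   // passes 'this' to the base ctor
//   };
//
// the base initializer's argument contains a CXXThisExpr, so the vtable
// pointers must be installed before the initializer runs; a plain
// 'Base(42)' would not trigger this.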
static void EmitBaseInitializer(CodeGenFunction &CGF,
                                const CXXRecordDecl *ClassDecl,
                                CXXCtorInitializer *BaseInit) {
  assert(BaseInit->isBaseInitializer() &&
         "Must have base initializer!");

  Address ThisPtr = CGF.LoadCXXThisAddress();

  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());

  bool isBaseVirtual = BaseInit->isBaseVirtual();

  // If the initializer for the base (other than the constructor
  // itself) accesses 'this' in any way, we need to initialize the
  // vtables.
  if (BaseInitializerUsesThis(CGF.getContext(), BaseInit->getInit()))
    CGF.InitializeVTablePointers(ClassDecl);

  // We can pretend to be a complete class because it only matters for
  // virtual bases, and we only do virtual bases for complete ctors.
  Address V =
    CGF.GetAddressOfDirectBaseInCompleteClass(ThisPtr, ClassDecl,
                                              BaseClassDecl,
                                              isBaseVirtual);
  AggValueSlot AggSlot =
      AggValueSlot::forAddr(
          V, Qualifiers(),
          AggValueSlot::IsDestructed,
          AggValueSlot::DoesNotNeedGCBarriers,
          AggValueSlot::IsNotAliased,
          CGF.getOverlapForBaseInit(ClassDecl, BaseClassDecl, isBaseVirtual));

  CGF.EmitAggExpr(BaseInit->getInit(), AggSlot);

  if (CGF.CGM.getLangOpts().Exceptions &&
      !BaseClassDecl->hasTrivialDestructor())
    CGF.EHStack.pushCleanup<CallBaseDtor>(EHCleanup, BaseClassDecl,
                                          isBaseVirtual);
}

static bool isMemcpyEquivalentSpecialMember(const CXXMethodDecl *D) {
  auto *CD = dyn_cast<CXXConstructorDecl>(D);
  if (!(CD && CD->isCopyOrMoveConstructor()) &&
      !D->isCopyAssignmentOperator() && !D->isMoveAssignmentOperator())
    return false;

  // We can emit a memcpy for a trivial copy or move constructor/assignment.
  if (D->isTrivial() && !D->getParent()->mayInsertExtraPadding())
    return true;

  // We *must* emit a memcpy for a defaulted union copy or move op.
  if (D->getParent()->isUnion() && D->isDefaulted())
    return true;

  return false;
}
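// Illustrative examples (not from the original source):
//
//   struct P { int x, y; };     // trivial copy ctor: memcpy is legal
//   union U {
//     int i; float f;
//     U(const U &) = default;   // defaulted union copy: memcpy is required,
//   };                          //   since the active member is unknown
//
// whereas a user-provided copy constructor, or a class with sanitizer field
// padding inserted, must go through normal member-wise emission.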
static void EmitLValueForAnyFieldInitialization(CodeGenFunction &CGF,
                                                CXXCtorInitializer *MemberInit,
                                                LValue &LHS) {
  FieldDecl *Field = MemberInit->getAnyMember();
  if (MemberInit->isIndirectMemberInitializer()) {
    // If we are initializing an anonymous union field, drill down to the
    // field.
    IndirectFieldDecl *IndirectField = MemberInit->getIndirectMember();
    for (const auto *I : IndirectField->chain())
      LHS = CGF.EmitLValueForFieldInitialization(LHS, cast<FieldDecl>(I));
  } else {
    LHS = CGF.EmitLValueForFieldInitialization(LHS, Field);
  }
}

static void EmitMemberInitializer(CodeGenFunction &CGF,
                                  const CXXRecordDecl *ClassDecl,
                                  CXXCtorInitializer *MemberInit,
                                  const CXXConstructorDecl *Constructor,
                                  FunctionArgList &Args) {
  ApplyDebugLocation Loc(CGF, MemberInit->getSourceLocation());
  assert(MemberInit->isAnyMemberInitializer() &&
         "Must have member initializer!");
  assert(MemberInit->getInit() && "Must have initializer!");

  // non-static data member initializers.
  FieldDecl *Field = MemberInit->getAnyMember();
  QualType FieldType = Field->getType();

  llvm::Value *ThisPtr = CGF.LoadCXXThis();
  QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
  LValue LHS;

  // If a base constructor is being emitted, create an LValue that has the
  // non-virtual alignment.
  if (CGF.CurGD.getCtorType() == Ctor_Base)
    LHS = CGF.MakeNaturalAlignPointeeAddrLValue(ThisPtr, RecordTy);
  else
    LHS = CGF.MakeNaturalAlignAddrLValue(ThisPtr, RecordTy);

  EmitLValueForAnyFieldInitialization(CGF, MemberInit, LHS);

  // Special case: if we are in a copy or move constructor, and we are copying
  // an array of PODs or classes with trivial copy constructors, ignore the
  // AST and perform the copy we know is equivalent.
  // FIXME: This is hacky at best... if we had a bit more explicit information
  // in the AST, we could generalize it more easily.
  const ConstantArrayType *Array
    = CGF.getContext().getAsConstantArrayType(FieldType);
  if (Array && Constructor->isDefaulted() &&
      Constructor->isCopyOrMoveConstructor()) {
    QualType BaseElementTy = CGF.getContext().getBaseElementType(Array);
    CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());
    if (BaseElementTy.isPODType(CGF.getContext()) ||
        (CE && isMemcpyEquivalentSpecialMember(CE->getConstructor()))) {
      unsigned SrcArgIndex =
          CGF.CGM.getCXXABI().getSrcArgforCopyCtor(Constructor, Args);
      llvm::Value *SrcPtr
        = CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(Args[SrcArgIndex]));
      LValue ThisRHSLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(ThisRHSLV, Field);

      // Copy the aggregate.
      CGF.EmitAggregateCopy(LHS, Src, FieldType,
                            CGF.getOverlapForFieldInit(Field),
                            LHS.isVolatileQualified());

      // Ensure that we destroy the objects if an exception is thrown later in
      // the constructor.
      QualType::DestructionKind dtorKind = FieldType.isDestructedType();
      if (CGF.needsEHCleanup(dtorKind))
        CGF.pushEHDestroy(dtorKind, LHS.getAddress(CGF), FieldType);
      return;
    }
  }

  CGF.EmitInitializerForField(Field, LHS, MemberInit->getInit());
}

void CodeGenFunction::EmitInitializerForField(FieldDecl *Field, LValue LHS,
                                              Expr *Init) {
  QualType FieldType = Field->getType();
  switch (getEvaluationKind(FieldType)) {
  case TEK_Scalar:
    if (LHS.isSimple()) {
      EmitExprAsInit(Init, Field, LHS, false);
    } else {
      RValue RHS = RValue::get(EmitScalarExpr(Init));
      EmitStoreThroughLValue(RHS, LHS);
    }
    break;
  case TEK_Complex:
    EmitComplexExprIntoLValue(Init, LHS, /*isInit*/ true);
    break;
  case TEK_Aggregate: {
    AggValueSlot Slot = AggValueSlot::forLValue(
        LHS, *this, AggValueSlot::IsDestructed,
        AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
        getOverlapForFieldInit(Field), AggValueSlot::IsNotZeroed,
        // Checks are made by the code that calls constructor.
        AggValueSlot::IsSanitizerChecked);
    EmitAggExpr(Init, Slot);
    break;
  }
  }

  // Ensure that we destroy this object if an exception is thrown
  // later in the constructor.
  QualType::DestructionKind dtorKind = FieldType.isDestructedType();
  if (needsEHCleanup(dtorKind))
    pushEHDestroy(dtorKind, LHS.getAddress(*this), FieldType);
}

/// Checks whether the given constructor is a valid subject for the
/// complete-to-base constructor delegation optimization, i.e.
/// emitting the complete constructor as a simple call to the base
/// constructor.
bool CodeGenFunction::IsConstructorDelegationValid(
    const CXXConstructorDecl *Ctor) {
  // Currently we disable the optimization for classes with virtual
  // bases because (1) the addresses of parameter variables need to be
  // consistent across all initializers but (2) the delegate function
  // call necessarily creates a second copy of the parameter variable.
  //
  // The limiting example (purely theoretical AFAIK):
  //   struct A { A(int &c) { c++; } };
  //   struct B : virtual A {
  //     B(int count) : A(count) { printf("%d\n", count); }
  //   };
  // ...although even this example could in principle be emitted as a
  // delegation since the address of the parameter doesn't escape.
  if (Ctor->getParent()->getNumVBases()) {
    // TODO: white-list trivial vbase initializers.  This case wouldn't
    // be subject to the restrictions below.

    // TODO: white-list cases where:
    //  - there are no non-reference parameters to the constructor
    //  - the initializers don't access any non-reference parameters
    //  - the initializers don't take the address of non-reference
    //    parameters
    //  - etc.
    // If we ever add any of the above cases, remember that:
    //  - function-try-blocks will always exclude this optimization
    //  - we need to perform the constructor prologue and cleanup in
    //    EmitConstructorBody.

    return false;
  }

  // We also disable the optimization for variadic functions because
  // it's impossible to "re-pass" varargs.
  if (Ctor->getType()->castAs<FunctionProtoType>()->isVariadic())
    return false;

  // FIXME: Decide if we can do a delegation of a delegating constructor.
  if (Ctor->isDelegatingConstructor())
    return false;

  return true;
}
// Emit code in ctor (Prologue==true) or dtor (Prologue==false)
// to poison the extra field paddings inserted under
// -fsanitize-address-field-padding=1|2.
void CodeGenFunction::EmitAsanPrologueOrEpilogue(bool Prologue) {
  ASTContext &Context = getContext();
  const CXXRecordDecl *ClassDecl =
      Prologue ? cast<CXXConstructorDecl>(CurGD.getDecl())->getParent()
               : cast<CXXDestructorDecl>(CurGD.getDecl())->getParent();
  if (!ClassDecl->mayInsertExtraPadding()) return;

  struct SizeAndOffset {
    uint64_t Size;
    uint64_t Offset;
  };

  unsigned PtrSize = CGM.getDataLayout().getPointerSizeInBits();
  const ASTRecordLayout &Info = Context.getASTRecordLayout(ClassDecl);

  // Populate sizes and offsets of fields.
  SmallVector<SizeAndOffset, 16> SSV(Info.getFieldCount());
  for (unsigned i = 0, e = Info.getFieldCount(); i != e; ++i)
    SSV[i].Offset =
        Context.toCharUnitsFromBits(Info.getFieldOffset(i)).getQuantity();

  size_t NumFields = 0;
  for (const auto *Field : ClassDecl->fields()) {
    const FieldDecl *D = Field;
    auto FieldInfo = Context.getTypeInfoInChars(D->getType());
    CharUnits FieldSize = FieldInfo.Width;
    assert(NumFields < SSV.size());
    SSV[NumFields].Size = D->isBitField() ? 0 : FieldSize.getQuantity();
    NumFields++;
  }
  assert(NumFields == SSV.size());
  if (SSV.size() <= 1) return;

  // We will insert calls to __asan_* run-time functions.
  // LLVM AddressSanitizer pass may decide to inline them later.
  llvm::Type *Args[2] = {IntPtrTy, IntPtrTy};
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(CGM.VoidTy, Args, false);
  llvm::FunctionCallee F = CGM.CreateRuntimeFunction(
      FTy, Prologue ? "__asan_poison_intra_object_redzone"
                    : "__asan_unpoison_intra_object_redzone");

  llvm::Value *ThisPtr = LoadCXXThis();
  ThisPtr = Builder.CreatePtrToInt(ThisPtr, IntPtrTy);
  uint64_t TypeSize = Info.getNonVirtualSize().getQuantity();
  // For each field check if it has sufficient padding,
  // if so (un)poison it with a call.
  for (size_t i = 0; i < SSV.size(); i++) {
    uint64_t AsanAlignment = 8;
    uint64_t NextField = i == SSV.size() - 1 ? TypeSize : SSV[i + 1].Offset;
    uint64_t PoisonSize = NextField - SSV[i].Offset - SSV[i].Size;
    uint64_t EndOffset = SSV[i].Offset + SSV[i].Size;
    if (PoisonSize < AsanAlignment || !SSV[i].Size ||
        (NextField % AsanAlignment) != 0)
      continue;
    Builder.CreateCall(
        F, {Builder.CreateAdd(ThisPtr, Builder.getIntN(PtrSize, EndOffset)),
            Builder.getIntN(PtrSize, PoisonSize)});
  }
}
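// Illustrative sketch (not from the original source): under
// -fsanitize-address-field-padding, a class such as
//
//   class C { char a; /* inserted redzone */ int b; /* redzone */ ... };
//
// gets extra padding between fields; the constructor poisons each redzone
// (the bytes from the end of one field to the start of the next) so ASan
// can report intra-object overflows, and the destructor unpoisons them
// before the memory is reused.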
/// EmitConstructorBody - Emits the body of the current constructor.
void CodeGenFunction::EmitConstructorBody(FunctionArgList &Args) {
  EmitAsanPrologueOrEpilogue(true);
  const CXXConstructorDecl *Ctor = cast<CXXConstructorDecl>(CurGD.getDecl());
  CXXCtorType CtorType = CurGD.getCtorType();

  assert((CGM.getTarget().getCXXABI().hasConstructorVariants() ||
          CtorType == Ctor_Complete) &&
         "can only generate complete ctor for this ABI");

  // Before we go any further, try the complete->base constructor
  // delegation optimization.
  if (CtorType == Ctor_Complete && IsConstructorDelegationValid(Ctor) &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    EmitDelegateCXXConstructorCall(Ctor, Ctor_Base, Args, Ctor->getEndLoc());
    return;
  }

  const FunctionDecl *Definition = nullptr;
  Stmt *Body = Ctor->getBody(Definition);
  assert(Definition == Ctor && "emitting wrong constructor body");

  // Enter the function-try-block before the constructor prologue if
  // applicable.
  bool IsTryBody = (Body && isa<CXXTryStmt>(Body));
  if (IsTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);

  incrementProfileCounter(Body);

  RunCleanupsScope RunCleanups(*this);

  // TODO: in restricted cases, we can emit the vbase initializers of
  // a complete ctor and then delegate to the base ctor.

  // Emit the constructor prologue, i.e. the base and member
  // initializers.
  EmitCtorPrologue(Ctor, CtorType, Args);

  // Emit the body of the statement.
  if (IsTryBody)
    EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
  else if (Body)
    EmitStmt(Body);

  // Emit any cleanup blocks associated with the member or base
  // initializers, which includes (along the exceptional path) the
  // destructors for those members and bases that were fully
  // constructed.
  RunCleanups.ForceCleanup();

  if (IsTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

namespace {
  /// RAII object to indicate that codegen is copying the value representation
  /// instead of the object representation. Useful when copying a struct or
  /// class which has uninitialized members and we're only performing
  /// lvalue-to-rvalue conversion on the object but not its members.
  class CopyingValueRepresentation {
  public:
    explicit CopyingValueRepresentation(CodeGenFunction &CGF)
        : CGF(CGF), OldSanOpts(CGF.SanOpts) {
      CGF.SanOpts.set(SanitizerKind::Bool, false);
      CGF.SanOpts.set(SanitizerKind::Enum, false);
    }
    ~CopyingValueRepresentation() {
      CGF.SanOpts = OldSanOpts;
    }
  private:
    CodeGenFunction &CGF;
    SanitizerSet OldSanOpts;
  };
} // end anonymous namespace

namespace {
  class FieldMemcpyizer {
  public:
    FieldMemcpyizer(CodeGenFunction &CGF, const CXXRecordDecl *ClassDecl,
                    const VarDecl *SrcRec)
      : CGF(CGF), ClassDecl(ClassDecl), SrcRec(SrcRec),
        RecLayout(CGF.getContext().getASTRecordLayout(ClassDecl)),
        FirstField(nullptr), LastField(nullptr), FirstFieldOffset(0),
        LastFieldOffset(0), LastAddedFieldIndex(0) {}

    bool isMemcpyableField(FieldDecl *F) const {
      // Never memcpy fields when we are adding poisoned paddings.
      if (CGF.getContext().getLangOpts().SanitizeAddressFieldPadding)
        return false;
      Qualifiers Qual = F->getType().getQualifiers();
      if (Qual.hasVolatile() || Qual.hasObjCLifetime())
        return false;
      return true;
    }

    void addMemcpyableField(FieldDecl *F) {
      if (F->isZeroSize(CGF.getContext()))
        return;
      if (!FirstField)
        addInitialField(F);
      else
        addNextField(F);
    }

    CharUnits getMemcpySize(uint64_t FirstByteOffset) const {
      ASTContext &Ctx = CGF.getContext();
      unsigned LastFieldSize =
          LastField->isBitField()
              ? LastField->getBitWidthValue(Ctx)
              : Ctx.toBits(
                    Ctx.getTypeInfoDataSizeInChars(LastField->getType()).Width);
      uint64_t MemcpySizeBits = LastFieldOffset + LastFieldSize -
                                FirstByteOffset + Ctx.getCharWidth() - 1;
      CharUnits MemcpySize = Ctx.toCharUnitsFromBits(MemcpySizeBits);
      return MemcpySize;
    }

    void emitMemcpy() {
      // Give the subclass a chance to bail out if it feels the memcpy isn't
      // worth it (e.g. Hasn't aggregated enough data).
      if (!FirstField) {
        return;
      }

      uint64_t FirstByteOffset;
      if (FirstField->isBitField()) {
        const CGRecordLayout &RL =
          CGF.getTypes().getCGRecordLayout(FirstField->getParent());
        const CGBitFieldInfo &BFInfo = RL.getBitFieldInfo(FirstField);
        // FirstFieldOffset is not appropriate for bitfields,
        // we need to use the storage offset instead.
        FirstByteOffset = CGF.getContext().toBits(BFInfo.StorageOffset);
      } else {
        FirstByteOffset = FirstFieldOffset;
      }

      CharUnits MemcpySize = getMemcpySize(FirstByteOffset);
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
      Address ThisPtr = CGF.LoadCXXThisAddress();
      LValue DestLV = CGF.MakeAddrLValue(ThisPtr, RecordTy);
      LValue Dest = CGF.EmitLValueForFieldInitialization(DestLV, FirstField);
      llvm::Value *SrcPtr =
          CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(SrcRec));
      LValue SrcLV = CGF.MakeNaturalAlignAddrLValue(SrcPtr, RecordTy);
      LValue Src = CGF.EmitLValueForFieldInitialization(SrcLV, FirstField);

      emitMemcpyIR(
          Dest.isBitField() ? Dest.getBitFieldAddress() : Dest.getAddress(CGF),
          Src.isBitField() ? Src.getBitFieldAddress() : Src.getAddress(CGF),
          MemcpySize);
      reset();
    }

    void reset() {
      FirstField = nullptr;
    }

  protected:
    CodeGenFunction &CGF;
    const CXXRecordDecl *ClassDecl;

  private:
    void emitMemcpyIR(Address DestPtr, Address SrcPtr, CharUnits Size) {
      DestPtr = CGF.Builder.CreateElementBitCast(DestPtr, CGF.Int8Ty);
      SrcPtr = CGF.Builder.CreateElementBitCast(SrcPtr, CGF.Int8Ty);
      CGF.Builder.CreateMemCpy(DestPtr, SrcPtr, Size.getQuantity());
    }

    void addInitialField(FieldDecl *F) {
      FirstField = F;
      LastField = F;
      FirstFieldOffset = RecLayout.getFieldOffset(F->getFieldIndex());
      LastFieldOffset = FirstFieldOffset;
      LastAddedFieldIndex = F->getFieldIndex();
    }

    void addNextField(FieldDecl *F) {
      // For the most part, the following invariant will hold:
      //   F->getFieldIndex() == LastAddedFieldIndex + 1
      // The one exception is that Sema won't add a copy-initializer for an
      // unnamed bitfield, which will show up here as a gap in the sequence.
      assert(F->getFieldIndex() >= LastAddedFieldIndex + 1 &&
             "Cannot aggregate fields out of order.");
      LastAddedFieldIndex = F->getFieldIndex();

      // The 'first' and 'last' fields are chosen by offset, rather than field
      // index. This allows the code to support bitfields, as well as regular
      // fields.
      uint64_t FOffset = RecLayout.getFieldOffset(F->getFieldIndex());
      if (FOffset < FirstFieldOffset) {
        FirstField = F;
        FirstFieldOffset = FOffset;
      } else if (FOffset >= LastFieldOffset) {
        LastField = F;
        LastFieldOffset = FOffset;
      }
    }
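    // Illustrative example (not from the original source): for
    //
    //   struct S {
    //     int a;
    //     int : 8;   // unnamed bitfield: Sema emits no copy-initializer
    //     int b;
    //   };
    //
    // the aggregated fields arrive as a, b with a gap at the bitfield's
    // index, which is why the assert above permits index jumps rather than
    // requiring strictly consecutive field indices.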
    const VarDecl *SrcRec;
    const ASTRecordLayout &RecLayout;
    FieldDecl *FirstField;
    FieldDecl *LastField;
    uint64_t FirstFieldOffset, LastFieldOffset;
    unsigned LastAddedFieldIndex;
  };

  class ConstructorMemcpyizer : public FieldMemcpyizer {
  private:
    /// Get source argument for copy constructor. Returns null if not a copy
    /// constructor.
    static const VarDecl *getTrivialCopySource(CodeGenFunction &CGF,
                                               const CXXConstructorDecl *CD,
                                               FunctionArgList &Args) {
      if (CD->isCopyOrMoveConstructor() && CD->isDefaulted())
        return Args[CGF.CGM.getCXXABI().getSrcArgforCopyCtor(CD, Args)];
      return nullptr;
    }

    // Returns true if a CXXCtorInitializer represents a member initialization
    // that can be rolled into a memcpy.
    bool isMemberInitMemcpyable(CXXCtorInitializer *MemberInit) const {
      if (!MemcpyableCtor)
        return false;

      FieldDecl *Field = MemberInit->getMember();
      assert(Field && "No field for member init.");
      QualType FieldType = Field->getType();
      CXXConstructExpr *CE = dyn_cast<CXXConstructExpr>(MemberInit->getInit());

      // Bail out on non-memcpyable, not-trivially-copyable members.
      if (!(CE && isMemcpyEquivalentSpecialMember(CE->getConstructor())) &&
          !(FieldType.isTriviallyCopyableType(CGF.getContext()) ||
            FieldType->isReferenceType()))
        return false;

      // Bail out on volatile fields.
      if (!isMemcpyableField(Field))
        return false;

      // Otherwise we're good.
      return true;
    }

  public:
    ConstructorMemcpyizer(CodeGenFunction &CGF, const CXXConstructorDecl *CD,
                          FunctionArgList &Args)
      : FieldMemcpyizer(CGF, CD->getParent(),
                        getTrivialCopySource(CGF, CD, Args)),
        ConstructorDecl(CD),
        MemcpyableCtor(CD->isDefaulted() &&
                       CD->isCopyOrMoveConstructor() &&
                       CGF.getLangOpts().getGC() == LangOptions::NonGC),
        Args(Args) { }

    void addMemberInitializer(CXXCtorInitializer *MemberInit) {
      if (isMemberInitMemcpyable(MemberInit)) {
        AggregatedInits.push_back(MemberInit);
        addMemcpyableField(MemberInit->getMember());
      } else {
        emitAggregatedInits();
        EmitMemberInitializer(CGF, ConstructorDecl->getParent(), MemberInit,
                              ConstructorDecl, Args);
      }
    }

    void emitAggregatedInits() {
      if (AggregatedInits.size() <= 1) {
        // This memcpy is too small to be worthwhile. Fall back on default
        // codegen.
        if (!AggregatedInits.empty()) {
          CopyingValueRepresentation CVR(CGF);
          EmitMemberInitializer(CGF, ConstructorDecl->getParent(),
                                AggregatedInits[0], ConstructorDecl, Args);
          AggregatedInits.clear();
        }
        reset();
        return;
      }

      pushEHDestructors();
      emitMemcpy();
      AggregatedInits.clear();
    }

    void pushEHDestructors() {
      Address ThisPtr = CGF.LoadCXXThisAddress();
      QualType RecordTy = CGF.getContext().getTypeDeclType(ClassDecl);
      LValue LHS = CGF.MakeAddrLValue(ThisPtr, RecordTy);

      for (unsigned i = 0; i < AggregatedInits.size(); ++i) {
        CXXCtorInitializer *MemberInit = AggregatedInits[i];
        QualType FieldType = MemberInit->getAnyMember()->getType();
        QualType::DestructionKind dtorKind = FieldType.isDestructedType();
        if (!CGF.needsEHCleanup(dtorKind))
          continue;
        LValue FieldLHS = LHS;
        EmitLValueForAnyFieldInitialization(CGF, MemberInit, FieldLHS);
        CGF.pushEHDestroy(dtorKind, FieldLHS.getAddress(CGF), FieldType);
      }
    }

    void finish() {
      emitAggregatedInits();
    }

  private:
    const CXXConstructorDecl *ConstructorDecl;
    bool MemcpyableCtor;
    FunctionArgList &Args;
    SmallVector<CXXCtorInitializer*, 16> AggregatedInits;
  };
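  // Illustrative example (not from the original source): in a defaulted copy
  // constructor of
  //
  //   struct T { int a, b, c; std::string s; };
  //
  // the initializers for a, b, and c can be aggregated into a single memcpy,
  // while s is not trivially copyable and falls back to its copy constructor
  // via EmitMemberInitializer.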
  981. class AssignmentMemcpyizer : public FieldMemcpyizer {
  982. private:
  983. // Returns the memcpyable field copied by the given statement, if one
  984. // exists. Otherwise returns null.
    FieldDecl *getMemcpyableField(Stmt *S) {
      if (!AssignmentsMemcpyable)
        return nullptr;
      if (BinaryOperator *BO = dyn_cast<BinaryOperator>(S)) {
        // Recognise trivial assignments.
        if (BO->getOpcode() != BO_Assign)
          return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(BO->getLHS());
        if (!ME)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        Stmt *RHS = BO->getRHS();
        if (ImplicitCastExpr *EC = dyn_cast<ImplicitCastExpr>(RHS))
          RHS = EC->getSubExpr();
        if (!RHS)
          return nullptr;
        if (MemberExpr *ME2 = dyn_cast<MemberExpr>(RHS)) {
          if (ME2->getMemberDecl() == Field)
            return Field;
        }
        return nullptr;
      } else if (CXXMemberCallExpr *MCE = dyn_cast<CXXMemberCallExpr>(S)) {
        CXXMethodDecl *MD = dyn_cast<CXXMethodDecl>(MCE->getCalleeDecl());
        if (!(MD && isMemcpyEquivalentSpecialMember(MD)))
          return nullptr;
        MemberExpr *IOA = dyn_cast<MemberExpr>(MCE->getImplicitObjectArgument());
        if (!IOA)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(IOA->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        MemberExpr *Arg0 = dyn_cast<MemberExpr>(MCE->getArg(0));
        if (!Arg0 || Field != dyn_cast<FieldDecl>(Arg0->getMemberDecl()))
          return nullptr;
        return Field;
      } else if (CallExpr *CE = dyn_cast<CallExpr>(S)) {
        FunctionDecl *FD = dyn_cast<FunctionDecl>(CE->getCalleeDecl());
        if (!FD || FD->getBuiltinID() != Builtin::BI__builtin_memcpy)
          return nullptr;
        Expr *DstPtr = CE->getArg(0);
        if (ImplicitCastExpr *DC = dyn_cast<ImplicitCastExpr>(DstPtr))
          DstPtr = DC->getSubExpr();
        UnaryOperator *DUO = dyn_cast<UnaryOperator>(DstPtr);
        if (!DUO || DUO->getOpcode() != UO_AddrOf)
          return nullptr;
        MemberExpr *ME = dyn_cast<MemberExpr>(DUO->getSubExpr());
        if (!ME)
          return nullptr;
        FieldDecl *Field = dyn_cast<FieldDecl>(ME->getMemberDecl());
        if (!Field || !isMemcpyableField(Field))
          return nullptr;
        Expr *SrcPtr = CE->getArg(1);
        if (ImplicitCastExpr *SC = dyn_cast<ImplicitCastExpr>(SrcPtr))
          SrcPtr = SC->getSubExpr();
        UnaryOperator *SUO = dyn_cast<UnaryOperator>(SrcPtr);
        if (!SUO || SUO->getOpcode() != UO_AddrOf)
          return nullptr;
        MemberExpr *ME2 = dyn_cast<MemberExpr>(SUO->getSubExpr());
        if (!ME2 || Field != dyn_cast<FieldDecl>(ME2->getMemberDecl()))
          return nullptr;
        return Field;
      }
      return nullptr;
    }

    bool AssignmentsMemcpyable;
    SmallVector<Stmt*, 16> AggregatedStmts;

  public:
    AssignmentMemcpyizer(CodeGenFunction &CGF, const CXXMethodDecl *AD,
                         FunctionArgList &Args)
      : FieldMemcpyizer(CGF, AD->getParent(), Args[Args.size() - 1]),
        AssignmentsMemcpyable(CGF.getLangOpts().getGC() == LangOptions::NonGC) {
      assert(Args.size() == 2);
    }

    void emitAssignment(Stmt *S) {
      FieldDecl *F = getMemcpyableField(S);
      if (F) {
        addMemcpyableField(F);
        AggregatedStmts.push_back(S);
      } else {
        emitAggregatedStmts();
        CGF.EmitStmt(S);
      }
    }

    void emitAggregatedStmts() {
      if (AggregatedStmts.size() <= 1) {
        if (!AggregatedStmts.empty()) {
          CopyingValueRepresentation CVR(CGF);
          CGF.EmitStmt(AggregatedStmts[0]);
        }
        reset();
      }

      emitMemcpy();
      AggregatedStmts.clear();
    }

    void finish() {
      emitAggregatedStmts();
    }
  };
} // end anonymous namespace

static bool isInitializerOfDynamicClass(const CXXCtorInitializer *BaseInit) {
  const Type *BaseType = BaseInit->getBaseClass();
  const auto *BaseClassDecl =
      cast<CXXRecordDecl>(BaseType->castAs<RecordType>()->getDecl());
  return BaseClassDecl->isDynamicClass();
}

/// EmitCtorPrologue - This routine generates necessary code to initialize
/// base classes and non-static data members belonging to this constructor.
void CodeGenFunction::EmitCtorPrologue(const CXXConstructorDecl *CD,
                                       CXXCtorType CtorType,
                                       FunctionArgList &Args) {
  if (CD->isDelegatingConstructor())
    return EmitDelegatingCXXConstructorCall(CD, Args);

  const CXXRecordDecl *ClassDecl = CD->getParent();

  CXXConstructorDecl::init_const_iterator B = CD->init_begin(),
                                          E = CD->init_end();

  // Virtual base initializers first, if any. They aren't needed if:
  // - This is a base ctor variant
  // - There are no vbases
  // - The class is abstract, so a complete object of it cannot be constructed
  //
  // The check for an abstract class is necessary because sema may not have
  // marked virtual base destructors referenced.
  bool ConstructVBases = CtorType != Ctor_Base &&
                         ClassDecl->getNumVBases() != 0 &&
                         !ClassDecl->isAbstract();

  // In the Microsoft C++ ABI, there are no constructor variants. Instead, the
  // constructor of a class with virtual bases takes an additional parameter to
  // conditionally construct the virtual bases. Emit that check here.
  llvm::BasicBlock *BaseCtorContinueBB = nullptr;
  if (ConstructVBases &&
      !CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    BaseCtorContinueBB =
        CGM.getCXXABI().EmitCtorCompleteObjectHandler(*this, ClassDecl);
    assert(BaseCtorContinueBB);
  }

  llvm::Value *const OldThis = CXXThisValue;
  for (; B != E && (*B)->isBaseInitializer() && (*B)->isBaseVirtual(); B++) {
    if (!ConstructVBases)
      continue;
    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B);
  }

  if (BaseCtorContinueBB) {
    // Complete object handler should continue to the remaining initializers.
    Builder.CreateBr(BaseCtorContinueBB);
    EmitBlock(BaseCtorContinueBB);
  }

  // Then, non-virtual base initializers.
  for (; B != E && (*B)->isBaseInitializer(); B++) {
    assert(!(*B)->isBaseVirtual());

    if (CGM.getCodeGenOpts().StrictVTablePointers &&
        CGM.getCodeGenOpts().OptimizationLevel > 0 &&
        isInitializerOfDynamicClass(*B))
      CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
    EmitBaseInitializer(*this, ClassDecl, *B);
  }

  CXXThisValue = OldThis;

  InitializeVTablePointers(ClassDecl);

  // And finally, initialize class members.
  FieldConstructionScope FCS(*this, LoadCXXThisAddress());
  ConstructorMemcpyizer CM(*this, CD, Args);
  for (; B != E; B++) {
    CXXCtorInitializer *Member = (*B);
    assert(!Member->isBaseInitializer());
    assert(Member->isAnyMemberInitializer() &&
           "Delegating initializer on non-delegating constructor");
    CM.addMemberInitializer(Member);
  }
  CM.finish();
}
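
// To summarize the ordering above: for a constructor of
//   struct D : virtual V, B { M m; D(); };
// the complete-object variant initializes V, then B, then stores the vtable
// pointers, and finally constructs the member 'm'.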

static bool
FieldHasTrivialDestructorBody(ASTContext &Context, const FieldDecl *Field);

static bool
HasTrivialDestructorBody(ASTContext &Context,
                         const CXXRecordDecl *BaseClassDecl,
                         const CXXRecordDecl *MostDerivedClassDecl)
{
  // If the destructor is trivial we don't have to check anything else.
  if (BaseClassDecl->hasTrivialDestructor())
    return true;

  if (!BaseClassDecl->getDestructor()->hasTrivialBody())
    return false;

  // Check fields.
  for (const auto *Field : BaseClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(Context, Field))
      return false;

  // Check non-virtual bases.
  for (const auto &I : BaseClassDecl->bases()) {
    if (I.isVirtual())
      continue;

    const CXXRecordDecl *NonVirtualBase =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
    if (!HasTrivialDestructorBody(Context, NonVirtualBase,
                                  MostDerivedClassDecl))
      return false;
  }

  if (BaseClassDecl == MostDerivedClassDecl) {
    // Check virtual bases.
    for (const auto &I : BaseClassDecl->vbases()) {
      const CXXRecordDecl *VirtualBase =
          cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());
      if (!HasTrivialDestructorBody(Context, VirtualBase,
                                    MostDerivedClassDecl))
        return false;
    }
  }

  return true;
}

static bool
FieldHasTrivialDestructorBody(ASTContext &Context,
                              const FieldDecl *Field)
{
  QualType FieldBaseElementType = Context.getBaseElementType(Field->getType());

  const RecordType *RT = FieldBaseElementType->getAs<RecordType>();
  if (!RT)
    return true;

  CXXRecordDecl *FieldClassDecl = cast<CXXRecordDecl>(RT->getDecl());

  // The destructor for an implicit anonymous union member is never invoked.
  if (FieldClassDecl->isUnion() && FieldClassDecl->isAnonymousStructOrUnion())
    return false;

  return HasTrivialDestructorBody(Context, FieldClassDecl, FieldClassDecl);
}
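
// For example, a field of non-record type such as 'int a[4]' trivially
// passes (the base element type is not a record), while a field whose class
// has a user-written, non-empty destructor body fails the checks above.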

/// CanSkipVTablePointerInitialization - Check whether we need to initialize
/// any vtable pointers before calling this destructor.
static bool CanSkipVTablePointerInitialization(CodeGenFunction &CGF,
                                               const CXXDestructorDecl *Dtor) {
  const CXXRecordDecl *ClassDecl = Dtor->getParent();
  if (!ClassDecl->isDynamicClass())
    return true;

  // For a final class, the vtable pointer is known to already point to the
  // class's vtable.
  if (ClassDecl->isEffectivelyFinal())
    return true;

  if (!Dtor->hasTrivialBody())
    return false;

  // Check the fields.
  for (const auto *Field : ClassDecl->fields())
    if (!FieldHasTrivialDestructorBody(CGF.getContext(), Field))
      return false;

  return true;
}
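
// E.g. for 'struct F final : Base { ~F(); };' no class can derive from F, so
// when ~F runs the vptr still points at F's vtable and the stores can be
// skipped.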

/// EmitDestructorBody - Emits the body of the current destructor.
void CodeGenFunction::EmitDestructorBody(FunctionArgList &Args) {
  const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CurGD.getDecl());
  CXXDtorType DtorType = CurGD.getDtorType();

  // For an abstract class, non-base destructors are never used (and can't
  // be emitted in general, because vbase dtors may not have been validated
  // by Sema), but the Itanium ABI doesn't make them optional and Clang may
  // in fact emit references to them from other compilations, so emit them
  // as functions containing a trap instruction.
  if (DtorType != Dtor_Base && Dtor->getParent()->isAbstract()) {
    llvm::CallInst *TrapCall = EmitTrapCall(llvm::Intrinsic::trap);
    TrapCall->setDoesNotReturn();
    TrapCall->setDoesNotThrow();
    Builder.CreateUnreachable();
    Builder.ClearInsertionPoint();
    return;
  }

  Stmt *Body = Dtor->getBody();
  if (Body)
    incrementProfileCounter(Body);

  // The call to operator delete in a deleting destructor happens
  // outside of the function-try-block, which means it's always
  // possible to delegate the destructor body to the complete
  // destructor. Do so.
  if (DtorType == Dtor_Deleting) {
    RunCleanupsScope DtorEpilogue(*this);
    EnterDtorCleanups(Dtor, Dtor_Deleting);
    if (HaveInsertPoint()) {
      QualType ThisTy = Dtor->getThisObjectType();
      EmitCXXDestructorCall(Dtor, Dtor_Complete, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
    }
    return;
  }

  // If the body is a function-try-block, enter the try before
  // anything else.
  bool isTryBody = (Body && isa<CXXTryStmt>(Body));
  if (isTryBody)
    EnterCXXTryStmt(*cast<CXXTryStmt>(Body), true);
  EmitAsanPrologueOrEpilogue(false);

  // Enter the epilogue cleanups.
  RunCleanupsScope DtorEpilogue(*this);

  // If this is the complete variant, just invoke the base variant;
  // the epilogue will destruct the virtual bases. But we can't do
  // this optimization if the body is a function-try-block, because
  // we'd introduce *two* handler blocks. In the Microsoft ABI, we
  // always delegate because we might not have a definition in this TU.
  switch (DtorType) {
  case Dtor_Comdat: llvm_unreachable("not expecting a COMDAT");
  case Dtor_Deleting: llvm_unreachable("already handled deleting case");

  case Dtor_Complete:
    assert((Body || getTarget().getCXXABI().isMicrosoft()) &&
           "can't emit a dtor without a body for non-Microsoft ABIs");

    // Enter the cleanup scopes for virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Complete);

    if (!isTryBody) {
      QualType ThisTy = Dtor->getThisObjectType();
      EmitCXXDestructorCall(Dtor, Dtor_Base, /*ForVirtualBase=*/false,
                            /*Delegating=*/false, LoadCXXThisAddress(), ThisTy);
      break;
    }

    // Fallthrough: act like we're in the base variant.
    LLVM_FALLTHROUGH;

  case Dtor_Base:
    assert(Body);

    // Enter the cleanup scopes for fields and non-virtual bases.
    EnterDtorCleanups(Dtor, Dtor_Base);

    // Initialize the vtable pointers before entering the body.
    if (!CanSkipVTablePointerInitialization(*this, Dtor)) {
      // Insert the llvm.launder.invariant.group intrinsic before initializing
      // the vptrs to cancel any previous assumptions we might have made.
      if (CGM.getCodeGenOpts().StrictVTablePointers &&
          CGM.getCodeGenOpts().OptimizationLevel > 0)
        CXXThisValue = Builder.CreateLaunderInvariantGroup(LoadCXXThis());
      InitializeVTablePointers(Dtor->getParent());
    }

    if (isTryBody)
      EmitStmt(cast<CXXTryStmt>(Body)->getTryBlock());
    else if (Body)
      EmitStmt(Body);
    else {
      assert(Dtor->isImplicit() && "bodyless dtor not implicit");
      // nothing to do besides what's in the epilogue
    }
    // -fapple-kext must inline any call to this dtor into
    // the caller's body.
    if (getLangOpts().AppleKext)
      CurFn->addFnAttr(llvm::Attribute::AlwaysInline);

    break;
  }

  // Jump out through the epilogue cleanups.
  DtorEpilogue.ForceCleanup();

  // Exit the try if applicable.
  if (isTryBody)
    ExitCXXTryStmt(*cast<CXXTryStmt>(Body), true);
}

void CodeGenFunction::emitImplicitAssignmentOperatorBody(FunctionArgList &Args) {
  const CXXMethodDecl *AssignOp = cast<CXXMethodDecl>(CurGD.getDecl());
  const Stmt *RootS = AssignOp->getBody();
  assert(isa<CompoundStmt>(RootS) &&
         "Body of an implicit assignment operator should be compound stmt.");
  const CompoundStmt *RootCS = cast<CompoundStmt>(RootS);

  LexicalScope Scope(*this, RootCS->getSourceRange());

  incrementProfileCounter(RootCS);
  AssignmentMemcpyizer AM(*this, AssignOp, Args);
  for (auto *I : RootCS->body())
    AM.emitAssignment(I);
  AM.finish();
}
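
// The synthesized body of a defaulted copy-assignment operator is typically a
// sequence of per-field assignments followed by a 'return *this;'. Consecutive
// memcpyable assignments are coalesced by the AssignmentMemcpyizer; any other
// statement (including the return) flushes the aggregate and is emitted
// normally.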

namespace {
  llvm::Value *LoadThisForDtorDelete(CodeGenFunction &CGF,
                                     const CXXDestructorDecl *DD) {
    if (Expr *ThisArg = DD->getOperatorDeleteThisArg())
      return CGF.EmitScalarExpr(ThisArg);
    return CGF.LoadCXXThis();
  }

  /// Call the operator delete associated with the current destructor.
  struct CallDtorDelete final : EHScopeStack::Cleanup {
    CallDtorDelete() {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
      const CXXRecordDecl *ClassDecl = Dtor->getParent();
      CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
                         LoadThisForDtorDelete(CGF, Dtor),
                         CGF.getContext().getTagDeclType(ClassDecl));
    }
  };

  void EmitConditionalDtorDeleteCall(CodeGenFunction &CGF,
                                     llvm::Value *ShouldDeleteCondition,
                                     bool ReturnAfterDelete) {
    llvm::BasicBlock *callDeleteBB = CGF.createBasicBlock("dtor.call_delete");
    llvm::BasicBlock *continueBB = CGF.createBasicBlock("dtor.continue");
    llvm::Value *ShouldCallDelete
      = CGF.Builder.CreateIsNull(ShouldDeleteCondition);
    CGF.Builder.CreateCondBr(ShouldCallDelete, continueBB, callDeleteBB);

    CGF.EmitBlock(callDeleteBB);
    const CXXDestructorDecl *Dtor = cast<CXXDestructorDecl>(CGF.CurCodeDecl);
    const CXXRecordDecl *ClassDecl = Dtor->getParent();
    CGF.EmitDeleteCall(Dtor->getOperatorDelete(),
                       LoadThisForDtorDelete(CGF, Dtor),
                       CGF.getContext().getTagDeclType(ClassDecl));
    assert(Dtor->getOperatorDelete()->isDestroyingOperatorDelete() ==
               ReturnAfterDelete &&
           "unexpected value for ReturnAfterDelete");
    if (ReturnAfterDelete)
      CGF.EmitBranchThroughCleanup(CGF.ReturnBlock);
    else
      CGF.Builder.CreateBr(continueBB);

    CGF.EmitBlock(continueBB);
  }

  struct CallDtorDeleteConditional final : EHScopeStack::Cleanup {
    llvm::Value *ShouldDeleteCondition;

  public:
    CallDtorDeleteConditional(llvm::Value *ShouldDeleteCondition)
        : ShouldDeleteCondition(ShouldDeleteCondition) {
      assert(ShouldDeleteCondition != nullptr);
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      EmitConditionalDtorDeleteCall(CGF, ShouldDeleteCondition,
                                    /*ReturnAfterDelete*/false);
    }
  };

  class DestroyField final : public EHScopeStack::Cleanup {
    const FieldDecl *field;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;

  public:
    DestroyField(const FieldDecl *field, CodeGenFunction::Destroyer *destroyer,
                 bool useEHCleanupForArray)
        : field(field), destroyer(destroyer),
          useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      // Find the address of the field.
      Address thisValue = CGF.LoadCXXThisAddress();
      QualType RecordTy = CGF.getContext().getTagDeclType(field->getParent());
      LValue ThisLV = CGF.MakeAddrLValue(thisValue, RecordTy);
      LValue LV = CGF.EmitLValueForField(ThisLV, field);
      assert(LV.isSimple());

      CGF.emitDestroy(LV.getAddress(CGF), field->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };

  static void EmitSanitizerDtorCallback(CodeGenFunction &CGF, llvm::Value *Ptr,
                                        CharUnits::QuantityType PoisonSize) {
    CodeGenFunction::SanitizerScope SanScope(&CGF);
    // Pass in void pointer and size of region as arguments to runtime
    // function.
    llvm::Value *Args[] = {CGF.Builder.CreateBitCast(Ptr, CGF.VoidPtrTy),
                           llvm::ConstantInt::get(CGF.SizeTy, PoisonSize)};
    llvm::Type *ArgTypes[] = {CGF.VoidPtrTy, CGF.SizeTy};

    llvm::FunctionType *FnType =
        llvm::FunctionType::get(CGF.VoidTy, ArgTypes, false);
    llvm::FunctionCallee Fn =
        CGF.CGM.CreateRuntimeFunction(FnType, "__sanitizer_dtor_callback");
    CGF.EmitNounwindRuntimeCall(Fn, Args);
  }
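
  // The runtime hook built above corresponds to the C signature
  //   void __sanitizer_dtor_callback(void *ptr, size_t size);
  // i.e. a nounwind call taking the region start and its size in bytes.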

  class SanitizeDtorMembers final : public EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;

  public:
    SanitizeDtorMembers(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

    // Generate function call for handling object poisoning.
    // Disables tail call elimination, to prevent the current stack frame
    // from disappearing from the stack trace.
    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const ASTRecordLayout &Layout =
          CGF.getContext().getASTRecordLayout(Dtor->getParent());

      // Nothing to poison.
      if (Layout.getFieldCount() == 0)
        return;

      // Prevent the current stack frame from disappearing from the stack trace.
      CGF.CurFn->addFnAttr("disable-tail-calls", "true");

      // Construct pointer to region to begin poisoning, and calculate poison
      // size, so that only members declared in this class are poisoned.
      ASTContext &Context = CGF.getContext();
      const RecordDecl *Decl = Dtor->getParent();
      auto Fields = Decl->fields();

      auto IsTrivial = [&](const FieldDecl *F) {
        return FieldHasTrivialDestructorBody(Context, F);
      };

      auto IsZeroSize = [&](const FieldDecl *F) {
        return F->isZeroSize(Context);
      };

      // Poison blocks of fields with trivial destructors, making sure that a
      // block's begin and end do not point to zero-sized fields: those don't
      // have correct offsets, so they can't be used to calculate the
      // poisoning range.
      for (auto It = Fields.begin(); It != Fields.end();) {
        It = std::find_if(It, Fields.end(), [&](const FieldDecl *F) {
          return IsTrivial(F) && !IsZeroSize(F);
        });
        if (It == Fields.end())
          break;
        auto Start = It++;
        It = std::find_if(It, Fields.end(), [&](const FieldDecl *F) {
          return !IsTrivial(F) && !IsZeroSize(F);
        });

        PoisonMembers(CGF, (*Start)->getFieldIndex(),
                      It == Fields.end() ? -1 : (*It)->getFieldIndex());
      }
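
      // E.g. for 'struct S { int a; std::string s; int b; ~S(); };' the loop
      // above emits two poison calls: one covering 'a' (ending at the offset
      // of 's') and one covering 'b' (ending at the non-virtual size).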
    }

  private:
    /// \param layoutStartOffset index of the ASTRecordLayout field to
    ///     start poisoning (inclusive)
    /// \param layoutEndOffset index of the ASTRecordLayout field to
    ///     end poisoning (exclusive)
    void PoisonMembers(CodeGenFunction &CGF, unsigned layoutStartOffset,
                       unsigned layoutEndOffset) {
      ASTContext &Context = CGF.getContext();
      const ASTRecordLayout &Layout =
          Context.getASTRecordLayout(Dtor->getParent());

      // The start field is the first trivial one, so it should already lie on
      // a char boundary; still, round the start offset up just in case.
      CharUnits PoisonStart =
          Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutStartOffset) +
                                      Context.getCharWidth() - 1);
      llvm::ConstantInt *OffsetSizePtr =
          llvm::ConstantInt::get(CGF.SizeTy, PoisonStart.getQuantity());

      llvm::Value *OffsetPtr = CGF.Builder.CreateGEP(
          CGF.Int8Ty,
          CGF.Builder.CreateBitCast(CGF.LoadCXXThis(), CGF.Int8PtrTy),
          OffsetSizePtr);

      CharUnits PoisonEnd;
      if (layoutEndOffset >= Layout.getFieldCount()) {
        PoisonEnd = Layout.getNonVirtualSize();
      } else {
        PoisonEnd =
            Context.toCharUnitsFromBits(Layout.getFieldOffset(layoutEndOffset));
      }
      CharUnits PoisonSize = PoisonEnd - PoisonStart;
      if (!PoisonSize.isPositive())
        return;

      EmitSanitizerDtorCallback(CGF, OffsetPtr, PoisonSize.getQuantity());
    }
  };

  class SanitizeDtorVTable final : public EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;

  public:
    SanitizeDtorVTable(const CXXDestructorDecl *Dtor) : Dtor(Dtor) {}

    // Generate function call for handling vtable pointer poisoning.
    void Emit(CodeGenFunction &CGF, Flags flags) override {
      assert(Dtor->getParent()->isDynamicClass());
      (void)Dtor;
      ASTContext &Context = CGF.getContext();
      // Poison vtable and vtable ptr if they exist for this class.
      llvm::Value *VTablePtr = CGF.LoadCXXThis();

      CharUnits::QuantityType PoisonSize =
          Context.toCharUnitsFromBits(CGF.PointerWidthInBits).getQuantity();
      // Pass in void pointer and size of region as arguments to runtime
      // function.
      EmitSanitizerDtorCallback(CGF, VTablePtr, PoisonSize);
    }
  };
} // end anonymous namespace

/// Emit all code that comes at the end of a class's destructor. This is to
/// call destructors on members and base classes in reverse order of their
/// construction.
///
/// For a deleting destructor, this also handles the case where a destroying
/// operator delete completely overrides the definition.
void CodeGenFunction::EnterDtorCleanups(const CXXDestructorDecl *DD,
                                        CXXDtorType DtorType) {
  assert((!DD->isTrivial() || DD->hasAttr<DLLExportAttr>()) &&
         "Should not emit dtor epilogue for non-exported trivial dtor!");

  // The deleting-destructor phase just needs to call the appropriate
  // operator delete that Sema picked up.
  if (DtorType == Dtor_Deleting) {
    assert(DD->getOperatorDelete() &&
           "operator delete missing - EnterDtorCleanups");
    if (CXXStructorImplicitParamValue) {
      // If there is an implicit param to the deleting dtor, it's a boolean
      // telling whether this is a deleting destructor.
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete())
        EmitConditionalDtorDeleteCall(*this, CXXStructorImplicitParamValue,
                                      /*ReturnAfterDelete*/true);
      else
        EHStack.pushCleanup<CallDtorDeleteConditional>(
            NormalAndEHCleanup, CXXStructorImplicitParamValue);
    } else {
      if (DD->getOperatorDelete()->isDestroyingOperatorDelete()) {
        const CXXRecordDecl *ClassDecl = DD->getParent();
        EmitDeleteCall(DD->getOperatorDelete(),
                       LoadThisForDtorDelete(*this, DD),
                       getContext().getTagDeclType(ClassDecl));
        EmitBranchThroughCleanup(ReturnBlock);
      } else {
        EHStack.pushCleanup<CallDtorDelete>(NormalAndEHCleanup);
      }
    }
    return;
  }

  const CXXRecordDecl *ClassDecl = DD->getParent();

  // Unions have no bases and do not call field destructors.
  if (ClassDecl->isUnion())
    return;

  // The complete-destructor phase just destructs all the virtual bases.
  if (DtorType == Dtor_Complete) {
    // Poison the vtable pointer such that access after the base
    // and member destructors are invoked is invalid.
    if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
        SanOpts.has(SanitizerKind::Memory) && ClassDecl->getNumVBases() &&
        ClassDecl->isPolymorphic())
      EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

    // We push them in the forward order so that they'll be popped in
    // the reverse order.
    for (const auto &Base : ClassDecl->vbases()) {
      auto *BaseClassDecl =
          cast<CXXRecordDecl>(Base.getType()->castAs<RecordType>()->getDecl());

      // Ignore trivial destructors.
      if (BaseClassDecl->hasTrivialDestructor())
        continue;

      EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                        BaseClassDecl,
                                        /*BaseIsVirtual*/ true);
    }

    return;
  }

  assert(DtorType == Dtor_Base);
  // Poison the vtable pointer if the class has no virtual bases, but inherits
  // virtual functions.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory) && !ClassDecl->getNumVBases() &&
      ClassDecl->isPolymorphic())
    EHStack.pushCleanup<SanitizeDtorVTable>(NormalAndEHCleanup, DD);

  // Destroy non-virtual bases.
  for (const auto &Base : ClassDecl->bases()) {
    // Ignore virtual bases.
    if (Base.isVirtual())
      continue;

    CXXRecordDecl *BaseClassDecl = Base.getType()->getAsCXXRecordDecl();

    // Ignore trivial destructors.
    if (BaseClassDecl->hasTrivialDestructor())
      continue;

    EHStack.pushCleanup<CallBaseDtor>(NormalAndEHCleanup,
                                      BaseClassDecl,
                                      /*BaseIsVirtual*/ false);
  }

  // Poison fields such that access after their destructors are
  // invoked, and before the base class destructor runs, is invalid.
  if (CGM.getCodeGenOpts().SanitizeMemoryUseAfterDtor &&
      SanOpts.has(SanitizerKind::Memory))
    EHStack.pushCleanup<SanitizeDtorMembers>(NormalAndEHCleanup, DD);

  // Destroy direct fields.
  for (const auto *Field : ClassDecl->fields()) {
    QualType type = Field->getType();
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    // Anonymous union members do not have their destructors called.
    const RecordType *RT = type->getAsUnionType();
    if (RT && RT->getDecl()->isAnonymousStructOrUnion()) continue;

    CleanupKind cleanupKind = getCleanupKind(dtorKind);
    EHStack.pushCleanup<DestroyField>(cleanupKind, Field,
                                      getDestroyer(dtorKind),
                                      cleanupKind & EHCleanup);
  }
}
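
// Because cleanups pop in LIFO order, the base-variant pushes above yield the
// usual teardown sequence: for 'struct D : B { M m; ~D(); };', the epilogue
// destroys 'm' first and then runs B's destructor.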

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param arrayType the type of the array to initialize
/// \param arrayBegin an arrayType*
/// \param zeroInitialize true if each element should be
///     zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(
    const CXXConstructorDecl *ctor, const ArrayType *arrayType,
    Address arrayBegin, const CXXConstructExpr *E, bool NewPointerIsChecked,
    bool zeroInitialize) {
  QualType elementType;
  llvm::Value *numElements =
      emitArrayLength(arrayType, elementType, arrayBegin);
  EmitCXXAggrConstructorCall(ctor, numElements, arrayBegin, E,
                             NewPointerIsChecked, zeroInitialize);
}

/// EmitCXXAggrConstructorCall - Emit a loop to call a particular
/// constructor for each of several members of an array.
///
/// \param ctor the constructor to call for each element
/// \param numElements the number of elements in the array;
///     may be zero
/// \param arrayBase a T*, where T is the type constructed by ctor
/// \param zeroInitialize true if each element should be
///     zero-initialized before it is constructed
void CodeGenFunction::EmitCXXAggrConstructorCall(const CXXConstructorDecl *ctor,
                                                 llvm::Value *numElements,
                                                 Address arrayBase,
                                                 const CXXConstructExpr *E,
                                                 bool NewPointerIsChecked,
                                                 bool zeroInitialize) {
  // It's legal for numElements to be zero. This can happen both
  // dynamically, because x can be zero in 'new A[x]', and statically,
  // because of GCC extensions that permit zero-length arrays. There
  // are probably legitimate places where we could assume that this
  // doesn't happen, but it's not clear that it's worth it.
  llvm::BranchInst *zeroCheckBranch = nullptr;

  // Optimize for a constant count.
  llvm::ConstantInt *constantCount
    = dyn_cast<llvm::ConstantInt>(numElements);
  if (constantCount) {
    // Just skip out if the constant count is zero.
    if (constantCount->isZero()) return;

  // Otherwise, emit the check.
  } else {
    llvm::BasicBlock *loopBB = createBasicBlock("new.ctorloop");
    llvm::Value *iszero = Builder.CreateIsNull(numElements, "isempty");
    zeroCheckBranch = Builder.CreateCondBr(iszero, loopBB, loopBB);
    EmitBlock(loopBB);
  }
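
  // Note: both successors of the zero-check branch temporarily target loopBB;
  // the continuation block does not exist yet, so the taken edge is patched
  // to point at it via setSuccessor(0, contBB) at the end of this function.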

  // Find the end of the array.
  llvm::Type *elementType = arrayBase.getElementType();
  llvm::Value *arrayBegin = arrayBase.getPointer();
  llvm::Value *arrayEnd = Builder.CreateInBoundsGEP(
      elementType, arrayBegin, numElements, "arrayctor.end");

  // Enter the loop, setting up a phi for the current location to initialize.
  llvm::BasicBlock *entryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *loopBB = createBasicBlock("arrayctor.loop");
  EmitBlock(loopBB);
  llvm::PHINode *cur = Builder.CreatePHI(arrayBegin->getType(), 2,
                                         "arrayctor.cur");
  cur->addIncoming(arrayBegin, entryBB);

  // Inside the loop body, emit the constructor call on the array element.
  // The alignment of the base, adjusted by the size of a single element,
  // provides a conservative estimate of the alignment of every element.
  // (This assumes we never start tracking offsetted alignments.)
  //
  // Note that these are complete objects and so we don't need to
  // use the non-virtual size or alignment.
  QualType type = getContext().getTypeDeclType(ctor->getParent());
  CharUnits eltAlignment =
      arrayBase.getAlignment()
               .alignmentOfArrayElement(getContext().getTypeSizeInChars(type));
  Address curAddr = Address(cur, elementType, eltAlignment);

  // Zero initialize the storage, if requested.
  if (zeroInitialize)
    EmitNullInitialization(curAddr, type);

  // C++ [class.temporary]p4:
  // There are two contexts in which temporaries are destroyed at a different
  // point than the end of the full-expression. The first context is when a
  // default constructor is called to initialize an element of an array.
  // If the constructor has one or more default arguments, the destruction of
  // every temporary created in a default argument expression is sequenced
  // before the construction of the next array element, if any.
  {
    RunCleanupsScope Scope(*this);

    // Evaluate the constructor and its arguments in a regular
    // partial-destroy cleanup.
    if (getLangOpts().Exceptions &&
        !ctor->getParent()->hasTrivialDestructor()) {
      Destroyer *destroyer = destroyCXXObject;
      pushRegularPartialArrayCleanup(arrayBegin, cur, type, eltAlignment,
                                     *destroyer);
    }
    auto currAVS = AggValueSlot::forAddr(
        curAddr, type.getQualifiers(), AggValueSlot::IsDestructed,
        AggValueSlot::DoesNotNeedGCBarriers, AggValueSlot::IsNotAliased,
        AggValueSlot::DoesNotOverlap, AggValueSlot::IsNotZeroed,
        NewPointerIsChecked ? AggValueSlot::IsSanitizerChecked
                            : AggValueSlot::IsNotSanitizerChecked);
    EmitCXXConstructorCall(ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, currAVS, E);
  }

  // Go to the next element.
  llvm::Value *next = Builder.CreateInBoundsGEP(
      elementType, cur, llvm::ConstantInt::get(SizeTy, 1), "arrayctor.next");
  cur->addIncoming(next, Builder.GetInsertBlock());

  // Check whether that's the end of the loop.
  llvm::Value *done = Builder.CreateICmpEQ(next, arrayEnd, "arrayctor.done");
  llvm::BasicBlock *contBB = createBasicBlock("arrayctor.cont");
  Builder.CreateCondBr(done, contBB, loopBB);

  // Patch the earlier check to skip over the loop.
  if (zeroCheckBranch) zeroCheckBranch->setSuccessor(0, contBB);

  EmitBlock(contBB);
}

void CodeGenFunction::destroyCXXObject(CodeGenFunction &CGF,
                                       Address addr,
                                       QualType type) {
  const RecordType *rtype = type->castAs<RecordType>();
  const CXXRecordDecl *record = cast<CXXRecordDecl>(rtype->getDecl());
  const CXXDestructorDecl *dtor = record->getDestructor();
  assert(!dtor->isTrivial());
  CGF.EmitCXXDestructorCall(dtor, Dtor_Complete, /*for vbase*/ false,
                            /*Delegating=*/false, addr, type);
}

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             AggValueSlot ThisAVS,
                                             const CXXConstructExpr *E) {
  CallArgList Args;
  Address This = ThisAVS.getAddress();
  LangAS SlotAS = ThisAVS.getQualifiers().getAddressSpace();
  QualType ThisType = D->getThisType();
  LangAS ThisAS = ThisType.getTypePtr()->getPointeeType().getAddressSpace();
  llvm::Value *ThisPtr = This.getPointer();

  if (SlotAS != ThisAS) {
    unsigned TargetThisAS = getContext().getTargetAddressSpace(ThisAS);
    llvm::Type *NewType = llvm::PointerType::getWithSamePointeeType(
        This.getType(), TargetThisAS);
    ThisPtr = getTargetHooks().performAddrSpaceCast(*this, This.getPointer(),
                                                    ThisAS, SlotAS, NewType);
  }

  // Push the this ptr.
  Args.add(RValue::get(ThisPtr), D->getThisType());

  // If this is a trivial constructor, emit a memcpy now before we lose
  // the alignment information on the argument.
  // FIXME: It would be better to preserve alignment information into CallArg.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(E->getNumArgs() == 1 && "unexpected argcount for trivial ctor");

    const Expr *Arg = E->getArg(0);
    LValue Src = EmitLValue(Arg);
    QualType DestTy = getContext().getTypeDeclType(D->getParent());
    LValue Dest = MakeAddrLValue(This, DestTy);
    EmitAggregateCopyCtor(Dest, Src, ThisAVS.mayOverlap());
    return;
  }

  // Add the rest of the user-supplied arguments.
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();
  EvaluationOrder Order = E->isListInitialization()
                              ? EvaluationOrder::ForceLeftToRight
                              : EvaluationOrder::Default;
  EmitCallArgs(Args, FPT, E->arguments(), E->getConstructor(),
               /*ParamsToSkip*/ 0, Order);

  EmitCXXConstructorCall(D, Type, ForVirtualBase, Delegating, This, Args,
                         ThisAVS.mayOverlap(), E->getExprLoc(),
                         ThisAVS.isSanitizerChecked());
}

static bool canEmitDelegateCallArgs(CodeGenFunction &CGF,
                                    const CXXConstructorDecl *Ctor,
                                    CXXCtorType Type, CallArgList &Args) {
  // We can't forward a variadic call.
  if (Ctor->isVariadic())
    return false;

  if (CGF.getTarget().getCXXABI().areArgsDestroyedLeftToRightInCallee()) {
    // If the parameters are callee-cleanup, it's not safe to forward.
    for (auto *P : Ctor->parameters())
      if (P->needsDestruction(CGF.getContext()))
        return false;

    // Likewise if they're inalloca.
    const CGFunctionInfo &Info =
        CGF.CGM.getTypes().arrangeCXXConstructorCall(Args, Ctor, Type, 0, 0);
    if (Info.usesInAlloca())
      return false;
  }

  // Anything else should be OK.
  return true;
}

void CodeGenFunction::EmitCXXConstructorCall(const CXXConstructorDecl *D,
                                             CXXCtorType Type,
                                             bool ForVirtualBase,
                                             bool Delegating,
                                             Address This,
                                             CallArgList &Args,
                                             AggValueSlot::Overlap_t Overlap,
                                             SourceLocation Loc,
                                             bool NewPointerIsChecked) {
  const CXXRecordDecl *ClassDecl = D->getParent();

  if (!NewPointerIsChecked)
    EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall, Loc, This.getPointer(),
                  getContext().getRecordType(ClassDecl), CharUnits::Zero());

  if (D->isTrivial() && D->isDefaultConstructor()) {
    assert(Args.size() == 1 && "trivial default ctor with args");
    return;
  }

  // If this is a trivial constructor, just emit what's needed. If this is a
  // union copy constructor, we must emit a memcpy, because the AST does not
  // model that copy.
  if (isMemcpyEquivalentSpecialMember(D)) {
    assert(Args.size() == 2 && "unexpected argcount for trivial ctor");

    QualType SrcTy = D->getParamDecl(0)->getType().getNonReferenceType();
    Address Src(Args[1].getRValue(*this).getScalarVal(),
                CGM.getNaturalTypeAlignment(SrcTy));
    LValue SrcLVal = MakeAddrLValue(Src, SrcTy);
    QualType DestTy = getContext().getTypeDeclType(ClassDecl);
    LValue DestLVal = MakeAddrLValue(This, DestTy);
    EmitAggregateCopyCtor(DestLVal, SrcLVal, Overlap);
    return;
  }

  bool PassPrototypeArgs = true;
  // Check whether we can actually emit the constructor before trying to do so.
  if (auto Inherited = D->getInheritedConstructor()) {
    PassPrototypeArgs = getTypes().inheritingCtorHasParams(Inherited, Type);
    if (PassPrototypeArgs && !canEmitDelegateCallArgs(*this, D, Type, Args)) {
      EmitInlinedInheritingCXXConstructorCall(D, Type, ForVirtualBase,
                                              Delegating, Args);
      return;
    }
  }

  // Insert any ABI-specific implicit constructor arguments.
  CGCXXABI::AddedStructorArgCounts ExtraArgs =
      CGM.getCXXABI().addImplicitConstructorArgs(*this, D, Type, ForVirtualBase,
                                                 Delegating, Args);

  // Emit the call.
  llvm::Constant *CalleePtr = CGM.getAddrOfCXXStructor(GlobalDecl(D, Type));
  const CGFunctionInfo &Info = CGM.getTypes().arrangeCXXConstructorCall(
      Args, D, Type, ExtraArgs.Prefix, ExtraArgs.Suffix, PassPrototypeArgs);
  CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(D, Type));
  EmitCall(Info, Callee, ReturnValueSlot(), Args, nullptr, false, Loc);

  // Generate vtable assumptions if we're constructing a complete object
  // with a vtable. We don't do this for base subobjects for two reasons:
  // first, it's incorrect for classes with virtual bases, and second, we're
  // about to overwrite the vptrs anyway.
  // We also have to make sure we can refer to the vtable at all: currently we
  // only do so when it is safe to emit the vtable speculatively.
  // FIXME: If the vtable is used by the ctor/dtor, or if it is external and
  // we are sure its definition is not hidden, then we are always safe to
  // refer to it.
  // FIXME: It looks like InstCombine is very inefficient on dealing with
  // assumes. Make assumption loads require -fstrict-vtable-pointers temporarily.
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      ClassDecl->isDynamicClass() && Type != Ctor_Base &&
      CGM.getCXXABI().canSpeculativelyEmitVTable(ClassDecl) &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    EmitVTableAssumptionLoads(ClassDecl, This);
}

void CodeGenFunction::EmitInheritedCXXConstructorCall(
    const CXXConstructorDecl *D, bool ForVirtualBase, Address This,
    bool InheritedFromVBase, const CXXInheritedCtorInitExpr *E) {
  CallArgList Args;
  CallArg ThisArg(RValue::get(This.getPointer()), D->getThisType());

  // Forward the parameters.
  if (InheritedFromVBase &&
      CGM.getTarget().getCXXABI().hasConstructorVariants()) {
    // Nothing to do; this construction is not responsible for constructing
    // the base class containing the inherited constructor.
    // FIXME: Can we just pass undef's for the remaining arguments if we don't
    // have constructor variants?
    Args.push_back(ThisArg);
  } else if (!CXXInheritedCtorInitExprArgs.empty()) {
    // The inheriting constructor was inlined; just inject its arguments.
    assert(CXXInheritedCtorInitExprArgs.size() >= D->getNumParams() &&
           "wrong number of parameters for inherited constructor call");
    Args = CXXInheritedCtorInitExprArgs;
    Args[0] = ThisArg;
  } else {
    // The inheriting constructor was not inlined. Emit delegating arguments.
    Args.push_back(ThisArg);
    const auto *OuterCtor = cast<CXXConstructorDecl>(CurCodeDecl);
    assert(OuterCtor->getNumParams() == D->getNumParams());
    assert(!OuterCtor->isVariadic() && "should have been inlined");

    for (const auto *Param : OuterCtor->parameters()) {
      assert(getContext().hasSameUnqualifiedType(
          OuterCtor->getParamDecl(Param->getFunctionScopeIndex())->getType(),
          Param->getType()));
      EmitDelegateCallArg(Args, Param, E->getLocation());

      // Forward __attribute__(pass_object_size).
      if (Param->hasAttr<PassObjectSizeAttr>()) {
        auto *POSParam = SizeArguments[Param];
        assert(POSParam && "missing pass_object_size value for forwarding");
        EmitDelegateCallArg(Args, POSParam, E->getLocation());
      }
    }
  }

  EmitCXXConstructorCall(D, Ctor_Base, ForVirtualBase, /*Delegating*/false,
                         This, Args, AggValueSlot::MayOverlap,
                         E->getLocation(), /*NewPointerIsChecked*/true);
}

void CodeGenFunction::EmitInlinedInheritingCXXConstructorCall(
    const CXXConstructorDecl *Ctor, CXXCtorType CtorType, bool ForVirtualBase,
    bool Delegating, CallArgList &Args) {
  GlobalDecl GD(Ctor, CtorType);
  InlinedInheritingConstructorScope Scope(*this, GD);
  ApplyInlineDebugLocation DebugScope(*this, GD);
  RunCleanupsScope RunCleanups(*this);

  // Save the arguments to be passed to the inherited constructor.
  CXXInheritedCtorInitExprArgs = Args;

  FunctionArgList Params;
  QualType RetType = BuildFunctionArgList(CurGD, Params);
  FnRetTy = RetType;

  // Insert any ABI-specific implicit constructor arguments.
  CGM.getCXXABI().addImplicitConstructorArgs(*this, Ctor, CtorType,
                                             ForVirtualBase, Delegating, Args);

  // Emit a simplified prolog. We only need to emit the implicit params.
  assert(Args.size() >= Params.size() && "too few arguments for call");
  for (unsigned I = 0, N = Args.size(); I != N; ++I) {
    if (I < Params.size() && isa<ImplicitParamDecl>(Params[I])) {
      const RValue &RV = Args[I].getRValue(*this);
      assert(!RV.isComplex() && "complex indirect params not supported");
      ParamValue Val = RV.isScalar()
                           ? ParamValue::forDirect(RV.getScalarVal())
                           : ParamValue::forIndirect(RV.getAggregateAddress());
      EmitParmDecl(*Params[I], Val, I + 1);
    }
  }

  // Create a return value slot if the ABI implementation wants one.
  // FIXME: This is dumb, we should ask the ABI not to try to set the return
  // value instead.
  if (!RetType->isVoidType())
    ReturnValue = CreateIRTemp(RetType, "retval.inhctor");

  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;

  // Directly emit the constructor initializers.
  EmitCtorPrologue(Ctor, CtorType, Params);
}

void CodeGenFunction::EmitVTableAssumptionLoad(const VPtr &Vptr, Address This) {
  llvm::Value *VTableGlobal =
      CGM.getCXXABI().getVTableAddressPoint(Vptr.Base, Vptr.VTableClass);
  if (!VTableGlobal)
    return;

  // We can just use the base offset in the complete class.
  CharUnits NonVirtualOffset = Vptr.Base.getBaseOffset();

  if (!NonVirtualOffset.isZero())
    This =
        ApplyNonVirtualAndVirtualOffset(*this, This, NonVirtualOffset, nullptr,
                                        Vptr.VTableClass, Vptr.NearestVBase);

  llvm::Value *VPtrValue =
      GetVTablePtr(This, VTableGlobal->getType(), Vptr.VTableClass);
  llvm::Value *Cmp =
      Builder.CreateICmpEQ(VPtrValue, VTableGlobal, "cmp.vtables");
  Builder.CreateAssumption(Cmp);
}
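
// The sequence emitted above is roughly:
//   %vtable = load ...                      ; via GetVTablePtr
//   %cmp.vtables = icmp eq ... %vtable, <vtable address point>
//   call void @llvm.assume(i1 %cmp.vtables)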

void CodeGenFunction::EmitVTableAssumptionLoads(const CXXRecordDecl *ClassDecl,
                                                Address This) {
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(ClassDecl))
    for (const VPtr &Vptr : getVTablePointers(ClassDecl))
      EmitVTableAssumptionLoad(Vptr, This);
}

void
CodeGenFunction::EmitSynthesizedCXXCopyCtorCall(const CXXConstructorDecl *D,
                                                Address This, Address Src,
                                                const CXXConstructExpr *E) {
  const FunctionProtoType *FPT = D->getType()->castAs<FunctionProtoType>();

  CallArgList Args;

  // Push the this ptr.
  Args.add(RValue::get(This.getPointer()), D->getThisType());

  // Push the src ptr.
  QualType QT = *(FPT->param_type_begin());
  llvm::Type *t = CGM.getTypes().ConvertType(QT);
  Src = Builder.CreateBitCast(Src, t);
  Args.add(RValue::get(Src.getPointer()), QT);

  // Skip over first argument (Src).
  EmitCallArgs(Args, FPT, drop_begin(E->arguments(), 1), E->getConstructor(),
               /*ParamsToSkip*/ 1);

  EmitCXXConstructorCall(D, Ctor_Complete, /*ForVirtualBase*/false,
                         /*Delegating*/false, This, Args,
                         AggValueSlot::MayOverlap, E->getExprLoc(),
                         /*NewPointerIsChecked*/false);
}

void
CodeGenFunction::EmitDelegateCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                CXXCtorType CtorType,
                                                const FunctionArgList &Args,
                                                SourceLocation Loc) {
  CallArgList DelegateArgs;

  FunctionArgList::const_iterator I = Args.begin(), E = Args.end();
  assert(I != E && "no parameters to constructor");

  // this
  Address This = LoadCXXThisAddress();
  DelegateArgs.add(RValue::get(This.getPointer()), (*I)->getType());
  ++I;

  // FIXME: The location of the VTT parameter in the parameter list is
  // specific to the Itanium ABI and shouldn't be hardcoded here.
  if (CGM.getCXXABI().NeedsVTTParameter(CurGD)) {
    assert(I != E && "cannot skip vtt parameter, already done with args");
    assert((*I)->getType()->isPointerType() &&
           "skipping parameter not of vtt type");
    ++I;
  }

  // Explicit arguments.
  for (; I != E; ++I) {
    const VarDecl *param = *I;
    // FIXME: per-argument source location
    EmitDelegateCallArg(DelegateArgs, param, Loc);
  }

  EmitCXXConstructorCall(Ctor, CtorType, /*ForVirtualBase=*/false,
                         /*Delegating=*/true, This, DelegateArgs,
                         AggValueSlot::MayOverlap, Loc,
                         /*NewPointerIsChecked=*/true);
}

namespace {
  struct CallDelegatingCtorDtor final : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    Address Addr;
    CXXDtorType Type;

    CallDelegatingCtorDtor(const CXXDestructorDecl *D, Address Addr,
                           CXXDtorType Type)
        : Dtor(D), Addr(Addr), Type(Type) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      // We are calling the destructor from within the constructor.
      // Therefore, "this" should have the expected type.
      QualType ThisTy = Dtor->getThisObjectType();
      CGF.EmitCXXDestructorCall(Dtor, Type, /*ForVirtualBase=*/false,
                                /*Delegating=*/true, Addr, ThisTy);
    }
  };
} // end anonymous namespace

void
CodeGenFunction::EmitDelegatingCXXConstructorCall(const CXXConstructorDecl *Ctor,
                                                  const FunctionArgList &Args) {
  assert(Ctor->isDelegatingConstructor());

  Address ThisPtr = LoadCXXThisAddress();

  AggValueSlot AggSlot =
      AggValueSlot::forAddr(ThisPtr, Qualifiers(),
                            AggValueSlot::IsDestructed,
                            AggValueSlot::DoesNotNeedGCBarriers,
                            AggValueSlot::IsNotAliased,
                            AggValueSlot::MayOverlap,
                            AggValueSlot::IsNotZeroed,
                            // Checks are made by the code that calls the
                            // constructor.
                            AggValueSlot::IsSanitizerChecked);

  EmitAggExpr(Ctor->init_begin()[0]->getInit(), AggSlot);

  const CXXRecordDecl *ClassDecl = Ctor->getParent();
  if (CGM.getLangOpts().Exceptions && !ClassDecl->hasTrivialDestructor()) {
    CXXDtorType Type =
        CurGD.getCtorType() == Ctor_Complete ? Dtor_Complete : Dtor_Base;

    EHStack.pushCleanup<CallDelegatingCtorDtor>(EHCleanup,
                                                ClassDecl->getDestructor(),
                                                ThisPtr, Type);
  }
}

void CodeGenFunction::EmitCXXDestructorCall(const CXXDestructorDecl *DD,
                                            CXXDtorType Type,
                                            bool ForVirtualBase,
                                            bool Delegating, Address This,
                                            QualType ThisTy) {
  CGM.getCXXABI().EmitDestructorCall(*this, DD, Type, ForVirtualBase,
                                     Delegating, This, ThisTy);
}

namespace {
  struct CallLocalDtor final : EHScopeStack::Cleanup {
    const CXXDestructorDecl *Dtor;
    Address Addr;
    QualType Ty;

    CallLocalDtor(const CXXDestructorDecl *D, Address Addr, QualType Ty)
        : Dtor(D), Addr(Addr), Ty(Ty) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                                /*ForVirtualBase=*/false,
                                /*Delegating=*/false, Addr, Ty);
    }
  };
} // end anonymous namespace

void CodeGenFunction::PushDestructorCleanup(const CXXDestructorDecl *D,
                                            QualType T, Address Addr) {
  EHStack.pushCleanup<CallLocalDtor>(NormalAndEHCleanup, D, Addr, T);
}

void CodeGenFunction::PushDestructorCleanup(QualType T, Address Addr) {
  CXXRecordDecl *ClassDecl = T->getAsCXXRecordDecl();
  if (!ClassDecl) return;
  if (ClassDecl->hasTrivialDestructor()) return;

  const CXXDestructorDecl *D = ClassDecl->getDestructor();
  assert(D && D->isUsed() && "destructor not marked as used!");
  PushDestructorCleanup(D, T, Addr);
}

void CodeGenFunction::InitializeVTablePointer(const VPtr &Vptr) {
  // Compute the address point.
  llvm::Value *VTableAddressPoint =
      CGM.getCXXABI().getVTableAddressPointInStructor(
          *this, Vptr.VTableClass, Vptr.Base, Vptr.NearestVBase);

  if (!VTableAddressPoint)
    return;

  // Compute where to store the address point.
  llvm::Value *VirtualOffset = nullptr;
  CharUnits NonVirtualOffset = CharUnits::Zero();

  if (CGM.getCXXABI().isVirtualOffsetNeededForVTableField(*this, Vptr)) {
    // We need to use the virtual base offset offset because the virtual base
    // might have a different offset in the most derived class.
    VirtualOffset = CGM.getCXXABI().GetVirtualBaseClassOffset(
        *this, LoadCXXThisAddress(), Vptr.VTableClass, Vptr.NearestVBase);
    NonVirtualOffset = Vptr.OffsetFromNearestVBase;
  } else {
    // We can just use the base offset in the complete class.
    NonVirtualOffset = Vptr.Base.getBaseOffset();
  }

  // Apply the offsets.
  Address VTableField = LoadCXXThisAddress();
  if (!NonVirtualOffset.isZero() || VirtualOffset)
    VTableField = ApplyNonVirtualAndVirtualOffset(
        *this, VTableField, NonVirtualOffset, VirtualOffset, Vptr.VTableClass,
        Vptr.NearestVBase);

  // Finally, store the address point. Use the same LLVM types as the field to
  // support optimization.
  unsigned GlobalsAS = CGM.getDataLayout().getDefaultGlobalsAddressSpace();
  unsigned ProgAS = CGM.getDataLayout().getProgramAddressSpace();
  llvm::Type *VTablePtrTy =
      llvm::FunctionType::get(CGM.Int32Ty, /*isVarArg=*/true)
          ->getPointerTo(ProgAS)
          ->getPointerTo(GlobalsAS);
  // The vtable field is derived from the `this` pointer, so they should be in
  // the same addr space. Note that this might not be LLVM address space 0.
  VTableField = Builder.CreateElementBitCast(VTableField, VTablePtrTy);
  VTableAddressPoint = Builder.CreateBitCast(VTableAddressPoint, VTablePtrTy);

  llvm::StoreInst *Store = Builder.CreateStore(VTableAddressPoint, VTableField);
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTablePtrTy);
  CGM.DecorateInstructionWithTBAA(Store, TBAAInfo);
  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(Store, Vptr.VTableClass);
}
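
// Under -fstrict-vtable-pointers at -O1 and above, both the store above and
// the corresponding load in GetVTablePtr below carry !invariant.group
// metadata, which is what lets the optimizer treat the vptr as unchanging
// between launder/strip points.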
  2158. CodeGenFunction::VPtrsVector
  2159. CodeGenFunction::getVTablePointers(const CXXRecordDecl *VTableClass) {
  2160. CodeGenFunction::VPtrsVector VPtrsResult;
  2161. VisitedVirtualBasesSetTy VBases;
  2162. getVTablePointers(BaseSubobject(VTableClass, CharUnits::Zero()),
  2163. /*NearestVBase=*/nullptr,
  2164. /*OffsetFromNearestVBase=*/CharUnits::Zero(),
  2165. /*BaseIsNonVirtualPrimaryBase=*/false, VTableClass, VBases,
  2166. VPtrsResult);
  2167. return VPtrsResult;
  2168. }

void CodeGenFunction::getVTablePointers(BaseSubobject Base,
                                        const CXXRecordDecl *NearestVBase,
                                        CharUnits OffsetFromNearestVBase,
                                        bool BaseIsNonVirtualPrimaryBase,
                                        const CXXRecordDecl *VTableClass,
                                        VisitedVirtualBasesSetTy &VBases,
                                        VPtrsVector &Vptrs) {
  // If this base is a non-virtual primary base the address point has already
  // been set.
  if (!BaseIsNonVirtualPrimaryBase) {
    // Initialize the vtable pointer for this base.
    VPtr Vptr = {Base, NearestVBase, OffsetFromNearestVBase, VTableClass};
    Vptrs.push_back(Vptr);
  }

  const CXXRecordDecl *RD = Base.getBase();

  // Traverse bases.
  for (const auto &I : RD->bases()) {
    auto *BaseDecl =
        cast<CXXRecordDecl>(I.getType()->castAs<RecordType>()->getDecl());

    // Ignore classes without a vtable.
    if (!BaseDecl->isDynamicClass())
      continue;

    CharUnits BaseOffset;
    CharUnits BaseOffsetFromNearestVBase;
    bool BaseDeclIsNonVirtualPrimaryBase;

    if (I.isVirtual()) {
      // Check if we've visited this virtual base before.
      if (!VBases.insert(BaseDecl).second)
        continue;

      const ASTRecordLayout &Layout =
          getContext().getASTRecordLayout(VTableClass);

      BaseOffset = Layout.getVBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase = CharUnits::Zero();
      BaseDeclIsNonVirtualPrimaryBase = false;
    } else {
      const ASTRecordLayout &Layout = getContext().getASTRecordLayout(RD);

      BaseOffset = Base.getBaseOffset() + Layout.getBaseClassOffset(BaseDecl);
      BaseOffsetFromNearestVBase =
          OffsetFromNearestVBase + Layout.getBaseClassOffset(BaseDecl);
      BaseDeclIsNonVirtualPrimaryBase = Layout.getPrimaryBase() == BaseDecl;
    }

    getVTablePointers(
        BaseSubobject(BaseDecl, BaseOffset),
        I.isVirtual() ? BaseDecl : NearestVBase, BaseOffsetFromNearestVBase,
        BaseDeclIsNonVirtualPrimaryBase, VTableClass, VBases, Vptrs);
  }
}
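
// A sketch of how the traversal behaves on a small hierarchy (assuming the
// Itanium ABI's primary-base rules):
//
//   struct V { virtual void v(); };
//   struct A { virtual void f(); };
//   struct B : A, virtual V { virtual void g(); };
//
// getVTablePointers(B) yields a VPtr for the B subobject at offset zero and a
// VPtr for the virtual V subobject at its vbase offset in the complete class.
// No separate VPtr is produced for A: as B's non-virtual primary base it
// shares B's address point and is skipped via BaseIsNonVirtualPrimaryBase.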

void CodeGenFunction::InitializeVTablePointers(const CXXRecordDecl *RD) {
  // Ignore classes without a vtable.
  if (!RD->isDynamicClass())
    return;

  // Initialize the vtable pointers for this class and all of its bases.
  if (CGM.getCXXABI().doStructorsInitializeVPtrs(RD))
    for (const VPtr &Vptr : getVTablePointers(RD))
      InitializeVTablePointer(Vptr);

  if (RD->getNumVBases())
    CGM.getCXXABI().initializeHiddenVirtualInheritanceMembers(*this, RD);
}

llvm::Value *CodeGenFunction::GetVTablePtr(Address This,
                                           llvm::Type *VTableTy,
                                           const CXXRecordDecl *RD) {
  Address VTablePtrSrc = Builder.CreateElementBitCast(This, VTableTy);
  llvm::Instruction *VTable = Builder.CreateLoad(VTablePtrSrc, "vtable");
  TBAAAccessInfo TBAAInfo = CGM.getTBAAVTablePtrAccessInfo(VTableTy);
  CGM.DecorateInstructionWithTBAA(VTable, TBAAInfo);

  if (CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      CGM.getCodeGenOpts().StrictVTablePointers)
    CGM.DecorateInstructionWithInvariantGroup(VTable, RD);

  return VTable;
}
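
// With optimization enabled and -fstrict-vtable-pointers, the load emitted
// above carries !invariant.group metadata, roughly (typed-pointer IR, sketch
// only; metadata node numbers are placeholders):
//
//   %vtable = load void (...)**, void (...)*** %0, !tbaa !N, !invariant.group !M
//
// letting the optimizer assume an object's vptr does not change across
// unrelated stores, only across explicit launders.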

// If a class has a single non-virtual base and does not introduce or override
// virtual member functions or fields, it will have the same layout as its
// base. This function returns the least derived such class.
//
// Casting an instance of a base class to such a derived class is technically
// undefined behavior, but it is a relatively common hack for introducing
// member functions on class instances with specific properties
// (e.g. llvm::Operator) that works under most compilers and should not have
// security implications, so we allow it by default. It can be disabled with
// -fsanitize=cfi-cast-strict.
static const CXXRecordDecl *
LeastDerivedClassWithSameLayout(const CXXRecordDecl *RD) {
  if (!RD->field_empty())
    return RD;

  if (RD->getNumVBases() != 0)
    return RD;

  if (RD->getNumBases() != 1)
    return RD;

  for (const CXXMethodDecl *MD : RD->methods()) {
    if (MD->isVirtual()) {
      // Virtual member functions are only ok if they are implicit destructors
      // because the implicit destructor will have the same semantics as the
      // base class's destructor if no fields are added.
      if (isa<CXXDestructorDecl>(MD) && MD->isImplicit())
        continue;
      return RD;
    }
  }

  return LeastDerivedClassWithSameLayout(
      RD->bases_begin()->getType()->getAsCXXRecordDecl());
}
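
// Illustrative example of the pattern this function tolerates (the names
// Base/View are hypothetical; llvm::Operator is the in-tree instance):
//
//   struct Base { virtual ~Base(); int Kind; };
//   struct View : Base {                 // adds no fields or virtuals
//     bool isNeat() const { return Kind == 42; }
//   };
//
// A View* obtained by casting a Base* is layout-compatible in practice, and
// LeastDerivedClassWithSameLayout(View) returns Base, so CFI checks such a
// pointer against Base's vtables instead of rejecting the downcast.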

void CodeGenFunction::EmitTypeMetadataCodeForVCall(const CXXRecordDecl *RD,
                                                   llvm::Value *VTable,
                                                   SourceLocation Loc) {
  if (SanOpts.has(SanitizerKind::CFIVCall))
    EmitVTablePtrCheckForCall(RD, VTable, CodeGenFunction::CFITCK_VCall, Loc);
  else if (CGM.getCodeGenOpts().WholeProgramVTables &&
           // Don't insert type test assumes if we are forcing public std
           // visibility.
           !CGM.HasLTOVisibilityPublicStd(RD)) {
    llvm::Metadata *MD =
        CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
    llvm::Value *TypeId =
        llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);

    llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
    llvm::Value *TypeTest =
        Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::type_test),
                           {CastedVTable, TypeId});
    Builder.CreateCall(CGM.getIntrinsic(llvm::Intrinsic::assume), TypeTest);
  }
}
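
// The whole-program-vtables branch above produces IR along these lines
// (sketch only; "_ZTS1A" stands in for the actual mangled type name):
//
//   %p = bitcast ... %vtable to i8*
//   %t = call i1 @llvm.type.test(i8* %p, metadata !"_ZTS1A")
//   call void @llvm.assume(i1 %t)
//
// The assume carries devirtualization information to LTO without emitting any
// runtime check.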

void CodeGenFunction::EmitVTablePtrCheckForCall(const CXXRecordDecl *RD,
                                                llvm::Value *VTable,
                                                CFITypeCheckKind TCK,
                                                SourceLocation Loc) {
  if (!SanOpts.has(SanitizerKind::CFICastStrict))
    RD = LeastDerivedClassWithSameLayout(RD);

  EmitVTablePtrCheck(RD, VTable, TCK, Loc);
}

void CodeGenFunction::EmitVTablePtrCheckForCast(QualType T,
                                                llvm::Value *Derived,
                                                bool MayBeNull,
                                                CFITypeCheckKind TCK,
                                                SourceLocation Loc) {
  if (!getLangOpts().CPlusPlus)
    return;

  auto *ClassTy = T->getAs<RecordType>();
  if (!ClassTy)
    return;

  const CXXRecordDecl *ClassDecl = cast<CXXRecordDecl>(ClassTy->getDecl());

  if (!ClassDecl->isCompleteDefinition() || !ClassDecl->isDynamicClass())
    return;

  if (!SanOpts.has(SanitizerKind::CFICastStrict))
    ClassDecl = LeastDerivedClassWithSameLayout(ClassDecl);

  llvm::BasicBlock *ContBlock = nullptr;

  if (MayBeNull) {
    llvm::Value *DerivedNotNull =
        Builder.CreateIsNotNull(Derived, "cast.nonnull");

    llvm::BasicBlock *CheckBlock = createBasicBlock("cast.check");
    ContBlock = createBasicBlock("cast.cont");

    Builder.CreateCondBr(DerivedNotNull, CheckBlock, ContBlock);

    EmitBlock(CheckBlock);
  }

  llvm::Value *VTable;
  std::tie(VTable, ClassDecl) = CGM.getCXXABI().LoadVTablePtr(
      *this, Address(Derived, getPointerAlign()), ClassDecl);

  EmitVTablePtrCheck(ClassDecl, VTable, TCK, Loc);

  if (MayBeNull) {
    Builder.CreateBr(ContBlock);
    EmitBlock(ContBlock);
  }
}
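
// Illustrative trigger: with -fsanitize=cfi-derived-cast, a downcast such as
//
//   Derived *D = static_cast<Derived *>(BasePtr);
//
// reaches this function with MayBeNull typically true, so the vptr check is
// wrapped in the "cast.nonnull" guard emitted above and null pointers flow
// straight to the continuation block unchecked.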

void CodeGenFunction::EmitVTablePtrCheck(const CXXRecordDecl *RD,
                                         llvm::Value *VTable,
                                         CFITypeCheckKind TCK,
                                         SourceLocation Loc) {
  if (!CGM.getCodeGenOpts().SanitizeCfiCrossDso &&
      !CGM.HasHiddenLTOVisibility(RD))
    return;

  SanitizerMask M;
  llvm::SanitizerStatKind SSK;
  switch (TCK) {
  case CFITCK_VCall:
    M = SanitizerKind::CFIVCall;
    SSK = llvm::SanStat_CFI_VCall;
    break;
  case CFITCK_NVCall:
    M = SanitizerKind::CFINVCall;
    SSK = llvm::SanStat_CFI_NVCall;
    break;
  case CFITCK_DerivedCast:
    M = SanitizerKind::CFIDerivedCast;
    SSK = llvm::SanStat_CFI_DerivedCast;
    break;
  case CFITCK_UnrelatedCast:
    M = SanitizerKind::CFIUnrelatedCast;
    SSK = llvm::SanStat_CFI_UnrelatedCast;
    break;
  case CFITCK_ICall:
  case CFITCK_NVMFCall:
  case CFITCK_VMFCall:
    llvm_unreachable("unexpected sanitizer kind");
  }

  std::string TypeName = RD->getQualifiedNameAsString();
  if (getContext().getNoSanitizeList().containsType(M, TypeName))
    return;

  SanitizerScope SanScope(this);
  EmitSanitizerStatReport(SSK);

  llvm::Metadata *MD =
      CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
  llvm::Value *TypeId = llvm::MetadataAsValue::get(getLLVMContext(), MD);

  llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
  llvm::Value *TypeTest = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, TypeId});

  llvm::Constant *StaticData[] = {
      llvm::ConstantInt::get(Int8Ty, TCK),
      EmitCheckSourceLocation(Loc),
      EmitCheckTypeDescriptor(QualType(RD->getTypeForDecl(), 0)),
  };

  auto CrossDsoTypeId = CGM.CreateCrossDsoCfiTypeId(MD);
  if (CGM.getCodeGenOpts().SanitizeCfiCrossDso && CrossDsoTypeId) {
    EmitCfiSlowPathCheck(M, TypeTest, CrossDsoTypeId, CastedVTable, StaticData);
    return;
  }

  if (CGM.getCodeGenOpts().SanitizeTrap.has(M)) {
    EmitTrapCheck(TypeTest, SanitizerHandler::CFICheckFail);
    return;
  }

  llvm::Value *AllVtables = llvm::MetadataAsValue::get(
      CGM.getLLVMContext(),
      llvm::MDString::get(CGM.getLLVMContext(), "all-vtables"));
  llvm::Value *ValidVtable = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_test), {CastedVTable, AllVtables});
  EmitCheck(std::make_pair(TypeTest, M), SanitizerHandler::CFICheckFail,
            StaticData, {CastedVTable, ValidVtable});
}

bool CodeGenFunction::ShouldEmitVTableTypeCheckedLoad(const CXXRecordDecl *RD) {
  if (!CGM.getCodeGenOpts().WholeProgramVTables ||
      !CGM.HasHiddenLTOVisibility(RD))
    return false;

  if (CGM.getCodeGenOpts().VirtualFunctionElimination)
    return true;

  if (!SanOpts.has(SanitizerKind::CFIVCall) ||
      !CGM.getCodeGenOpts().SanitizeTrap.has(SanitizerKind::CFIVCall))
    return false;

  std::string TypeName = RD->getQualifiedNameAsString();
  return !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,
                                                        TypeName);
}

llvm::Value *CodeGenFunction::EmitVTableTypeCheckedLoad(
    const CXXRecordDecl *RD, llvm::Value *VTable, uint64_t VTableByteOffset) {
  SanitizerScope SanScope(this);

  EmitSanitizerStatReport(llvm::SanStat_CFI_VCall);

  llvm::Metadata *MD =
      CGM.CreateMetadataIdentifierForType(QualType(RD->getTypeForDecl(), 0));
  llvm::Value *TypeId = llvm::MetadataAsValue::get(CGM.getLLVMContext(), MD);

  llvm::Value *CastedVTable = Builder.CreateBitCast(VTable, Int8PtrTy);
  llvm::Value *CheckedLoad = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::type_checked_load),
      {CastedVTable, llvm::ConstantInt::get(Int32Ty, VTableByteOffset),
       TypeId});
  llvm::Value *CheckResult = Builder.CreateExtractValue(CheckedLoad, 1);

  std::string TypeName = RD->getQualifiedNameAsString();
  if (SanOpts.has(SanitizerKind::CFIVCall) &&
      !getContext().getNoSanitizeList().containsType(SanitizerKind::CFIVCall,
                                                     TypeName)) {
    EmitCheck(std::make_pair(CheckResult, SanitizerKind::CFIVCall),
              SanitizerHandler::CFICheckFail, {}, {});
  }

  return Builder.CreateBitCast(Builder.CreateExtractValue(CheckedLoad, 0),
                               VTable->getType()->getPointerElementType());
}
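
// Rough shape of the IR emitted above for, say, VTableByteOffset == 8
// (typed-pointer sketch; "_ZTS1A" stands in for the mangled type name):
//
//   %pair = call { i8*, i1 } @llvm.type.checked.load(i8* %vt, i32 8,
//                                                    metadata !"_ZTS1A")
//   %fn   = extractvalue { i8*, i1 } %pair, 0  ; the virtual function pointer
//   %ok   = extractvalue { i8*, i1 } %pair, 1  ; feeds the CFI check, if any
//
// Keeping the vtable load behind this intrinsic is what allows
// -fvirtual-function-elimination to drop never-called virtual functions at
// LTO time.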

void CodeGenFunction::EmitForwardingCallToLambda(
    const CXXMethodDecl *callOperator,
    CallArgList &callArgs) {
  // Get the address of the call operator.
  const CGFunctionInfo &calleeFnInfo =
      CGM.getTypes().arrangeCXXMethodDeclaration(callOperator);
  llvm::Constant *calleePtr =
      CGM.GetAddrOfFunction(GlobalDecl(callOperator),
                            CGM.getTypes().GetFunctionType(calleeFnInfo));

  // Prepare the return slot.
  const FunctionProtoType *FPT =
      callOperator->getType()->castAs<FunctionProtoType>();
  QualType resultType = FPT->getReturnType();
  ReturnValueSlot returnSlot;
  if (!resultType->isVoidType() &&
      calleeFnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      !hasScalarEvaluationKind(calleeFnInfo.getReturnType()))
    returnSlot =
        ReturnValueSlot(ReturnValue, resultType.isVolatileQualified(),
                        /*IsUnused=*/false, /*IsExternallyDestructed=*/true);

  // We don't need to separately arrange the call arguments because
  // the call can't be variadic anyway --- it's impossible to forward
  // variadic arguments.

  // Now emit our call.
  auto callee = CGCallee::forDirect(calleePtr, GlobalDecl(callOperator));
  RValue RV = EmitCall(calleeFnInfo, callee, returnSlot, callArgs);

  // If necessary, copy the returned value into the slot.
  if (!resultType->isVoidType() && returnSlot.isNull()) {
    if (getLangOpts().ObjCAutoRefCount && resultType->isObjCRetainableType()) {
      RV = RValue::get(EmitARCRetainAutoreleasedReturnValue(RV.getScalarVal()));
    }
    EmitReturnOfRValue(RV, resultType);
  } else
    EmitBranchThroughCleanup(ReturnBlock);
}
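
// Illustrative caller of this helper: converting a captureless lambda to a
// function pointer,
//
//   auto L = [](int x) { return x + 1; };
//   int (*fp)(int) = L;                 // conversion synthesizes an invoker
//
// produces a static invoker function whose body is exactly such a forwarding
// call to the lambda's operator().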

void CodeGenFunction::EmitLambdaBlockInvokeBody() {
  const BlockDecl *BD = BlockInfo->getBlockDecl();
  const VarDecl *variable = BD->capture_begin()->getVariable();
  const CXXRecordDecl *Lambda = variable->getType()->getAsCXXRecordDecl();
  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();

  if (CallOp->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(CurCodeDecl, "lambda conversion to variadic function");
    return;
  }

  // Start building arguments for forwarding call
  CallArgList CallArgs;

  QualType ThisType =
      getContext().getPointerType(getContext().getRecordType(Lambda));
  Address ThisPtr = GetAddrOfBlockDecl(variable);
  CallArgs.add(RValue::get(ThisPtr.getPointer()), ThisType);

  // Add the rest of the parameters.
  for (auto param : BD->parameters())
    EmitDelegateCallArg(CallArgs, param, param->getBeginLoc());

  assert(!Lambda->isGenericLambda() &&
         "generic lambda interconversion to block not implemented");
  EmitForwardingCallToLambda(CallOp, CallArgs);
}
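
// Illustrative trigger (Objective-C++): converting a lambda to a block,
//
//   void (^blk)(int) = [](int x) { /* ... */ };
//
// synthesizes a block whose invoke function is emitted here, forwarding the
// block's captured copy of the lambda plus the parameters to operator().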

void CodeGenFunction::EmitLambdaDelegatingInvokeBody(const CXXMethodDecl *MD) {
  const CXXRecordDecl *Lambda = MD->getParent();

  // Start building arguments for forwarding call
  CallArgList CallArgs;

  QualType ThisType =
      getContext().getPointerType(getContext().getRecordType(Lambda));
  llvm::Value *ThisPtr =
      llvm::UndefValue::get(getTypes().ConvertType(ThisType));
  CallArgs.add(RValue::get(ThisPtr), ThisType);

  // Add the rest of the parameters.
  for (auto Param : MD->parameters())
    EmitDelegateCallArg(CallArgs, Param, Param->getBeginLoc());

  const CXXMethodDecl *CallOp = Lambda->getLambdaCallOperator();
  // For a generic lambda, find the corresponding call operator specialization
  // to which the call to the static-invoker shall be forwarded.
  if (Lambda->isGenericLambda()) {
    assert(MD->isFunctionTemplateSpecialization());
    const TemplateArgumentList *TAL = MD->getTemplateSpecializationArgs();
    FunctionTemplateDecl *CallOpTemplate =
        CallOp->getDescribedFunctionTemplate();
    void *InsertPos = nullptr;
    FunctionDecl *CorrespondingCallOpSpecialization =
        CallOpTemplate->findSpecialization(TAL->asArray(), InsertPos);
    assert(CorrespondingCallOpSpecialization);
    CallOp = cast<CXXMethodDecl>(CorrespondingCallOpSpecialization);
  }
  EmitForwardingCallToLambda(CallOp, CallArgs);
}
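
// Sketch of the generic-lambda case handled above:
//
//   auto L = [](auto x) { return x; };
//   int (*fp)(int) = L;
//
// Here MD is the int specialization of the static invoker; the forwarding
// target is the matching operator()<int> specialization, located via
// findSpecialization with the invoker's template arguments.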

void CodeGenFunction::EmitLambdaStaticInvokeBody(const CXXMethodDecl *MD) {
  if (MD->isVariadic()) {
    // FIXME: Making this work correctly is nasty because it requires either
    // cloning the body of the call operator or making the call operator
    // forward.
    CGM.ErrorUnsupported(MD, "lambda conversion to variadic function");
    return;
  }

  EmitLambdaDelegatingInvokeBody(MD);
}