CGExprCXX.cpp

//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CGCUDARuntime.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "CodeGenFunction.h"
#include "ConstantEmitter.h"
#include "TargetInfo.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/IR/Intrinsics.h"

using namespace clang;
using namespace CodeGen;

namespace {
struct MemberCallInfo {
  RequiredArgs ReqArgs;
  // Number of prefix arguments for the call. Ignores the `this` pointer.
  unsigned PrefixSize;
};
}

static MemberCallInfo
commonEmitCXXMemberOrOperatorCall(CodeGenFunction &CGF, const CXXMethodDecl *MD,
                                  llvm::Value *This, llvm::Value *ImplicitParam,
                                  QualType ImplicitParamTy, const CallExpr *CE,
                                  CallArgList &Args, CallArgList *RtlArgs) {
  assert(CE == nullptr || isa<CXXMemberCallExpr>(CE) ||
         isa<CXXOperatorCallExpr>(CE));
  assert(MD->isInstance() &&
         "Trying to emit a member or operator call expr on a static method!");

  // Push the this ptr.
  const CXXRecordDecl *RD =
      CGF.CGM.getCXXABI().getThisArgumentTypeForMethod(MD);
  Args.add(RValue::get(This), CGF.getTypes().DeriveThisType(RD, MD));

  // If there is an implicit parameter (e.g. VTT), emit it.
  if (ImplicitParam) {
    Args.add(RValue::get(ImplicitParam), ImplicitParamTy);
  }

  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, Args.size());
  unsigned PrefixSize = Args.size() - 1;

  // And the rest of the call args.
  if (RtlArgs) {
    // Special case: if the caller emitted the arguments right-to-left already
    // (prior to emitting the *this argument), we're done. This happens for
    // assignment operators.
    Args.addFrom(*RtlArgs);
  } else if (CE) {
    // Special case: skip first argument of CXXOperatorCall (it is "this").
    unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
    CGF.EmitCallArgs(Args, FPT, drop_begin(CE->arguments(), ArgsToSkip),
                     CE->getDirectCallee());
  } else {
    assert(
        FPT->getNumParams() == 0 &&
        "No CallExpr specified for function with non-zero number of arguments");
  }
  return {required, PrefixSize};
}
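
// Example: for a user-declared assignment operator,
//   struct S { S &operator=(const S &); };
//   s1 = s2;
// C++17 sequences the RHS before the LHS, so the caller evaluates 's2' first
// and passes the pre-emitted argument in via RtlArgs above.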

RValue CodeGenFunction::EmitCXXMemberOrOperatorCall(
    const CXXMethodDecl *MD, const CGCallee &Callee,
    ReturnValueSlot ReturnValue,
    llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
    const CallExpr *CE, CallArgList *RtlArgs) {
  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  CallArgList Args;
  MemberCallInfo CallInfo = commonEmitCXXMemberOrOperatorCall(
      *this, MD, This, ImplicitParam, ImplicitParamTy, CE, Args, RtlArgs);
  auto &FnInfo = CGM.getTypes().arrangeCXXMethodCall(
      Args, FPT, CallInfo.ReqArgs, CallInfo.PrefixSize);
  return EmitCall(FnInfo, Callee, ReturnValue, Args, nullptr,
                  CE && CE == MustTailCall,
                  CE ? CE->getExprLoc() : SourceLocation());
}

RValue CodeGenFunction::EmitCXXDestructorCall(
    GlobalDecl Dtor, const CGCallee &Callee, llvm::Value *This, QualType ThisTy,
    llvm::Value *ImplicitParam, QualType ImplicitParamTy, const CallExpr *CE) {
  const CXXMethodDecl *DtorDecl = cast<CXXMethodDecl>(Dtor.getDecl());

  assert(!ThisTy.isNull());
  assert(ThisTy->getAsCXXRecordDecl() == DtorDecl->getParent() &&
         "Pointer/Object mixup");

  LangAS SrcAS = ThisTy.getAddressSpace();
  LangAS DstAS = DtorDecl->getMethodQualifiers().getAddressSpace();
  if (SrcAS != DstAS) {
    QualType DstTy = DtorDecl->getThisType();
    llvm::Type *NewType = CGM.getTypes().ConvertType(DstTy);
    This = getTargetHooks().performAddrSpaceCast(*this, This, SrcAS, DstAS,
                                                 NewType);
  }

  CallArgList Args;
  commonEmitCXXMemberOrOperatorCall(*this, DtorDecl, This, ImplicitParam,
                                    ImplicitParamTy, CE, Args, nullptr);
  return EmitCall(CGM.getTypes().arrangeCXXStructorDeclaration(Dtor), Callee,
                  ReturnValueSlot(), Args, nullptr, CE && CE == MustTailCall,
                  CE ? CE->getExprLoc() : SourceLocation{});
}
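
// The address-space cast above matters on targets with qualified address
// spaces (e.g. OpenCL), where the destructor's 'this' may be in the generic
// address space while the destroyed object lives in a more specific one.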

RValue CodeGenFunction::EmitCXXPseudoDestructorExpr(
    const CXXPseudoDestructorExpr *E) {
  QualType DestroyedType = E->getDestroyedType();
  if (DestroyedType.hasStrongOrWeakObjCLifetime()) {
    // Automatic Reference Counting:
    //   If the pseudo-expression names a retainable object with weak or
    //   strong lifetime, the object shall be released.
    Expr *BaseExpr = E->getBase();
    Address BaseValue = Address::invalid();
    Qualifiers BaseQuals;

    // If this is s.x, emit s as an lvalue. If it is s->x, emit s as a scalar.
    if (E->isArrow()) {
      BaseValue = EmitPointerWithAlignment(BaseExpr);
      const auto *PTy = BaseExpr->getType()->castAs<PointerType>();
      BaseQuals = PTy->getPointeeType().getQualifiers();
    } else {
      LValue BaseLV = EmitLValue(BaseExpr);
      BaseValue = BaseLV.getAddress(*this);
      QualType BaseTy = BaseExpr->getType();
      BaseQuals = BaseTy.getQualifiers();
    }

    switch (DestroyedType.getObjCLifetime()) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong:
      EmitARCRelease(Builder.CreateLoad(BaseValue,
                                        DestroyedType.isVolatileQualified()),
                     ARCPreciseLifetime);
      break;

    case Qualifiers::OCL_Weak:
      EmitARCDestroyWeak(BaseValue);
      break;
    }
  } else {
    // C++ [expr.pseudo]p1:
    //   The result shall only be used as the operand for the function call
    //   operator (), and the result of such a call has type void. The only
    //   effect is the evaluation of the postfix-expression before the dot or
    //   arrow.
    EmitIgnoredExpr(E->getBase());
  }

  return RValue::get(nullptr);
}
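
// Example: given 'using I = int; int *p = ...;', the pseudo-destructor call
// 'p->~I()' only evaluates 'p' and emits no destruction, whereas under ARC
// destroying a __strong object pointer lvalue this way emits a release.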

static CXXRecordDecl *getCXXRecord(const Expr *E) {
  QualType T = E->getType();
  if (const PointerType *PTy = T->getAs<PointerType>())
    T = PTy->getPointeeType();
  const RecordType *Ty = T->castAs<RecordType>();
  return cast<CXXRecordDecl>(Ty->getDecl());
}

// Note: This function also emits constructor calls to support the MSVC
// extension that allows explicit constructor function calls.
RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE,
                                              ReturnValueSlot ReturnValue) {
  const Expr *callee = CE->getCallee()->IgnoreParens();

  if (isa<BinaryOperator>(callee))
    return EmitCXXMemberPointerCallExpr(CE, ReturnValue);

  const MemberExpr *ME = cast<MemberExpr>(callee);
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  if (MD->isStatic()) {
    // The method is static, emit it as we would a regular call.
    CGCallee callee =
        CGCallee::forDirect(CGM.GetAddrOfFunction(MD), GlobalDecl(MD));
    return EmitCall(getContext().getPointerType(MD->getType()), callee, CE,
                    ReturnValue);
  }

  bool HasQualifier = ME->hasQualifier();
  NestedNameSpecifier *Qualifier = HasQualifier ? ME->getQualifier() : nullptr;
  bool IsArrow = ME->isArrow();
  const Expr *Base = ME->getBase();

  return EmitCXXMemberOrOperatorMemberCallExpr(
      CE, MD, ReturnValue, HasQualifier, Qualifier, IsArrow, Base);
}

RValue CodeGenFunction::EmitCXXMemberOrOperatorMemberCallExpr(
    const CallExpr *CE, const CXXMethodDecl *MD, ReturnValueSlot ReturnValue,
    bool HasQualifier, NestedNameSpecifier *Qualifier, bool IsArrow,
    const Expr *Base) {
  assert(isa<CXXMemberCallExpr>(CE) || isa<CXXOperatorCallExpr>(CE));

  // Compute the object pointer.
  bool CanUseVirtualCall = MD->isVirtual() && !HasQualifier;

  const CXXMethodDecl *DevirtualizedMethod = nullptr;
  if (CanUseVirtualCall &&
      MD->getDevirtualizedMethod(Base, getLangOpts().AppleKext)) {
    const CXXRecordDecl *BestDynamicDecl = Base->getBestDynamicClassType();
    DevirtualizedMethod = MD->getCorrespondingMethodInClass(BestDynamicDecl);
    assert(DevirtualizedMethod);
    const CXXRecordDecl *DevirtualizedClass = DevirtualizedMethod->getParent();
    const Expr *Inner = Base->IgnoreParenBaseCasts();
    if (DevirtualizedMethod->getReturnType().getCanonicalType() !=
        MD->getReturnType().getCanonicalType())
      // If the return types are not the same, this might be a case where more
      // code needs to run to compensate for it. For example, the derived
      // method might return a type that inherits from the return type of MD
      // and has a prefix.
      // For now we just avoid devirtualizing these covariant cases.
      DevirtualizedMethod = nullptr;
    else if (getCXXRecord(Inner) == DevirtualizedClass)
      // If the class of the Inner expression is where the dynamic method
      // is defined, build the this pointer from it.
      Base = Inner;
    else if (getCXXRecord(Base) != DevirtualizedClass) {
      // If the method is defined in a class that is not the best dynamic
      // one or the one of the full expression, we would have to build
      // a derived-to-base cast to compute the correct this pointer, but
      // we don't have support for that yet, so do a virtual call.
      DevirtualizedMethod = nullptr;
    }
  }
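
  // Example: in
  //   struct B { virtual void f(); };
  //   struct D final : B { void f() override; };
  //   void g(D &d) { d.f(); }
  // the static type of 'd' already determines the callee (D is final), so
  // DevirtualizedMethod is D::f and the call is emitted directly rather
  // than through the vtable.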

  bool TrivialForCodegen =
      MD->isTrivial() || (MD->isDefaulted() && MD->getParent()->isUnion());
  bool TrivialAssignment =
      TrivialForCodegen &&
      (MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) &&
      !MD->getParent()->mayInsertExtraPadding();

  // C++17 demands that we evaluate the RHS of a (possibly-compound) assignment
  // operator before the LHS.
  CallArgList RtlArgStorage;
  CallArgList *RtlArgs = nullptr;
  LValue TrivialAssignmentRHS;
  if (auto *OCE = dyn_cast<CXXOperatorCallExpr>(CE)) {
    if (OCE->isAssignmentOp()) {
      if (TrivialAssignment) {
        TrivialAssignmentRHS = EmitLValue(CE->getArg(1));
      } else {
        RtlArgs = &RtlArgStorage;
        EmitCallArgs(*RtlArgs, MD->getType()->castAs<FunctionProtoType>(),
                     drop_begin(CE->arguments(), 1), CE->getDirectCallee(),
                     /*ParamsToSkip*/0, EvaluationOrder::ForceRightToLeft);
      }
    }
  }

  LValue This;
  if (IsArrow) {
    LValueBaseInfo BaseInfo;
    TBAAAccessInfo TBAAInfo;
    Address ThisValue = EmitPointerWithAlignment(Base, &BaseInfo, &TBAAInfo);
    This = MakeAddrLValue(ThisValue, Base->getType(), BaseInfo, TBAAInfo);
  } else {
    This = EmitLValue(Base);
  }

  if (const CXXConstructorDecl *Ctor = dyn_cast<CXXConstructorDecl>(MD)) {
    // This is the MSVC p->Ctor::Ctor(...) extension. We assume that's
    // constructing a new complete object of type Ctor.
    assert(!RtlArgs);
    assert(ReturnValue.isNull() && "Constructor shouldn't have return value");
    CallArgList Args;
    commonEmitCXXMemberOrOperatorCall(
        *this, Ctor, This.getPointer(*this), /*ImplicitParam=*/nullptr,
        /*ImplicitParamTy=*/QualType(), CE, Args, nullptr);

    EmitCXXConstructorCall(Ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, This.getAddress(*this), Args,
                           AggValueSlot::DoesNotOverlap, CE->getExprLoc(),
                           /*NewPointerIsChecked=*/false);
    return RValue::get(nullptr);
  }

  if (TrivialForCodegen) {
    if (isa<CXXDestructorDecl>(MD))
      return RValue::get(nullptr);

    if (TrivialAssignment) {
      // We don't like to generate the trivial copy/move assignment operator
      // when it isn't necessary; just produce the proper effect here.
      // It's important that we use the result of EmitLValue here rather than
      // emitting call arguments, in order to preserve TBAA information from
      // the RHS.
      LValue RHS = isa<CXXOperatorCallExpr>(CE)
                       ? TrivialAssignmentRHS
                       : EmitLValue(*CE->arg_begin());
      EmitAggregateAssign(This, RHS, CE->getType());
      return RValue::get(This.getPointer(*this));
    }

    assert(MD->getParent()->mayInsertExtraPadding() &&
           "unknown trivial member function");
  }

  // Compute the function type we're calling.
  const CXXMethodDecl *CalleeDecl =
      DevirtualizedMethod ? DevirtualizedMethod : MD;
  const CGFunctionInfo *FInfo = nullptr;
  if (const auto *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        GlobalDecl(Dtor, Dtor_Complete));
  else
    FInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(CalleeDecl);

  llvm::FunctionType *Ty = CGM.getTypes().GetFunctionType(*FInfo);

  // C++11 [class.mfct.non-static]p2:
  //   If a non-static member function of a class X is called for an object
  //   that is not of type X, or of a type derived from X, the behavior is
  //   undefined.
  SourceLocation CallLoc;
  ASTContext &C = getContext();
  if (CE)
    CallLoc = CE->getExprLoc();

  SanitizerSet SkippedChecks;
  if (const auto *CMCE = dyn_cast<CXXMemberCallExpr>(CE)) {
    auto *IOA = CMCE->getImplicitObjectArgument();
    bool IsImplicitObjectCXXThis = IsWrappedCXXThis(IOA);
    if (IsImplicitObjectCXXThis)
      SkippedChecks.set(SanitizerKind::Alignment, true);
    if (IsImplicitObjectCXXThis || isa<DeclRefExpr>(IOA))
      SkippedChecks.set(SanitizerKind::Null, true);
  }

  EmitTypeCheck(CodeGenFunction::TCK_MemberCall, CallLoc,
                This.getPointer(*this),
                C.getRecordType(CalleeDecl->getParent()),
                /*Alignment=*/CharUnits::Zero(), SkippedChecks);

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  bool UseVirtualCall = CanUseVirtualCall && !DevirtualizedMethod;

  if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl)) {
    assert(CE->arg_begin() == CE->arg_end() &&
           "Destructor shouldn't have explicit parameters");
    assert(ReturnValue.isNull() && "Destructor shouldn't have return value");
    if (UseVirtualCall) {
      CGM.getCXXABI().EmitVirtualDestructorCall(*this, Dtor, Dtor_Complete,
                                                This.getAddress(*this),
                                                cast<CXXMemberCallExpr>(CE));
    } else {
      GlobalDecl GD(Dtor, Dtor_Complete);
      CGCallee Callee;
      if (getLangOpts().AppleKext && Dtor->isVirtual() && HasQualifier)
        Callee = BuildAppleKextVirtualCall(Dtor, Qualifier, Ty);
      else if (!DevirtualizedMethod)
        Callee =
            CGCallee::forDirect(CGM.getAddrOfCXXStructor(GD, FInfo, Ty), GD);
      else {
        Callee = CGCallee::forDirect(CGM.GetAddrOfFunction(GD, Ty), GD);
      }

      QualType ThisTy =
          IsArrow ? Base->getType()->getPointeeType() : Base->getType();
      EmitCXXDestructorCall(GD, Callee, This.getPointer(*this), ThisTy,
                            /*ImplicitParam=*/nullptr,
                            /*ImplicitParamTy=*/QualType(), CE);
    }
    return RValue::get(nullptr);
  }

  // FIXME: Uses of 'MD' past this point need to be audited. We may need to use
  // 'CalleeDecl' instead.

  CGCallee Callee;
  if (UseVirtualCall) {
    Callee = CGCallee::forVirtual(CE, MD, This.getAddress(*this), Ty);
  } else {
    if (SanOpts.has(SanitizerKind::CFINVCall) &&
        MD->getParent()->isDynamicClass()) {
      llvm::Value *VTable;
      const CXXRecordDecl *RD;
      std::tie(VTable, RD) = CGM.getCXXABI().LoadVTablePtr(
          *this, This.getAddress(*this), CalleeDecl->getParent());
      EmitVTablePtrCheckForCall(RD, VTable, CFITCK_NVCall, CE->getBeginLoc());
    }

    if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
      Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
    else if (!DevirtualizedMethod)
      Callee =
          CGCallee::forDirect(CGM.GetAddrOfFunction(MD, Ty), GlobalDecl(MD));
    else {
      Callee =
          CGCallee::forDirect(CGM.GetAddrOfFunction(DevirtualizedMethod, Ty),
                              GlobalDecl(DevirtualizedMethod));
    }
  }

  if (MD->isVirtual()) {
    Address NewThisAddr =
        CGM.getCXXABI().adjustThisArgumentForVirtualFunctionCall(
            *this, CalleeDecl, This.getAddress(*this), UseVirtualCall);
    This.setAddress(NewThisAddr);
  }

  return EmitCXXMemberOrOperatorCall(
      CalleeDecl, Callee, ReturnValue, This.getPointer(*this),
      /*ImplicitParam=*/nullptr, QualType(), CE, RtlArgs);
}

RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                              ReturnValueSlot ReturnValue) {
  const BinaryOperator *BO =
      cast<BinaryOperator>(E->getCallee()->IgnoreParens());
  const Expr *BaseExpr = BO->getLHS();
  const Expr *MemFnExpr = BO->getRHS();

  const auto *MPT = MemFnExpr->getType()->castAs<MemberPointerType>();
  const auto *FPT = MPT->getPointeeType()->castAs<FunctionProtoType>();
  const auto *RD =
      cast<CXXRecordDecl>(MPT->getClass()->castAs<RecordType>()->getDecl());

  // Emit the 'this' pointer.
  Address This = Address::invalid();
  if (BO->getOpcode() == BO_PtrMemI)
    This = EmitPointerWithAlignment(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress(*this);

  EmitTypeCheck(TCK_MemberCall, E->getExprLoc(), This.getPointer(),
                QualType(MPT->getClass(), 0));

  // Get the member function pointer.
  llvm::Value *MemFnPtr = EmitScalarExpr(MemFnExpr);

  // Ask the ABI to load the callee. Note that This is modified.
  llvm::Value *ThisPtrForCall = nullptr;
  CGCallee Callee =
      CGM.getCXXABI().EmitLoadOfMemberFunctionPointer(*this, BO, This,
                                                      ThisPtrForCall, MemFnPtr,
                                                      MPT);

  CallArgList Args;

  QualType ThisType =
      getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.add(RValue::get(ThisPtrForCall), ThisType);

  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, 1);

  // And the rest of the call args.
  EmitCallArgs(Args, FPT, E->arguments());
  return EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required,
                                                      /*PrefixSize=*/0),
                  Callee, ReturnValue, Args, nullptr, E == MustTailCall,
                  E->getExprLoc());
}
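
// Example: both forms of the pointer-to-member call operator end up here:
//   struct S { int f(int); };
//   int (S::*pmf)(int) = &S::f;
//   S s;  S *ps = &s;
//   (s.*pmf)(1);   // BO_PtrMemD
//   (ps->*pmf)(1); // BO_PtrMemI
// The C++ ABI decides how to decode 'pmf' (e.g. Itanium marks virtual
// functions by storing a vtable offset instead of a function address).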

RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD,
                                               ReturnValueSlot ReturnValue) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");
  return EmitCXXMemberOrOperatorMemberCallExpr(
      E, MD, ReturnValue, /*HasQualifier=*/false, /*Qualifier=*/nullptr,
      /*IsArrow=*/false, E->getArg(0));
}

RValue CodeGenFunction::EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
                                               ReturnValueSlot ReturnValue) {
  return CGM.getCUDARuntime().EmitCUDAKernelCallExpr(*this, E, ReturnValue);
}

static void EmitNullBaseClassInitialization(CodeGenFunction &CGF,
                                            Address DestPtr,
                                            const CXXRecordDecl *Base) {
  if (Base->isEmpty())
    return;

  DestPtr = CGF.Builder.CreateElementBitCast(DestPtr, CGF.Int8Ty);

  const ASTRecordLayout &Layout = CGF.getContext().getASTRecordLayout(Base);
  CharUnits NVSize = Layout.getNonVirtualSize();

  // We cannot simply zero-initialize the entire base sub-object if vbptrs are
  // present; they are initialized by the most derived class before calling
  // the constructor.
  SmallVector<std::pair<CharUnits, CharUnits>, 1> Stores;
  Stores.emplace_back(CharUnits::Zero(), NVSize);

  // Each store is split by the existence of a vbptr.
  CharUnits VBPtrWidth = CGF.getPointerSize();
  std::vector<CharUnits> VBPtrOffsets =
      CGF.CGM.getCXXABI().getVBPtrOffsets(Base);
  for (CharUnits VBPtrOffset : VBPtrOffsets) {
    // Stop before we hit any virtual base pointers located in virtual bases.
    if (VBPtrOffset >= NVSize)
      break;
    std::pair<CharUnits, CharUnits> LastStore = Stores.pop_back_val();
    CharUnits LastStoreOffset = LastStore.first;
    CharUnits LastStoreSize = LastStore.second;

    CharUnits SplitBeforeOffset = LastStoreOffset;
    CharUnits SplitBeforeSize = VBPtrOffset - SplitBeforeOffset;
    assert(!SplitBeforeSize.isNegative() && "negative store size!");
    if (!SplitBeforeSize.isZero())
      Stores.emplace_back(SplitBeforeOffset, SplitBeforeSize);

    CharUnits SplitAfterOffset = VBPtrOffset + VBPtrWidth;
    CharUnits SplitAfterSize = LastStoreSize - SplitAfterOffset;
    assert(!SplitAfterSize.isNegative() && "negative store size!");
    if (!SplitAfterSize.isZero())
      Stores.emplace_back(SplitAfterOffset, SplitAfterSize);
  }

  // If the type contains a pointer to data member we can't memset it to zero.
  // Instead, create a null constant and copy it to the destination.
  // TODO: there are other patterns besides zero that we can usefully memset,
  // like -1, which happens to be the pattern used by member-pointers.
  // TODO: isZeroInitializable can be over-conservative in the case where a
  // virtual base contains a member pointer.
  llvm::Constant *NullConstantForBase = CGF.CGM.EmitNullConstantForBase(Base);
  if (!NullConstantForBase->isNullValue()) {
    llvm::GlobalVariable *NullVariable = new llvm::GlobalVariable(
        CGF.CGM.getModule(), NullConstantForBase->getType(),
        /*isConstant=*/true, llvm::GlobalVariable::PrivateLinkage,
        NullConstantForBase, Twine());

    CharUnits Align =
        std::max(Layout.getNonVirtualAlignment(), DestPtr.getAlignment());
    NullVariable->setAlignment(Align.getAsAlign());

    Address SrcPtr = Address(CGF.EmitCastToVoidPtr(NullVariable), Align);

    // Get and call the appropriate llvm.memcpy overload.
    for (std::pair<CharUnits, CharUnits> Store : Stores) {
      CharUnits StoreOffset = Store.first;
      CharUnits StoreSize = Store.second;
      llvm::Value *StoreSizeVal = CGF.CGM.getSize(StoreSize);
      CGF.Builder.CreateMemCpy(
          CGF.Builder.CreateConstInBoundsByteGEP(DestPtr, StoreOffset),
          CGF.Builder.CreateConstInBoundsByteGEP(SrcPtr, StoreOffset),
          StoreSizeVal);
    }

  // Otherwise, just memset the whole thing to zero. This is legal
  // because in LLVM, all default initializers (other than the ones we just
  // handled above) are guaranteed to have a bit pattern of all zeros.
  } else {
    for (std::pair<CharUnits, CharUnits> Store : Stores) {
      CharUnits StoreOffset = Store.first;
      CharUnits StoreSize = Store.second;
      llvm::Value *StoreSizeVal = CGF.CGM.getSize(StoreSize);
      CGF.Builder.CreateMemSet(
          CGF.Builder.CreateConstInBoundsByteGEP(DestPtr, StoreOffset),
          CGF.Builder.getInt8(0), StoreSizeVal);
    }
  }
}
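
// Example: under the Microsoft ABI,
//   struct A { int a; };
//   struct B : virtual A { int b; };
// the non-virtual part of B is { vbptr, b }, so the null-initialization
// above is emitted as stores that skip the vbptr slot, which was already
// written by the most derived object's constructor.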

void
CodeGenFunction::EmitCXXConstructExpr(const CXXConstructExpr *E,
                                      AggValueSlot Dest) {
  assert(!Dest.isIgnored() && "Must have a destination!");
  const CXXConstructorDecl *CD = E->getConstructor();

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now, unless destination is
  // already zeroed.
  if (E->requiresZeroInitialization() && !Dest.isZeroed()) {
    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
    case CXXConstructExpr::CK_Complete:
      EmitNullInitialization(Dest.getAddress(), E->getType());
      break;
    case CXXConstructExpr::CK_VirtualBase:
    case CXXConstructExpr::CK_NonVirtualBase:
      EmitNullBaseClassInitialization(*this, Dest.getAddress(),
                                      CD->getParent());
      break;
    }
  }

  // If this is a call to a trivial default constructor, do nothing.
  if (CD->isTrivial() && CD->isDefaultConstructor())
    return;

  // Elide the constructor if we're constructing from a temporary.
  if (getLangOpts().ElideConstructors && E->isElidable()) {
    // FIXME: This only handles the simplest case, where the source object
    //        is passed directly as the first argument to the constructor.
    //        This should also handle stepping through implicit casts and
    //        conversion sequences which involve two steps, with a
    //        conversion operator followed by a converting constructor.
    const Expr *SrcObj = E->getArg(0);
    assert(SrcObj->isTemporaryObject(getContext(), CD->getParent()));
    assert(
        getContext().hasSameUnqualifiedType(E->getType(), SrcObj->getType()));
    EmitAggExpr(SrcObj, Dest);
    return;
  }

  if (const ArrayType *arrayType
        = getContext().getAsArrayType(E->getType())) {
    EmitCXXAggrConstructorCall(CD, arrayType, Dest.getAddress(), E,
                               Dest.isSanitizerChecked());
  } else {
    CXXCtorType Type = Ctor_Complete;
    bool ForVirtualBase = false;
    bool Delegating = false;

    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
      // We should be emitting a constructor; GlobalDecl will assert this
      Type = CurGD.getCtorType();
      Delegating = true;
      break;

    case CXXConstructExpr::CK_Complete:
      Type = Ctor_Complete;
      break;

    case CXXConstructExpr::CK_VirtualBase:
      ForVirtualBase = true;
      LLVM_FALLTHROUGH;

    case CXXConstructExpr::CK_NonVirtualBase:
      Type = Ctor_Base;
    }

    // Call the constructor.
    EmitCXXConstructorCall(CD, Type, ForVirtualBase, Delegating, Dest, E);
  }
}

void CodeGenFunction::EmitSynthesizedCXXCopyCtor(Address Dest, Address Src,
                                                 const Expr *Exp) {
  if (const ExprWithCleanups *E = dyn_cast<ExprWithCleanups>(Exp))
    Exp = E->getSubExpr();
  assert(isa<CXXConstructExpr>(Exp) &&
         "EmitSynthesizedCXXCopyCtor - unknown copy ctor expr");
  const CXXConstructExpr* E = cast<CXXConstructExpr>(Exp);
  const CXXConstructorDecl *CD = E->getConstructor();
  RunCleanupsScope Scope(*this);

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now.
  // FIXME. Do I still need this for a copy ctor synthesis?
  if (E->requiresZeroInitialization())
    EmitNullInitialization(Dest, E->getType());

  assert(!getContext().getAsConstantArrayType(E->getType())
         && "EmitSynthesizedCXXCopyCtor - Copied-in Array");
  EmitSynthesizedCXXCopyCtorCall(CD, Dest, Src, E);
}

static CharUnits CalculateCookiePadding(CodeGenFunction &CGF,
                                        const CXXNewExpr *E) {
  if (!E->isArray())
    return CharUnits::Zero();

  // No cookie is required if the operator new[] being used is the
  // reserved placement operator new[].
  if (E->getOperatorNew()->isReservedGlobalPlacementOperator())
    return CharUnits::Zero();

  return CGF.CGM.getCXXABI().GetArrayCookieSize(E);
}
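
// Example: under the Itanium ABI, 'new T[n]' for a T with a non-trivial
// destructor prepends a cookie (typically sizeof(size_t) bytes) recording
// 'n', so that 'delete[]' knows how many destructors to run; 'new int[n]'
// needs no cookie and this padding is zero.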

static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *e,
                                        unsigned minElements,
                                        llvm::Value *&numElements,
                                        llvm::Value *&sizeWithoutCookie) {
  QualType type = e->getAllocatedType();

  if (!e->isArray()) {
    CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
    sizeWithoutCookie
      = llvm::ConstantInt::get(CGF.SizeTy, typeSize.getQuantity());
    return sizeWithoutCookie;
  }

  // The width of size_t.
  unsigned sizeWidth = CGF.SizeTy->getBitWidth();

  // Figure out the cookie size.
  llvm::APInt cookieSize(sizeWidth,
                         CalculateCookiePadding(CGF, e).getQuantity());

  // Emit the array size expression.
  // We multiply the size of all dimensions for NumElements.
  // e.g for 'int[2][3]', ElemType is 'int' and NumElements is 6.
  numElements =
    ConstantEmitter(CGF).tryEmitAbstract(*e->getArraySize(), e->getType());
  if (!numElements)
    numElements = CGF.EmitScalarExpr(*e->getArraySize());
  assert(isa<llvm::IntegerType>(numElements->getType()));

  // The number of elements can have an arbitrary integer type;
  // essentially, we need to multiply it by a constant factor, add a
  // cookie size, and verify that the result is representable as a
  // size_t. That's just a gloss, though, and it's wrong in one
  // important way: if the count is negative, it's an error even if
  // the cookie size would bring the total size >= 0.
  bool isSigned
    = (*e->getArraySize())->getType()->isSignedIntegerOrEnumerationType();
  llvm::IntegerType *numElementsType
    = cast<llvm::IntegerType>(numElements->getType());
  unsigned numElementsWidth = numElementsType->getBitWidth();

  // Compute the constant factor.
  llvm::APInt arraySizeMultiplier(sizeWidth, 1);
  while (const ConstantArrayType *CAT
             = CGF.getContext().getAsConstantArrayType(type)) {
    type = CAT->getElementType();
    arraySizeMultiplier *= CAT->getSize();
  }

  CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
  llvm::APInt typeSizeMultiplier(sizeWidth, typeSize.getQuantity());
  typeSizeMultiplier *= arraySizeMultiplier;

  // This will be a size_t.
  llvm::Value *size;

  // If someone is doing 'new int[42]' there is no need to do a dynamic check.
  // Don't bloat the -O0 code.
  if (llvm::ConstantInt *numElementsC =
        dyn_cast<llvm::ConstantInt>(numElements)) {
    const llvm::APInt &count = numElementsC->getValue();

    bool hasAnyOverflow = false;

    // If 'count' was a negative number, it's an overflow.
    if (isSigned && count.isNegative())
      hasAnyOverflow = true;

    // We want to do all this arithmetic in size_t. If numElements is
    // wider than that, check whether it's already too big, and if so,
    // overflow.
    else if (numElementsWidth > sizeWidth &&
             numElementsWidth - sizeWidth > count.countLeadingZeros())
      hasAnyOverflow = true;

    // Okay, compute a count at the right width.
    llvm::APInt adjustedCount = count.zextOrTrunc(sizeWidth);

    // If there is a brace-initializer, we cannot allocate fewer elements than
    // there are initializers. If we do, that's treated like an overflow.
    if (adjustedCount.ult(minElements))
      hasAnyOverflow = true;

    // Scale numElements by that. This might overflow, but we don't
    // care because it only overflows if allocationSize does, too, and
    // if that overflows then we shouldn't use this.
    numElements = llvm::ConstantInt::get(CGF.SizeTy,
                                         adjustedCount * arraySizeMultiplier);

    // Compute the size before cookie, and track whether it overflowed.
    bool overflow;
    llvm::APInt allocationSize
      = adjustedCount.umul_ov(typeSizeMultiplier, overflow);
    hasAnyOverflow |= overflow;

    // Add in the cookie, and check whether it's overflowed.
    if (cookieSize != 0) {
      // Save the current size without a cookie. This shouldn't be
      // used if there was overflow.
      sizeWithoutCookie = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);

      allocationSize = allocationSize.uadd_ov(cookieSize, overflow);
      hasAnyOverflow |= overflow;
    }

    // On overflow, produce a -1 so operator new will fail.
    if (hasAnyOverflow) {
      size = llvm::Constant::getAllOnesValue(CGF.SizeTy);
    } else {
      size = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);
    }

  // Otherwise, we might need to use the overflow intrinsics.
  } else {
    // There are up to five conditions we need to test for:
    // 1) if isSigned, we need to check whether numElements is negative;
    // 2) if numElementsWidth > sizeWidth, we need to check whether
    //    numElements is larger than something representable in size_t;
    // 3) if minElements > 0, we need to check whether numElements is smaller
    //    than that.
    // 4) we need to compute
    //      sizeWithoutCookie := numElements * typeSizeMultiplier
    //    and check whether it overflows; and
    // 5) if we need a cookie, we need to compute
    //      size := sizeWithoutCookie + cookieSize
    //    and check whether it overflows.

    llvm::Value *hasOverflow = nullptr;

    // If numElementsWidth > sizeWidth, then one way or another, we're
    // going to have to do a comparison for (2), and this happens to
    // take care of (1), too.
    if (numElementsWidth > sizeWidth) {
      llvm::APInt threshold(numElementsWidth, 1);
      threshold <<= sizeWidth;

      llvm::Value *thresholdV
        = llvm::ConstantInt::get(numElementsType, threshold);

      hasOverflow = CGF.Builder.CreateICmpUGE(numElements, thresholdV);
      numElements = CGF.Builder.CreateTrunc(numElements, CGF.SizeTy);

    // Otherwise, if we're signed, we want to sext up to size_t.
    } else if (isSigned) {
      if (numElementsWidth < sizeWidth)
        numElements = CGF.Builder.CreateSExt(numElements, CGF.SizeTy);

      // If there's a non-1 type size multiplier, then we can do the
      // signedness check at the same time as we do the multiply
      // because a negative number times anything will cause an
      // unsigned overflow. Otherwise, we have to do it here. But at least
      // in this case, we can subsume the >= minElements check.
      if (typeSizeMultiplier == 1)
        hasOverflow = CGF.Builder.CreateICmpSLT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));

    // Otherwise, zext up to size_t if necessary.
    } else if (numElementsWidth < sizeWidth) {
      numElements = CGF.Builder.CreateZExt(numElements, CGF.SizeTy);
    }

    assert(numElements->getType() == CGF.SizeTy);

    if (minElements) {
      // Don't allow allocation of fewer elements than we have initializers.
      if (!hasOverflow) {
        hasOverflow = CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));
      } else if (numElementsWidth > sizeWidth) {
        // The other existing overflow subsumes this check.
        // We do an unsigned comparison, since any signed value < -1 is
        // taken care of either above or below.
        hasOverflow = CGF.Builder.CreateOr(hasOverflow,
                          CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements)));
      }
    }

    size = numElements;

    // Multiply by the type size if necessary. This multiplier
    // includes all the factors for nested arrays.
    //
    // This step also causes numElements to be scaled up by the
    // nested-array factor if necessary. Overflow on this computation
    // can be ignored because the result shouldn't be used if
    // allocation fails.
    if (typeSizeMultiplier != 1) {
      llvm::Function *umul_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::umul_with_overflow, CGF.SizeTy);

      llvm::Value *tsmV =
        llvm::ConstantInt::get(CGF.SizeTy, typeSizeMultiplier);
      llvm::Value *result =
          CGF.Builder.CreateCall(umul_with_overflow, {size, tsmV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);

      // Also scale up numElements by the array size multiplier.
      if (arraySizeMultiplier != 1) {
        // If the base element type size is 1, then we can re-use the
        // multiply we just did.
        if (typeSize.isOne()) {
          assert(arraySizeMultiplier == typeSizeMultiplier);
          numElements = size;

        // Otherwise we need a separate multiply.
        } else {
          llvm::Value *asmV =
            llvm::ConstantInt::get(CGF.SizeTy, arraySizeMultiplier);
          numElements = CGF.Builder.CreateMul(numElements, asmV);
        }
      }
    } else {
      // numElements doesn't need to be scaled.
      assert(arraySizeMultiplier == 1);
    }

    // Add in the cookie size if necessary.
    if (cookieSize != 0) {
      sizeWithoutCookie = size;

      llvm::Function *uadd_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::uadd_with_overflow, CGF.SizeTy);

      llvm::Value *cookieSizeV = llvm::ConstantInt::get(CGF.SizeTy, cookieSize);
      llvm::Value *result =
          CGF.Builder.CreateCall(uadd_with_overflow, {size, cookieSizeV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);
    }

    // If we had any possibility of dynamic overflow, make a select to
    // overwrite 'size' with an all-ones value, which should cause
    // operator new to throw.
    if (hasOverflow)
      size = CGF.Builder.CreateSelect(hasOverflow,
                                 llvm::Constant::getAllOnesValue(CGF.SizeTy),
                                      size);
  }

  if (cookieSize == 0)
    sizeWithoutCookie = size;
  else
    assert(sizeWithoutCookie && "didn't set sizeWithoutCookie?");

  return size;
}
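
// Example: for 'new S[n]' with a signed 32-bit 'n' and sizeof(S) == 24 on a
// 64-bit target, the code above sign-extends 'n' to i64 and emits roughly
//   %r = call { i64, i1 } @llvm.umul.with.overflow.i64(i64 %n64, i64 24)
// then selects an all-ones size when the i1 overflow bit is set, so the
// subsequent call to operator new fails rather than under-allocating.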

static void StoreAnyExprIntoOneUnit(CodeGenFunction &CGF, const Expr *Init,
                                    QualType AllocType, Address NewPtr,
                                    AggValueSlot::Overlap_t MayOverlap) {
  // FIXME: Refactor with EmitExprAsInit.
  switch (CGF.getEvaluationKind(AllocType)) {
  case TEK_Scalar:
    CGF.EmitScalarInit(Init, nullptr,
                       CGF.MakeAddrLValue(NewPtr, AllocType), false);
    return;
  case TEK_Complex:
    CGF.EmitComplexExprIntoLValue(Init, CGF.MakeAddrLValue(NewPtr, AllocType),
                                  /*isInit*/ true);
    return;
  case TEK_Aggregate: {
    AggValueSlot Slot
      = AggValueSlot::forAddr(NewPtr, AllocType.getQualifiers(),
                              AggValueSlot::IsDestructed,
                              AggValueSlot::DoesNotNeedGCBarriers,
                              AggValueSlot::IsNotAliased,
                              MayOverlap, AggValueSlot::IsNotZeroed,
                              AggValueSlot::IsSanitizerChecked);
    CGF.EmitAggExpr(Init, Slot);
    return;
  }
  }
  llvm_unreachable("bad evaluation kind");
}

void CodeGenFunction::EmitNewArrayInitializer(
    const CXXNewExpr *E, QualType ElementType, llvm::Type *ElementTy,
    Address BeginPtr, llvm::Value *NumElements,
    llvm::Value *AllocSizeWithoutCookie) {
  // If we have a type with trivial initialization and no initializer,
  // there's nothing to do.
  if (!E->hasInitializer())
    return;

  Address CurPtr = BeginPtr;

  unsigned InitListElements = 0;

  const Expr *Init = E->getInitializer();
  Address EndOfInit = Address::invalid();
  QualType::DestructionKind DtorKind = ElementType.isDestructedType();
  EHScopeStack::stable_iterator Cleanup;
  llvm::Instruction *CleanupDominator = nullptr;

  CharUnits ElementSize = getContext().getTypeSizeInChars(ElementType);
  CharUnits ElementAlign =
    BeginPtr.getAlignment().alignmentOfArrayElement(ElementSize);

  // Attempt to perform zero-initialization using memset.
  auto TryMemsetInitialization = [&]() -> bool {
    // FIXME: If the type is a pointer-to-data-member under the Itanium ABI,
    // we can initialize with a memset to -1.
    if (!CGM.getTypes().isZeroInitializable(ElementType))
      return false;

    // Optimization: since zero initialization will just set the memory
    // to all zeroes, generate a single memset to do it in one shot.

    // Subtract out the size of any elements we've already initialized.
    auto *RemainingSize = AllocSizeWithoutCookie;
    if (InitListElements) {
      // We know this can't overflow; we check this when doing the allocation.
      auto *InitializedSize = llvm::ConstantInt::get(
          RemainingSize->getType(),
          getContext().getTypeSizeInChars(ElementType).getQuantity() *
              InitListElements);
      RemainingSize = Builder.CreateSub(RemainingSize, InitializedSize);
    }

    // Create the memset.
    Builder.CreateMemSet(CurPtr, Builder.getInt8(0), RemainingSize, false);
    return true;
  };
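
  // Example: 'new int[n]()' reaches TryMemsetInitialization with no explicit
  // list elements, so the entire allocation (minus any cookie) is cleared
  // with a single memset rather than an element-by-element loop.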

  // If the initializer is an initializer list, first do the explicit elements.
  if (const InitListExpr *ILE = dyn_cast<InitListExpr>(Init)) {
    // Initializing from a (braced) string literal is a special case; the init
    // list element does not initialize a (single) array element.
    if (ILE->isStringLiteralInit()) {
      // Initialize the initial portion of length equal to that of the string
      // literal. The allocation must be for at least this much; we emitted a
      // check for that earlier.
      AggValueSlot Slot =
          AggValueSlot::forAddr(CurPtr, ElementType.getQualifiers(),
                                AggValueSlot::IsDestructed,
                                AggValueSlot::DoesNotNeedGCBarriers,
                                AggValueSlot::IsNotAliased,
                                AggValueSlot::DoesNotOverlap,
                                AggValueSlot::IsNotZeroed,
                                AggValueSlot::IsSanitizerChecked);
      EmitAggExpr(ILE->getInit(0), Slot);

      // Move past these elements.
      InitListElements =
          cast<ConstantArrayType>(ILE->getType()->getAsArrayTypeUnsafe())
              ->getSize().getZExtValue();
      CurPtr = Builder.CreateConstInBoundsGEP(
          CurPtr, InitListElements, "string.init.end");

      // Zero out the rest, if any remain.
      llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
      if (!ConstNum || !ConstNum->equalsInt(InitListElements)) {
        bool OK = TryMemsetInitialization();
        (void)OK;
        assert(OK && "couldn't memset character type?");
      }
      return;
    }
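
    // Example of the string-literal path above: 'new char[8]{"hi"}' copies
    // the three bytes of the literal (including the terminator) and then
    // memsets the remaining five bytes to zero.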
  919. InitListElements = ILE->getNumInits();
  920. // If this is a multi-dimensional array new, we will initialize multiple
  921. // elements with each init list element.
  922. QualType AllocType = E->getAllocatedType();
  923. if (const ConstantArrayType *CAT = dyn_cast_or_null<ConstantArrayType>(
  924. AllocType->getAsArrayTypeUnsafe())) {
  925. ElementTy = ConvertTypeForMem(AllocType);
  926. CurPtr = Builder.CreateElementBitCast(CurPtr, ElementTy);
  927. InitListElements *= getContext().getConstantArrayElementCount(CAT);
  928. }
  929. // Enter a partial-destruction Cleanup if necessary.
  930. if (needsEHCleanup(DtorKind)) {
  931. // In principle we could tell the Cleanup where we are more
  932. // directly, but the control flow can get so varied here that it
  933. // would actually be quite complex. Therefore we go through an
  934. // alloca.
  935. EndOfInit = CreateTempAlloca(BeginPtr.getType(), getPointerAlign(),
  936. "array.init.end");
  937. CleanupDominator = Builder.CreateStore(BeginPtr.getPointer(), EndOfInit);
  938. pushIrregularPartialArrayCleanup(BeginPtr.getPointer(), EndOfInit,
  939. ElementType, ElementAlign,
  940. getDestroyer(DtorKind));
  941. Cleanup = EHStack.stable_begin();
  942. }
  943. CharUnits StartAlign = CurPtr.getAlignment();
  944. for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i) {
  945. // Tell the cleanup that it needs to destroy up to this
  946. // element. TODO: some of these stores can be trivially
  947. // observed to be unnecessary.
  948. if (EndOfInit.isValid()) {
  949. auto FinishedPtr =
  950. Builder.CreateBitCast(CurPtr.getPointer(), BeginPtr.getType());
  951. Builder.CreateStore(FinishedPtr, EndOfInit);
  952. }
  953. // FIXME: If the last initializer is an incomplete initializer list for
  954. // an array, and we have an array filler, we can fold together the two
  955. // initialization loops.
  956. StoreAnyExprIntoOneUnit(*this, ILE->getInit(i),
  957. ILE->getInit(i)->getType(), CurPtr,
  958. AggValueSlot::DoesNotOverlap);
  959. CurPtr = Address(Builder.CreateInBoundsGEP(
  960. CurPtr.getElementType(), CurPtr.getPointer(),
  961. Builder.getSize(1), "array.exp.next"),
  962. CurPtr.getElementType(),
  963. StartAlign.alignmentAtOffset((i + 1) * ElementSize));
  964. }
  965. // The remaining elements are filled with the array filler expression.
  966. Init = ILE->getArrayFiller();
  967. // Extract the initializer for the individual array elements by pulling
  968. // out the array filler from all the nested initializer lists. This avoids
  969. // generating a nested loop for the initialization.
  970. while (Init && Init->getType()->isConstantArrayType()) {
  971. auto *SubILE = dyn_cast<InitListExpr>(Init);
  972. if (!SubILE)
  973. break;
  974. assert(SubILE->getNumInits() == 0 && "explicit inits in array filler?");
  975. Init = SubILE->getArrayFiller();
  976. }
  977. // Switch back to initializing one base element at a time.
  978. CurPtr = Builder.CreateElementBitCast(CurPtr, BeginPtr.getElementType());
  979. }
  980. // If all elements have already been initialized, skip any further
  981. // initialization.
  982. llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
  983. if (ConstNum && ConstNum->getZExtValue() <= InitListElements) {
  984. // If there was a Cleanup, deactivate it.
  985. if (CleanupDominator)
  986. DeactivateCleanupBlock(Cleanup, CleanupDominator);
  987. return;
  988. }
  989. assert(Init && "have trailing elements to initialize but no initializer");
  990. // If this is a constructor call, try to optimize it out, and failing that
  991. // emit a single loop to initialize all remaining elements.
  992. if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
  993. CXXConstructorDecl *Ctor = CCE->getConstructor();
  994. if (Ctor->isTrivial()) {
  995. // If new expression did not specify value-initialization, then there
  996. // is no initialization.
  997. if (!CCE->requiresZeroInitialization() || Ctor->getParent()->isEmpty())
  998. return;
  999. if (TryMemsetInitialization())
  1000. return;
  1001. }
  1002. // Store the new Cleanup position for irregular Cleanups.
  1003. //
  1004. // FIXME: Share this cleanup with the constructor call emission rather than
  1005. // having it create a cleanup of its own.
  1006. if (EndOfInit.isValid())
  1007. Builder.CreateStore(CurPtr.getPointer(), EndOfInit);
  1008. // Emit a constructor call loop to initialize the remaining elements.
  1009. if (InitListElements)
  1010. NumElements = Builder.CreateSub(
  1011. NumElements,
  1012. llvm::ConstantInt::get(NumElements->getType(), InitListElements));
  1013. EmitCXXAggrConstructorCall(Ctor, NumElements, CurPtr, CCE,
  1014. /*NewPointerIsChecked*/true,
  1015. CCE->requiresZeroInitialization());
  1016. return;
  1017. }

  // If this is value-initialization, we can usually use memset.
  ImplicitValueInitExpr IVIE(ElementType);
  if (isa<ImplicitValueInitExpr>(Init)) {
    if (TryMemsetInitialization())
      return;

    // Switch to an ImplicitValueInitExpr for the element type. This handles
    // only one case: multidimensional array new of pointers to members. In
    // all other cases, we already have an initializer for the array element.
    Init = &IVIE;
  }

  // At this point we should have found an initializer for the individual
  // elements of the array.
  assert(getContext().hasSameUnqualifiedType(ElementType, Init->getType()) &&
         "got wrong type of element to initialize");

  // If we have an empty initializer list, we can usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init))
    if (ILE->getNumInits() == 0 && TryMemsetInitialization())
      return;

  // If we have a struct whose every field is value-initialized, we can
  // usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init)) {
    if (const RecordType *RType = ILE->getType()->getAs<RecordType>()) {
      if (RType->getDecl()->isStruct()) {
        unsigned NumElements = 0;
        if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RType->getDecl()))
          NumElements = CXXRD->getNumBases();
        for (auto *Field : RType->getDecl()->fields())
          if (!Field->isUnnamedBitfield())
            ++NumElements;
        // FIXME: Recurse into nested InitListExprs.
        if (ILE->getNumInits() == NumElements)
          for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i)
            if (!isa<ImplicitValueInitExpr>(ILE->getInit(i)))
              --NumElements;
        if (ILE->getNumInits() == NumElements && TryMemsetInitialization())
          return;
      }
    }
  }

  // Create the loop blocks.
  llvm::BasicBlock *EntryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *LoopBB = createBasicBlock("new.loop");
  llvm::BasicBlock *ContBB = createBasicBlock("new.loop.end");

  // Find the end of the array, hoisted out of the loop.
  llvm::Value *EndPtr =
    Builder.CreateInBoundsGEP(BeginPtr.getElementType(), BeginPtr.getPointer(),
                              NumElements, "array.end");

  // If the number of elements isn't constant, we have to now check if there is
  // anything left to initialize.
  if (!ConstNum) {
    llvm::Value *IsEmpty =
      Builder.CreateICmpEQ(CurPtr.getPointer(), EndPtr, "array.isempty");
    Builder.CreateCondBr(IsEmpty, ContBB, LoopBB);
  }

  // Enter the loop.
  EmitBlock(LoopBB);

  // Set up the current-element phi.
  llvm::PHINode *CurPtrPhi =
    Builder.CreatePHI(CurPtr.getType(), 2, "array.cur");
  CurPtrPhi->addIncoming(CurPtr.getPointer(), EntryBB);

  CurPtr = Address(CurPtrPhi, ElementAlign);

  // Store the new Cleanup position for irregular Cleanups.
  if (EndOfInit.isValid())
    Builder.CreateStore(CurPtr.getPointer(), EndOfInit);

  // Enter a partial-destruction Cleanup if necessary.
  if (!CleanupDominator && needsEHCleanup(DtorKind)) {
    pushRegularPartialArrayCleanup(BeginPtr.getPointer(), CurPtr.getPointer(),
                                   ElementType, ElementAlign,
                                   getDestroyer(DtorKind));
    Cleanup = EHStack.stable_begin();
    CleanupDominator = Builder.CreateUnreachable();
  }

  // Emit the initializer into this element.
  StoreAnyExprIntoOneUnit(*this, Init, Init->getType(), CurPtr,
                          AggValueSlot::DoesNotOverlap);

  // Leave the Cleanup if we entered one.
  if (CleanupDominator) {
    DeactivateCleanupBlock(Cleanup, CleanupDominator);
    CleanupDominator->eraseFromParent();
  }

  // Advance to the next element by adjusting the pointer type as necessary.
  llvm::Value *NextPtr =
      Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr.getPointer(), 1,
                                         "array.next");

  // Check whether we've gotten to the end of the array and, if so,
  // exit the loop.
  llvm::Value *IsEnd = Builder.CreateICmpEQ(NextPtr, EndPtr, "array.atend");
  Builder.CreateCondBr(IsEnd, ContBB, LoopBB);
  CurPtrPhi->addIncoming(NextPtr, Builder.GetInsertBlock());

  EmitBlock(ContBB);
}
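
// Illustrative note (not part of the original source): for a new-expression
// such as
//   int *p = new int[n]{1, 2, 3};
// the three explicit initializers are stored one unit at a time by the code
// above, and the remaining n - 3 elements are value-initialized, typically
// via the TryMemsetInitialization fast path since int is zero-initializable.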

static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               QualType ElementType, llvm::Type *ElementTy,
                               Address NewPtr, llvm::Value *NumElements,
                               llvm::Value *AllocSizeWithoutCookie) {
  ApplyDebugLocation DL(CGF, E);
  if (E->isArray())
    CGF.EmitNewArrayInitializer(E, ElementType, ElementTy, NewPtr, NumElements,
                                AllocSizeWithoutCookie);
  else if (const Expr *Init = E->getInitializer())
    StoreAnyExprIntoOneUnit(CGF, Init, E->getAllocatedType(), NewPtr,
                            AggValueSlot::DoesNotOverlap);
}

/// Emit a call to an operator new or operator delete function, as implicitly
/// created by new-expressions and delete-expressions.
static RValue EmitNewDeleteCall(CodeGenFunction &CGF,
                                const FunctionDecl *CalleeDecl,
                                const FunctionProtoType *CalleeType,
                                const CallArgList &Args) {
  llvm::CallBase *CallOrInvoke;
  llvm::Constant *CalleePtr = CGF.CGM.GetAddrOfFunction(CalleeDecl);
  CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(CalleeDecl));
  RValue RV =
      CGF.EmitCall(CGF.CGM.getTypes().arrangeFreeFunctionCall(
                       Args, CalleeType, /*ChainCall=*/false),
                   Callee, ReturnValueSlot(), Args, &CallOrInvoke);

  /// C++1y [expr.new]p10:
  ///   [In a new-expression,] an implementation is allowed to omit a call
  ///   to a replaceable global allocation function.
  ///
  /// We model such elidable calls with the 'builtin' attribute.
  llvm::Function *Fn = dyn_cast<llvm::Function>(CalleePtr);
  if (CalleeDecl->isReplaceableGlobalAllocationFunction() &&
      Fn && Fn->hasFnAttribute(llvm::Attribute::NoBuiltin)) {
    CallOrInvoke->addFnAttr(llvm::Attribute::Builtin);
  }

  return RV;
}
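
// Illustrative note (not part of the original source): tagging the call with
// the 'builtin' attribute is what allows LLVM to later treat it as an
// elidable allocation, so that, e.g.,
//   delete new int;
// can be optimized away entirely when both operators are the replaceable
// global ones.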

RValue CodeGenFunction::EmitBuiltinNewDeleteCall(const FunctionProtoType *Type,
                                                 const CallExpr *TheCall,
                                                 bool IsDelete) {
  CallArgList Args;
  EmitCallArgs(Args, Type, TheCall->arguments());
  // Find the allocation or deallocation function that we're calling.
  ASTContext &Ctx = getContext();
  DeclarationName Name = Ctx.DeclarationNames
      .getCXXOperatorName(IsDelete ? OO_Delete : OO_New);

  for (auto *Decl : Ctx.getTranslationUnitDecl()->lookup(Name))
    if (auto *FD = dyn_cast<FunctionDecl>(Decl))
      if (Ctx.hasSameType(FD->getType(), QualType(Type, 0)))
        return EmitNewDeleteCall(*this, FD, Type, Args);
  llvm_unreachable("predeclared global operator new/delete is missing");
}
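
// Illustrative note (not part of the original source): this entry point is
// reached for direct builtin calls such as
//   void *p = __builtin_operator_new(16);
//   __builtin_operator_delete(p);
// where the callee is found by lookup among the predeclared global operators
// rather than being named syntactically.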

namespace {
/// The parameters to pass to a usual operator delete.
struct UsualDeleteParams {
  bool DestroyingDelete = false;
  bool Size = false;
  bool Alignment = false;
};
}

static UsualDeleteParams getUsualDeleteParams(const FunctionDecl *FD) {
  UsualDeleteParams Params;

  const FunctionProtoType *FPT = FD->getType()->castAs<FunctionProtoType>();
  auto AI = FPT->param_type_begin(), AE = FPT->param_type_end();

  // The first argument is always a void*.
  ++AI;

  // The next parameter may be a std::destroying_delete_t.
  if (FD->isDestroyingOperatorDelete()) {
    Params.DestroyingDelete = true;
    assert(AI != AE);
    ++AI;
  }

  // Figure out what other parameters we should be implicitly passing.
  if (AI != AE && (*AI)->isIntegerType()) {
    Params.Size = true;
    ++AI;
  }

  if (AI != AE && (*AI)->isAlignValT()) {
    Params.Alignment = true;
    ++AI;
  }

  assert(AI == AE && "unexpected usual deallocation function parameter");
  return Params;
}
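
// Illustrative note (not part of the original source): the usual deallocation
// signatures this classifies include, for example:
//   void operator delete(void *);                                // neither
//   void operator delete(void *, std::size_t);                   // Size
//   void operator delete(void *, std::align_val_t);              // Alignment
//   void operator delete(void *, std::size_t, std::align_val_t); // both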

namespace {
  /// A cleanup to call the given 'operator delete' function upon abnormal
  /// exit from a new expression. Templated on a traits type that deals with
  /// ensuring that the arguments dominate the cleanup if necessary.
  template<typename Traits>
  class CallDeleteDuringNew final : public EHScopeStack::Cleanup {
    /// Type used to hold llvm::Value*s.
    typedef typename Traits::ValueTy ValueTy;
    /// Type used to hold RValues.
    typedef typename Traits::RValueTy RValueTy;
    struct PlacementArg {
      RValueTy ArgValue;
      QualType ArgType;
    };

    unsigned NumPlacementArgs : 31;
    unsigned PassAlignmentToPlacementDelete : 1;
    const FunctionDecl *OperatorDelete;
    ValueTy Ptr;
    ValueTy AllocSize;
    CharUnits AllocAlign;

    PlacementArg *getPlacementArgs() {
      return reinterpret_cast<PlacementArg *>(this + 1);
    }

  public:
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(PlacementArg);
    }

    CallDeleteDuringNew(size_t NumPlacementArgs,
                        const FunctionDecl *OperatorDelete, ValueTy Ptr,
                        ValueTy AllocSize, bool PassAlignmentToPlacementDelete,
                        CharUnits AllocAlign)
      : NumPlacementArgs(NumPlacementArgs),
        PassAlignmentToPlacementDelete(PassAlignmentToPlacementDelete),
        OperatorDelete(OperatorDelete), Ptr(Ptr), AllocSize(AllocSize),
        AllocAlign(AllocAlign) {}

    void setPlacementArg(unsigned I, RValueTy Arg, QualType Type) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = {Arg, Type};
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const auto *FPT = OperatorDelete->getType()->castAs<FunctionProtoType>();
      CallArgList DeleteArgs;

      // The first argument is always a void* (or C* for a destroying operator
      // delete for class type C).
      DeleteArgs.add(Traits::get(CGF, Ptr), FPT->getParamType(0));

      // Figure out what other parameters we should be implicitly passing.
      UsualDeleteParams Params;
      if (NumPlacementArgs) {
        // A placement deallocation function is implicitly passed an alignment
        // if the placement allocation function was, but is never passed a
        // size.
        Params.Alignment = PassAlignmentToPlacementDelete;
      } else {
        // For a non-placement new-expression, 'operator delete' can take a
        // size and/or an alignment if it has the right parameters.
        Params = getUsualDeleteParams(OperatorDelete);
      }

      assert(!Params.DestroyingDelete &&
             "should not call destroying delete in a new-expression");

      // The second argument can be a std::size_t (for non-placement delete).
      if (Params.Size)
        DeleteArgs.add(Traits::get(CGF, AllocSize),
                       CGF.getContext().getSizeType());

      // The next (second or third) argument can be a std::align_val_t, which
      // is an enum whose underlying type is std::size_t.
      // FIXME: Use the right type as the parameter type. Note that in a call
      // to operator delete(size_t, ...), we may not have it available.
      if (Params.Alignment)
        DeleteArgs.add(RValue::get(llvm::ConstantInt::get(
                           CGF.SizeTy, AllocAlign.getQuantity())),
                       CGF.getContext().getSizeType());

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I) {
        auto Arg = getPlacementArgs()[I];
        DeleteArgs.add(Traits::get(CGF, Arg.ArgValue), Arg.ArgType);
      }

      // Call 'operator delete'.
      EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
    }
  };
}
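
// Illustrative note (not part of the original source): the cleanup above is
// allocated with trailing storage for its placement arguments. For
//   new (a, b) T(...);
// pushCleanupWithExtra reserves getExtraSize(2) extra bytes, and
// setPlacementArg fills them in, so the cleanup can replay a and b to the
// matching placement 'operator delete' if T's constructor throws.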

/// Enter a cleanup to call 'operator delete' if the initializer in a
/// new-expression throws.
static void EnterNewDeleteCleanup(CodeGenFunction &CGF,
                                  const CXXNewExpr *E,
                                  Address NewPtr,
                                  llvm::Value *AllocSize,
                                  CharUnits AllocAlign,
                                  const CallArgList &NewArgs) {
  unsigned NumNonPlacementArgs = E->passAlignment() ? 2 : 1;

  // If we're not inside a conditional branch, then the cleanup will
  // dominate and we can do the easier (and more efficient) thing.
  if (!CGF.isInConditionalBranch()) {
    struct DirectCleanupTraits {
      typedef llvm::Value *ValueTy;
      typedef RValue RValueTy;
      static RValue get(CodeGenFunction &, ValueTy V) { return RValue::get(V); }
      static RValue get(CodeGenFunction &, RValueTy V) { return V; }
    };

    typedef CallDeleteDuringNew<DirectCleanupTraits> DirectCleanup;

    DirectCleanup *Cleanup = CGF.EHStack
      .pushCleanupWithExtra<DirectCleanup>(EHCleanup,
                                           E->getNumPlacementArgs(),
                                           E->getOperatorDelete(),
                                           NewPtr.getPointer(),
                                           AllocSize,
                                           E->passAlignment(),
                                           AllocAlign);
    for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I) {
      auto &Arg = NewArgs[I + NumNonPlacementArgs];
      Cleanup->setPlacementArg(I, Arg.getRValue(CGF), Arg.Ty);
    }

    return;
  }

  // Otherwise, we need to save all this stuff.
  DominatingValue<RValue>::saved_type SavedNewPtr =
    DominatingValue<RValue>::save(CGF, RValue::get(NewPtr.getPointer()));
  DominatingValue<RValue>::saved_type SavedAllocSize =
    DominatingValue<RValue>::save(CGF, RValue::get(AllocSize));

  struct ConditionalCleanupTraits {
    typedef DominatingValue<RValue>::saved_type ValueTy;
    typedef DominatingValue<RValue>::saved_type RValueTy;
    static RValue get(CodeGenFunction &CGF, ValueTy V) {
      return V.restore(CGF);
    }
  };
  typedef CallDeleteDuringNew<ConditionalCleanupTraits> ConditionalCleanup;

  ConditionalCleanup *Cleanup = CGF.EHStack
    .pushCleanupWithExtra<ConditionalCleanup>(EHCleanup,
                                              E->getNumPlacementArgs(),
                                              E->getOperatorDelete(),
                                              SavedNewPtr,
                                              SavedAllocSize,
                                              E->passAlignment(),
                                              AllocAlign);
  for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I) {
    auto &Arg = NewArgs[I + NumNonPlacementArgs];
    Cleanup->setPlacementArg(
        I, DominatingValue<RValue>::save(CGF, Arg.getRValue(CGF)), Arg.Ty);
  }

  CGF.initFullExprCleanup();
}
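
// Illustrative note (not part of the original source): the saved-value path
// above matters when the new-expression is only conditionally evaluated, as in
//   T *p = cond ? new T(x) : nullptr;
// where the cleanup's arguments must be spilled so that they dominate the
// cleanup's emission point.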

llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  // The element type being allocated.
  QualType allocType = getContext().getBaseElementType(E->getAllocatedType());

  // 1. Build a call to the allocation function.
  FunctionDecl *allocator = E->getOperatorNew();

  // If there is a brace-initializer, we cannot allocate fewer elements than
  // inits.
  unsigned minElements = 0;
  if (E->isArray() && E->hasInitializer()) {
    const InitListExpr *ILE = dyn_cast<InitListExpr>(E->getInitializer());
    if (ILE && ILE->isStringLiteralInit())
      minElements =
          cast<ConstantArrayType>(ILE->getType()->getAsArrayTypeUnsafe())
              ->getSize().getZExtValue();
    else if (ILE)
      minElements = ILE->getNumInits();
  }

  llvm::Value *numElements = nullptr;
  llvm::Value *allocSizeWithoutCookie = nullptr;
  llvm::Value *allocSize =
    EmitCXXNewAllocSize(*this, E, minElements, numElements,
                        allocSizeWithoutCookie);
  CharUnits allocAlign = getContext().getTypeAlignInChars(allocType);

  // Emit the allocation call.  If the allocator is a global placement
  // operator, just "inline" it directly.
  Address allocation = Address::invalid();
  CallArgList allocatorArgs;
  if (allocator->isReservedGlobalPlacementOperator()) {
    assert(E->getNumPlacementArgs() == 1);
    const Expr *arg = *E->placement_arguments().begin();

    LValueBaseInfo BaseInfo;
    allocation = EmitPointerWithAlignment(arg, &BaseInfo);

    // The pointer expression will, in many cases, be an opaque void*.
    // In these cases, discard the computed alignment and use the
    // formal alignment of the allocated type.
    if (BaseInfo.getAlignmentSource() != AlignmentSource::Decl)
      allocation = allocation.withAlignment(allocAlign);

    // Set up allocatorArgs for the call to operator delete if it's not
    // the reserved global operator.
    if (E->getOperatorDelete() &&
        !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
      allocatorArgs.add(RValue::get(allocSize), getContext().getSizeType());
      allocatorArgs.add(RValue::get(allocation.getPointer()), arg->getType());
    }
  } else {
    const FunctionProtoType *allocatorType =
      allocator->getType()->castAs<FunctionProtoType>();
    unsigned ParamsToSkip = 0;

    // The allocation size is the first argument.
    QualType sizeType = getContext().getSizeType();
    allocatorArgs.add(RValue::get(allocSize), sizeType);
    ++ParamsToSkip;

    if (allocSize != allocSizeWithoutCookie) {
      CharUnits cookieAlign = getSizeAlign(); // FIXME: Ask the ABI.
      allocAlign = std::max(allocAlign, cookieAlign);
    }

    // The allocation alignment may be passed as the second argument.
    if (E->passAlignment()) {
      QualType AlignValT = sizeType;
      if (allocatorType->getNumParams() > 1) {
        AlignValT = allocatorType->getParamType(1);
        assert(getContext().hasSameUnqualifiedType(
                   AlignValT->castAs<EnumType>()->getDecl()->getIntegerType(),
                   sizeType) &&
               "wrong type for alignment parameter");
        ++ParamsToSkip;
      } else {
        // Corner case, passing alignment to 'operator new(size_t, ...)'.
        assert(allocator->isVariadic() && "can't pass alignment to allocator");
      }
      allocatorArgs.add(
          RValue::get(llvm::ConstantInt::get(SizeTy, allocAlign.getQuantity())),
          AlignValT);
    }

    // FIXME: Why do we not pass a CalleeDecl here?
    EmitCallArgs(allocatorArgs, allocatorType, E->placement_arguments(),
                 /*AC*/AbstractCallee(), /*ParamsToSkip*/ParamsToSkip);

    RValue RV =
      EmitNewDeleteCall(*this, allocator, allocatorType, allocatorArgs);

    // Set !heapallocsite metadata on the call to operator new.
    if (getDebugInfo())
      if (auto *newCall = dyn_cast<llvm::CallBase>(RV.getScalarVal()))
        getDebugInfo()->addHeapAllocSiteMetadata(newCall, allocType,
                                                 E->getExprLoc());

    // If this was a call to a global replaceable allocation function that does
    // not take an alignment argument, the allocator is known to produce
    // storage that's suitably aligned for any object that fits, up to a known
    // threshold. Otherwise assume it's suitably aligned for the allocated
    // type.
    CharUnits allocationAlign = allocAlign;
    if (!E->passAlignment() &&
        allocator->isReplaceableGlobalAllocationFunction()) {
      unsigned AllocatorAlign = llvm::PowerOf2Floor(std::min<uint64_t>(
          Target.getNewAlign(), getContext().getTypeSize(allocType)));
      allocationAlign = std::max(
          allocationAlign, getContext().toCharUnitsFromBits(AllocatorAlign));
    }

    allocation = Address(RV.getScalarVal(), Int8Ty, allocationAlign);
  }

  // Emit a null check on the allocation result if the allocation
  // function is allowed to return null (because it has a non-throwing
  // exception spec or is the reserved placement new) and we have an
  // interesting initializer or will be running sanitizers on the
  // initialization.
  bool nullCheck = E->shouldNullCheckAllocation() &&
                   (!allocType.isPODType(getContext()) || E->hasInitializer() ||
                    sanitizePerformTypeCheck());

  llvm::BasicBlock *nullCheckBB = nullptr;
  llvm::BasicBlock *contBB = nullptr;

  // The null-check means that the initializer is conditionally
  // evaluated.
  ConditionalEvaluation conditional(*this);

  if (nullCheck) {
    conditional.begin(*this);

    nullCheckBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("new.notnull");
    contBB = createBasicBlock("new.cont");

    llvm::Value *isNull =
      Builder.CreateIsNull(allocation.getPointer(), "new.isnull");
    Builder.CreateCondBr(isNull, contBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // If there's an operator delete, enter a cleanup to call it if an
  // exception is thrown.
  EHScopeStack::stable_iterator operatorDeleteCleanup;
  llvm::Instruction *cleanupDominator = nullptr;
  if (E->getOperatorDelete() &&
      !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
    EnterNewDeleteCleanup(*this, E, allocation, allocSize, allocAlign,
                          allocatorArgs);
    operatorDeleteCleanup = EHStack.stable_begin();
    cleanupDominator = Builder.CreateUnreachable();
  }

  assert((allocSize == allocSizeWithoutCookie) ==
         CalculateCookiePadding(*this, E).isZero());
  if (allocSize != allocSizeWithoutCookie) {
    assert(E->isArray());
    allocation = CGM.getCXXABI().InitializeArrayCookie(*this, allocation,
                                                       numElements,
                                                       E, allocType);
  }

  llvm::Type *elementTy = ConvertTypeForMem(allocType);
  Address result = Builder.CreateElementBitCast(allocation, elementTy);

  // Pass the pointer through launder.invariant.group to avoid propagating
  // vptr information that may be associated with the previous type.
  // To avoid breaking LTO across different optimization levels, we do this
  // regardless of the optimization level.
  if (CGM.getCodeGenOpts().StrictVTablePointers &&
      allocator->isReservedGlobalPlacementOperator())
    result = Builder.CreateLaunderInvariantGroup(result);

  // Emit sanitizer checks for pointer value now, so that in the case of an
  // array it was checked only once and not at each constructor call. We may
  // have already checked that the pointer is non-null.
  // FIXME: If we have an array cookie and a potentially-throwing allocator,
  // we'll null check the wrong pointer here.
  SanitizerSet SkippedChecks;
  SkippedChecks.set(SanitizerKind::Null, nullCheck);
  EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall,
                E->getAllocatedTypeSourceInfo()->getTypeLoc().getBeginLoc(),
                result.getPointer(), allocType, result.getAlignment(),
                SkippedChecks, numElements);

  EmitNewInitializer(*this, E, allocType, elementTy, result, numElements,
                     allocSizeWithoutCookie);
  if (E->isArray()) {
    // NewPtr is a pointer to the base element type.  If we're
    // allocating an array of arrays, we'll need to cast back to the
    // array pointer type.
    llvm::Type *resultType = ConvertTypeForMem(E->getType());
    if (result.getType() != resultType)
      result = Builder.CreateBitCast(result, resultType);
  }

  // Deactivate the 'operator delete' cleanup if we finished
  // initialization.
  if (operatorDeleteCleanup.isValid()) {
    DeactivateCleanupBlock(operatorDeleteCleanup, cleanupDominator);
    cleanupDominator->eraseFromParent();
  }

  llvm::Value *resultPtr = result.getPointer();
  if (nullCheck) {
    conditional.end(*this);

    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    EmitBlock(contBB);

    llvm::PHINode *PHI = Builder.CreatePHI(resultPtr->getType(), 2);
    PHI->addIncoming(resultPtr, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(resultPtr->getType()),
                     nullCheckBB);

    resultPtr = PHI;
  }

  return resultPtr;
}
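
// Illustrative note (not part of the original source): for a simple
//   T *p = new T(args);
// the emission above reduces to: call 'operator new', optionally null-check
// the result, push an 'operator delete' cleanup, emit the initializer, then
// deactivate the cleanup and (on the null-checking path) phi the result with
// null.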

void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr, QualType DeleteTy,
                                     llvm::Value *NumElements,
                                     CharUnits CookieSize) {
  assert((!NumElements && CookieSize.isZero()) ||
         DeleteFD->getOverloadedOperator() == OO_Array_Delete);

  const auto *DeleteFTy = DeleteFD->getType()->castAs<FunctionProtoType>();
  CallArgList DeleteArgs;

  auto Params = getUsualDeleteParams(DeleteFD);
  auto ParamTypeIt = DeleteFTy->param_type_begin();

  // Pass the pointer itself.
  QualType ArgTy = *ParamTypeIt++;
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.add(RValue::get(DeletePtr), ArgTy);

  // Pass the std::destroying_delete tag if present.
  llvm::AllocaInst *DestroyingDeleteTag = nullptr;
  if (Params.DestroyingDelete) {
    QualType DDTag = *ParamTypeIt++;
    llvm::Type *Ty = getTypes().ConvertType(DDTag);
    CharUnits Align = CGM.getNaturalTypeAlignment(DDTag);
    DestroyingDeleteTag = CreateTempAlloca(Ty, "destroying.delete.tag");
    DestroyingDeleteTag->setAlignment(Align.getAsAlign());
    DeleteArgs.add(
        RValue::getAggregate(Address(DestroyingDeleteTag, Align)), DDTag);
  }

  // Pass the size if the delete function has a size_t parameter.
  if (Params.Size) {
    QualType SizeType = *ParamTypeIt++;
    CharUnits DeleteTypeSize = getContext().getTypeSizeInChars(DeleteTy);
    llvm::Value *Size = llvm::ConstantInt::get(ConvertType(SizeType),
                                               DeleteTypeSize.getQuantity());

    // For array new, multiply by the number of elements.
    if (NumElements)
      Size = Builder.CreateMul(Size, NumElements);

    // If there is a cookie, add the cookie size.
    if (!CookieSize.isZero())
      Size = Builder.CreateAdd(
          Size, llvm::ConstantInt::get(SizeTy, CookieSize.getQuantity()));

    DeleteArgs.add(RValue::get(Size), SizeType);
  }

  // Pass the alignment if the delete function has an align_val_t parameter.
  if (Params.Alignment) {
    QualType AlignValType = *ParamTypeIt++;
    CharUnits DeleteTypeAlign =
        getContext().toCharUnitsFromBits(getContext().getTypeAlignIfKnown(
            DeleteTy, true /* NeedsPreferredAlignment */));
    llvm::Value *Align = llvm::ConstantInt::get(ConvertType(AlignValType),
                                                DeleteTypeAlign.getQuantity());
    DeleteArgs.add(RValue::get(Align), AlignValType);
  }

  assert(ParamTypeIt == DeleteFTy->param_type_end() &&
         "unknown parameter to usual delete function");

  // Emit the call to delete.
  EmitNewDeleteCall(*this, DeleteFD, DeleteFTy, DeleteArgs);

  // If call argument lowering didn't use the destroying_delete_t alloca,
  // remove it again.
  if (DestroyingDeleteTag && DestroyingDeleteTag->use_empty())
    DestroyingDeleteTag->eraseFromParent();
}
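
// Illustrative note (not part of the original source): for a sized delete
//   void operator delete[](void *, std::size_t);
// deleting an array of 10 elements with an 8-byte cookie passes
// 10 * sizeof(T) + 8 as the size argument, which is exactly the mul/add
// sequence emitted above.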

namespace {
  /// Calls the given 'operator delete' on a single object.
  struct CallObjectDelete final : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    QualType ElementType;

    CallObjectDelete(llvm::Value *Ptr,
                     const FunctionDecl *OperatorDelete,
                     QualType ElementType)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), ElementType(ElementType) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType);
    }
  };
}

void
CodeGenFunction::pushCallObjectDeleteCleanup(const FunctionDecl *OperatorDelete,
                                             llvm::Value *CompletePtr,
                                             QualType ElementType) {
  EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup, CompletePtr,
                                        OperatorDelete, ElementType);
}

/// Emit the code for deleting a single object with a destroying operator
/// delete. If the element type has a non-virtual destructor, Ptr has already
/// been converted to the type of the parameter of 'operator delete'. Otherwise
/// Ptr points to an object of the static type.
static void EmitDestroyingObjectDelete(CodeGenFunction &CGF,
                                       const CXXDeleteExpr *DE, Address Ptr,
                                       QualType ElementType) {
  auto *Dtor = ElementType->getAsCXXRecordDecl()->getDestructor();
  if (Dtor && Dtor->isVirtual())
    CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                Dtor);
  else
    CGF.EmitDeleteCall(DE->getOperatorDelete(), Ptr.getPointer(), ElementType);
}
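
// Illustrative note (not part of the original source): a destroying delete,
// e.g.
//   struct S { void operator delete(S *, std::destroying_delete_t); };
// receives the still-alive object, and the callee is responsible for running
// the destructor; that is why no destructor call is emitted here except via
// the ABI's virtual path when the destructor is virtual.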

/// Emit the code for deleting a single object.
/// \return \c true if we started emitting UnconditionalDeleteBlock, \c false
/// if not.
static bool EmitObjectDelete(CodeGenFunction &CGF,
                             const CXXDeleteExpr *DE,
                             Address Ptr,
                             QualType ElementType,
                             llvm::BasicBlock *UnconditionalDeleteBlock) {
  // C++11 [expr.delete]p3:
  //   If the static type of the object to be deleted is different from its
  //   dynamic type, the static type shall be a base class of the dynamic type
  //   of the object to be deleted and the static type shall have a virtual
  //   destructor or the behavior is undefined.
  CGF.EmitTypeCheck(CodeGenFunction::TCK_MemberCall,
                    DE->getExprLoc(), Ptr.getPointer(),
                    ElementType);

  const FunctionDecl *OperatorDelete = DE->getOperatorDelete();
  assert(!OperatorDelete->isDestroyingOperatorDelete());

  // Find the destructor for the type, if applicable.  If the
  // destructor is virtual, we'll just emit the vcall and return.
  const CXXDestructorDecl *Dtor = nullptr;
  if (const RecordType *RT = ElementType->getAs<RecordType>()) {
    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->hasDefinition() && !RD->hasTrivialDestructor()) {
      Dtor = RD->getDestructor();

      if (Dtor->isVirtual()) {
        bool UseVirtualCall = true;
        const Expr *Base = DE->getArgument();
        if (auto *DevirtualizedDtor =
                dyn_cast_or_null<const CXXDestructorDecl>(
                    Dtor->getDevirtualizedMethod(
                        Base, CGF.CGM.getLangOpts().AppleKext))) {
          UseVirtualCall = false;
          const CXXRecordDecl *DevirtualizedClass =
              DevirtualizedDtor->getParent();
          if (declaresSameEntity(getCXXRecord(Base), DevirtualizedClass)) {
            // Devirtualized to the class of the base type (the type of the
            // whole expression).
            Dtor = DevirtualizedDtor;
          } else {
            // Devirtualized to some other type. Would need to cast the this
            // pointer to that type but we don't have support for that yet, so
            // do a virtual call. FIXME: handle the case where it is
            // devirtualized to the derived type (the type of the inner
            // expression) as in EmitCXXMemberOrOperatorMemberCallExpr.
            UseVirtualCall = true;
          }
        }
        if (UseVirtualCall) {
          CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                      Dtor);
          return false;
        }
      }
    }
  }

  // Make sure that we call delete even if the dtor throws.
  // This doesn't have to be a conditional cleanup because we're going
  // to pop it off in a second.
  CGF.EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup,
                                            Ptr.getPointer(),
                                            OperatorDelete, ElementType);

  if (Dtor)
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false,
                              Ptr, ElementType);
  else if (auto Lifetime = ElementType.getObjCLifetime()) {
    switch (Lifetime) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong:
      CGF.EmitARCDestroyStrong(Ptr, ARCPreciseLifetime);
      break;

    case Qualifiers::OCL_Weak:
      CGF.EmitARCDestroyWeak(Ptr);
      break;
    }
  }

  // When optimizing for size, call 'operator delete' unconditionally.
  if (CGF.CGM.getCodeGenOpts().OptimizeSize > 1) {
    CGF.EmitBlock(UnconditionalDeleteBlock);
    CGF.PopCleanupBlock();
    return true;
  }

  CGF.PopCleanupBlock();
  return false;
}
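
// Illustrative note (not part of the original source): the OptimizeSize > 1
// case above emits the 'operator delete' call into the block that the null
// check also branches to, so delete is called even for a null pointer (which
// is permitted) and one branch is saved; the caller then skips re-emitting
// that block when this function returns true.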

namespace {
  /// Calls the given 'operator delete' on an array of objects.
  struct CallArrayDelete final : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    llvm::Value *NumElements;
    QualType ElementType;
    CharUnits CookieSize;

    CallArrayDelete(llvm::Value *Ptr,
                    const FunctionDecl *OperatorDelete,
                    llvm::Value *NumElements,
                    QualType ElementType,
                    CharUnits CookieSize)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), NumElements(NumElements),
        ElementType(ElementType), CookieSize(CookieSize) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType, NumElements,
                         CookieSize);
    }
  };
}

/// Emit the code for deleting an array of objects.
static void EmitArrayDelete(CodeGenFunction &CGF,
                            const CXXDeleteExpr *E,
                            Address deletedPtr,
                            QualType elementType) {
  llvm::Value *numElements = nullptr;
  llvm::Value *allocatedPtr = nullptr;
  CharUnits cookieSize;
  CGF.CGM.getCXXABI().ReadArrayCookie(CGF, deletedPtr, E, elementType,
                                      numElements, allocatedPtr, cookieSize);

  assert(allocatedPtr && "ReadArrayCookie didn't set allocated pointer");

  // Make sure that we call delete even if one of the dtors throws.
  const FunctionDecl *operatorDelete = E->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallArrayDelete>(NormalAndEHCleanup,
                                           allocatedPtr, operatorDelete,
                                           numElements, elementType,
                                           cookieSize);

  // Destroy the elements.
  if (QualType::DestructionKind dtorKind = elementType.isDestructedType()) {
    assert(numElements && "no element count for a type with a destructor!");

    CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
    CharUnits elementAlign =
      deletedPtr.getAlignment().alignmentOfArrayElement(elementSize);

    llvm::Value *arrayBegin = deletedPtr.getPointer();
    llvm::Value *arrayEnd = CGF.Builder.CreateInBoundsGEP(
        deletedPtr.getElementType(), arrayBegin, numElements, "delete.end");

    // Note that it is legal to allocate a zero-length array, and we
    // can never fold the check away because the length should always
    // come from a cookie.
    CGF.emitArrayDestroy(arrayBegin, arrayEnd, elementType, elementAlign,
                         CGF.getDestroyer(dtorKind),
                         /*checkZeroLength*/ true,
                         CGF.needsEHCleanup(dtorKind));
  }

  // Pop the cleanup block.
  CGF.PopCleanupBlock();
}
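
// Illustrative note (not part of the original source): for
//   delete[] p;   // S *p, with S having a non-trivial destructor
// the element count is not part of the pointer, so the ABI (e.g. Itanium)
// stores it in a cookie before the array; ReadArrayCookie recovers the count
// and the original allocation pointer, the elements are destroyed, and the
// cleanup frees the cookie-adjusted pointer.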

void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  const Expr *Arg = E->getArgument();
  Address Ptr = EmitPointerWithAlignment(Arg);

  // Null check the pointer.
  //
  // We could avoid this null check if we can determine that the object
  // destruction is trivial and doesn't require an array cookie; we can
  // unconditionally perform the operator delete call in that case. For now, we
  // assume that deleted pointers are null rarely enough that it's better to
  // keep the branch. This might be worth revisiting for a -O0 code size win.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull = Builder.CreateIsNull(Ptr.getPointer(), "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  QualType DeleteTy = E->getDestroyedType();

  // A destroying operator delete overrides the entire operation of the
  // delete expression.
  if (E->getOperatorDelete()->isDestroyingOperatorDelete()) {
    EmitDestroyingObjectDelete(*this, E, Ptr, DeleteTy);
    EmitBlock(DeleteEnd);
    return;
  }

  // We might be deleting a pointer to array.  If so, GEP down to the
  // first non-array element.
  // (this assumes that A(*)[3][7] is converted to [3 x [7 x %A]]*)
  if (DeleteTy->isConstantArrayType()) {
    llvm::Value *Zero = Builder.getInt32(0);
    SmallVector<llvm::Value*,8> GEP;

    GEP.push_back(Zero); // point at the outermost array

    // For each layer of array type we're pointing at:
    while (const ConstantArrayType *Arr
             = getContext().getAsConstantArrayType(DeleteTy)) {
      // 1. Unpeel the array type.
      DeleteTy = Arr->getElementType();

      // 2. GEP to the first element of the array.
      GEP.push_back(Zero);
    }

    Ptr = Address(Builder.CreateInBoundsGEP(Ptr.getElementType(),
                                            Ptr.getPointer(), GEP, "del.first"),
                  Ptr.getAlignment());
  }

  assert(ConvertTypeForMem(DeleteTy) == Ptr.getElementType());

  if (E->isArrayForm()) {
    EmitArrayDelete(*this, E, Ptr, DeleteTy);
    EmitBlock(DeleteEnd);
  } else {
    if (!EmitObjectDelete(*this, E, Ptr, DeleteTy, DeleteEnd))
      EmitBlock(DeleteEnd);
  }
}

static bool isGLValueFromPointerDeref(const Expr *E) {
  E = E->IgnoreParens();

  if (const auto *CE = dyn_cast<CastExpr>(E)) {
    if (!CE->getSubExpr()->isGLValue())
      return false;
    return isGLValueFromPointerDeref(CE->getSubExpr());
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E))
    return isGLValueFromPointerDeref(OVE->getSourceExpr());

  if (const auto *BO = dyn_cast<BinaryOperator>(E))
    if (BO->getOpcode() == BO_Comma)
      return isGLValueFromPointerDeref(BO->getRHS());

  if (const auto *ACO = dyn_cast<AbstractConditionalOperator>(E))
    return isGLValueFromPointerDeref(ACO->getTrueExpr()) ||
           isGLValueFromPointerDeref(ACO->getFalseExpr());

  // C++11 [expr.sub]p1:
  //   The expression E1[E2] is identical (by definition) to *((E1)+(E2))
  if (isa<ArraySubscriptExpr>(E))
    return true;

  if (const auto *UO = dyn_cast<UnaryOperator>(E))
    if (UO->getOpcode() == UO_Deref)
      return true;

  return false;
}
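
// Illustrative note (not part of the original source): under the walk above,
//   typeid(*p), typeid((*p)), typeid(c ? *p : *q), typeid(p[0])
// all count as glvalues formed by a pointer dereference and so get a null
// check, while typeid applied to a plain object lvalue does not.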

static llvm::Value *EmitTypeidFromVTable(CodeGenFunction &CGF, const Expr *E,
                                         llvm::Type *StdTypeInfoPtrTy) {
  // Get the vtable pointer.
  Address ThisPtr = CGF.EmitLValue(E).getAddress(CGF);

  QualType SrcRecordTy = E->getType();

  // C++ [class.cdtor]p4:
  //   If the operand of typeid refers to the object under construction or
  //   destruction and the static type of the operand is neither the
  //   constructor or destructor's class nor one of its bases, the behavior is
  //   undefined.
  CGF.EmitTypeCheck(CodeGenFunction::TCK_DynamicOperation, E->getExprLoc(),
                    ThisPtr.getPointer(), SrcRecordTy);

  // C++ [expr.typeid]p2:
  //   If the glvalue expression is obtained by applying the unary * operator
  //   to a pointer and the pointer is a null pointer value, the typeid
  //   expression throws the std::bad_typeid exception.
  //
  // However, this paragraph's intent is not clear. We choose a very generous
  // interpretation which requires us to consider comma operators, conditional
  // operators, parentheses and other such constructs.
  if (CGF.CGM.getCXXABI().shouldTypeidBeNullChecked(
          isGLValueFromPointerDeref(E), SrcRecordTy)) {
    llvm::BasicBlock *BadTypeidBlock =
        CGF.createBasicBlock("typeid.bad_typeid");
    llvm::BasicBlock *EndBlock = CGF.createBasicBlock("typeid.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ThisPtr.getPointer());
    CGF.Builder.CreateCondBr(IsNull, BadTypeidBlock, EndBlock);

    CGF.EmitBlock(BadTypeidBlock);
    CGF.CGM.getCXXABI().EmitBadTypeidCall(CGF);
    CGF.EmitBlock(EndBlock);
  }

  return CGF.CGM.getCXXABI().EmitTypeid(CGF, SrcRecordTy, ThisPtr,
                                        StdTypeInfoPtrTy);
}

llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  llvm::Type *StdTypeInfoPtrTy =
    ConvertType(E->getType())->getPointerTo();

  if (E->isTypeOperand()) {
    llvm::Constant *TypeInfo =
        CGM.GetAddrOfRTTIDescriptor(E->getTypeOperand(getContext()));
    return Builder.CreateBitCast(TypeInfo, StdTypeInfoPtrTy);
  }

  // C++ [expr.typeid]p2:
  //   When typeid is applied to a glvalue expression whose type is a
  //   polymorphic class type, the result refers to a std::type_info object
  //   representing the type of the most derived object (that is, the dynamic
  //   type) to which the glvalue refers.
  // If the operand is already the most derived object, there is no need to
  // look up the vtable.
  if (E->isPotentiallyEvaluated() && !E->isMostDerived(getContext()))
    return EmitTypeidFromVTable(*this, E->getExprOperand(),
                                StdTypeInfoPtrTy);

  QualType OperandTy = E->getExprOperand()->getType();
  return Builder.CreateBitCast(CGM.GetAddrOfRTTIDescriptor(OperandTy),
                               StdTypeInfoPtrTy);
}
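
// Illustrative note (not part of the original source): the three cases above
// correspond to
//   typeid(int)       // type operand: direct RTTI descriptor
//   typeid(*polyPtr)  // potentially-evaluated polymorphic glvalue: vtable load
//   typeid(nonPoly)   // otherwise: RTTI descriptor of the static type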

static llvm::Value *EmitDynamicCastToNull(CodeGenFunction &CGF,
                                          QualType DestTy) {
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  if (DestTy->isPointerType())
    return llvm::Constant::getNullValue(DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  if (!CGF.CGM.getCXXABI().EmitBadCastCall(CGF))
    return nullptr;

  CGF.EmitBlock(CGF.createBasicBlock("dynamic_cast.end"));
  return llvm::UndefValue::get(DestLTy);
}

llvm::Value *CodeGenFunction::EmitDynamicCast(Address ThisAddr,
                                              const CXXDynamicCastExpr *DCE) {
  CGM.EmitExplicitCastExprType(DCE, this);
  QualType DestTy = DCE->getTypeAsWritten();

  QualType SrcTy = DCE->getSubExpr()->getType();

  // C++ [expr.dynamic.cast]p7:
  //   If T is "pointer to cv void," then the result is a pointer to the most
  //   derived object pointed to by v.
  const PointerType *DestPTy = DestTy->getAs<PointerType>();

  bool isDynamicCastToVoid;
  QualType SrcRecordTy;
  QualType DestRecordTy;
  if (DestPTy) {
    isDynamicCastToVoid = DestPTy->getPointeeType()->isVoidType();
    SrcRecordTy = SrcTy->castAs<PointerType>()->getPointeeType();
    DestRecordTy = DestPTy->getPointeeType();
  } else {
    isDynamicCastToVoid = false;
    SrcRecordTy = SrcTy;
    DestRecordTy = DestTy->castAs<ReferenceType>()->getPointeeType();
  }

  // C++ [class.cdtor]p5:
  //   If the operand of the dynamic_cast refers to the object under
  //   construction or destruction and the static type of the operand is not a
  //   pointer to or object of the constructor or destructor's own class or
  //   one of its bases, the dynamic_cast results in undefined behavior.
  EmitTypeCheck(TCK_DynamicOperation, DCE->getExprLoc(), ThisAddr.getPointer(),
                SrcRecordTy);

  if (DCE->isAlwaysNull())
    if (llvm::Value *T = EmitDynamicCastToNull(*this, DestTy))
      return T;

  assert(SrcRecordTy->isRecordType() && "source type must be a record type!");

  // C++ [expr.dynamic.cast]p4:
  //   If the value of v is a null pointer value in the pointer case, the
  //   result is the null pointer value of type T.
  bool ShouldNullCheckSrcValue =
      CGM.getCXXABI().shouldDynamicCastCallBeNullChecked(SrcTy->isPointerType(),
                                                         SrcRecordTy);

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = createBasicBlock("dynamic_cast.end");

  if (ShouldNullCheckSrcValue) {
    CastNull = createBasicBlock("dynamic_cast.null");
    CastNotNull = createBasicBlock("dynamic_cast.notnull");

    llvm::Value *IsNull = Builder.CreateIsNull(ThisAddr.getPointer());
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *Value;
  if (isDynamicCastToVoid) {
    Value = CGM.getCXXABI().EmitDynamicCastToVoid(*this, ThisAddr, SrcRecordTy,
                                                  DestTy);
  } else {
    assert(DestRecordTy->isRecordType() &&
           "destination type must be a record type!");
    Value = CGM.getCXXABI().EmitDynamicCastCall(*this, ThisAddr, SrcRecordTy,
                                                DestTy, DestRecordTy, CastEnd);
    CastNotNull = Builder.GetInsertBlock();
  }

  if (ShouldNullCheckSrcValue) {
    EmitBranch(CastEnd);
    EmitBlock(CastNull);
    EmitBranch(CastEnd);
  }

  EmitBlock(CastEnd);

  if (ShouldNullCheckSrcValue) {
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    Value = PHI;
  }

  return Value;
}
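
// Illustrative note (not part of the original source): for a pointer cast
//   Derived *d = dynamic_cast<Derived *>(b);
// the code above null-checks b, performs the ABI runtime cast on the
// non-null path, and merges the two paths with a phi; for the reference form
// there is no null result, and a failed cast ends in std::bad_cast via the
// ABI's bad-cast path instead.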