//===--- CGExprCXX.cpp - Emit LLVM Code for C++ expressions ---------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with code generation of C++ expressions
//
//===----------------------------------------------------------------------===//

#include "CGCUDARuntime.h"
#include "CGCXXABI.h"
#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "CodeGenFunction.h"
#include "ConstantEmitter.h"
#include "TargetInfo.h"
#include "clang/Basic/CodeGenOptions.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "llvm/IR/Intrinsics.h"

using namespace clang;
using namespace CodeGen;

namespace {
struct MemberCallInfo {
  RequiredArgs ReqArgs;
  // Number of prefix arguments for the call. Ignores the `this` pointer.
  unsigned PrefixSize;
};
}
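
// Emit the `this` pointer, any implicit parameter (e.g. the VTT), and the
// remaining call arguments into Args, and compute the RequiredArgs and
// prefix-argument count needed to arrange the call.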
static MemberCallInfo
commonEmitCXXMemberOrOperatorCall(CodeGenFunction &CGF, GlobalDecl GD,
                                  llvm::Value *This, llvm::Value *ImplicitParam,
                                  QualType ImplicitParamTy, const CallExpr *CE,
                                  CallArgList &Args, CallArgList *RtlArgs) {
  auto *MD = cast<CXXMethodDecl>(GD.getDecl());

  assert(CE == nullptr || isa<CXXMemberCallExpr>(CE) ||
         isa<CXXOperatorCallExpr>(CE));
  assert(MD->isInstance() &&
         "Trying to emit a member or operator call expr on a static method!");

  // Push the this ptr.
  const CXXRecordDecl *RD =
      CGF.CGM.getCXXABI().getThisArgumentTypeForMethod(GD);
  Args.add(RValue::get(This), CGF.getTypes().DeriveThisType(RD, MD));

  // If there is an implicit parameter (e.g. VTT), emit it.
  if (ImplicitParam) {
    Args.add(RValue::get(ImplicitParam), ImplicitParamTy);
  }

  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, Args.size());
  unsigned PrefixSize = Args.size() - 1;

  // And the rest of the call args.
  if (RtlArgs) {
    // Special case: if the caller emitted the arguments right-to-left already
    // (prior to emitting the *this argument), we're done. This happens for
    // assignment operators.
    Args.addFrom(*RtlArgs);
  } else if (CE) {
    // Special case: skip first argument of CXXOperatorCall (it is "this").
    unsigned ArgsToSkip = isa<CXXOperatorCallExpr>(CE) ? 1 : 0;
    CGF.EmitCallArgs(Args, FPT, drop_begin(CE->arguments(), ArgsToSkip),
                     CE->getDirectCallee());
  } else {
    assert(
        FPT->getNumParams() == 0 &&
        "No CallExpr specified for function with non-zero number of arguments");
  }
  return {required, PrefixSize};
}
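
// Emit a call to a non-static member function or overloaded operator,
// forwarding any arguments the caller already emitted right-to-left via
// RtlArgs.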
RValue CodeGenFunction::EmitCXXMemberOrOperatorCall(
    const CXXMethodDecl *MD, const CGCallee &Callee,
    ReturnValueSlot ReturnValue,
    llvm::Value *This, llvm::Value *ImplicitParam, QualType ImplicitParamTy,
    const CallExpr *CE, CallArgList *RtlArgs) {
  const FunctionProtoType *FPT = MD->getType()->castAs<FunctionProtoType>();
  CallArgList Args;
  MemberCallInfo CallInfo = commonEmitCXXMemberOrOperatorCall(
      *this, MD, This, ImplicitParam, ImplicitParamTy, CE, Args, RtlArgs);
  auto &FnInfo = CGM.getTypes().arrangeCXXMethodCall(
      Args, FPT, CallInfo.ReqArgs, CallInfo.PrefixSize);
  return EmitCall(FnInfo, Callee, ReturnValue, Args, nullptr,
                  CE && CE == MustTailCall,
                  CE ? CE->getExprLoc() : SourceLocation());
}

RValue CodeGenFunction::EmitCXXDestructorCall(
    GlobalDecl Dtor, const CGCallee &Callee, llvm::Value *This, QualType ThisTy,
    llvm::Value *ImplicitParam, QualType ImplicitParamTy, const CallExpr *CE) {
  const CXXMethodDecl *DtorDecl = cast<CXXMethodDecl>(Dtor.getDecl());

  assert(!ThisTy.isNull());
  assert(ThisTy->getAsCXXRecordDecl() == DtorDecl->getParent() &&
         "Pointer/Object mixup");

  LangAS SrcAS = ThisTy.getAddressSpace();
  LangAS DstAS = DtorDecl->getMethodQualifiers().getAddressSpace();
  if (SrcAS != DstAS) {
    QualType DstTy = DtorDecl->getThisType();
    llvm::Type *NewType = CGM.getTypes().ConvertType(DstTy);
    This = getTargetHooks().performAddrSpaceCast(*this, This, SrcAS, DstAS,
                                                 NewType);
  }

  CallArgList Args;
  commonEmitCXXMemberOrOperatorCall(*this, Dtor, This, ImplicitParam,
                                    ImplicitParamTy, CE, Args, nullptr);
  return EmitCall(CGM.getTypes().arrangeCXXStructorDeclaration(Dtor), Callee,
                  ReturnValueSlot(), Args, nullptr, CE && CE == MustTailCall,
                  CE ? CE->getExprLoc() : SourceLocation());
}

RValue CodeGenFunction::EmitCXXPseudoDestructorExpr(
    const CXXPseudoDestructorExpr *E) {
  QualType DestroyedType = E->getDestroyedType();
  if (DestroyedType.hasStrongOrWeakObjCLifetime()) {
    // Automatic Reference Counting:
    //   If the pseudo-expression names a retainable object with weak or
    //   strong lifetime, the object shall be released.
    Expr *BaseExpr = E->getBase();
    Address BaseValue = Address::invalid();
    Qualifiers BaseQuals;

    // If this is s.x, emit s as an lvalue. If it is s->x, emit s as a scalar.
    if (E->isArrow()) {
      BaseValue = EmitPointerWithAlignment(BaseExpr);
      const auto *PTy = BaseExpr->getType()->castAs<PointerType>();
      BaseQuals = PTy->getPointeeType().getQualifiers();
    } else {
      LValue BaseLV = EmitLValue(BaseExpr);
      BaseValue = BaseLV.getAddress(*this);
      QualType BaseTy = BaseExpr->getType();
      BaseQuals = BaseTy.getQualifiers();
    }

    switch (DestroyedType.getObjCLifetime()) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong:
      EmitARCRelease(Builder.CreateLoad(BaseValue,
                                        DestroyedType.isVolatileQualified()),
                     ARCPreciseLifetime);
      break;

    case Qualifiers::OCL_Weak:
      EmitARCDestroyWeak(BaseValue);
      break;
    }
  } else {
    // C++ [expr.pseudo]p1:
    //   The result shall only be used as the operand for the function call
    //   operator (), and the result of such a call has type void. The only
    //   effect is the evaluation of the postfix-expression before the dot or
    //   arrow.
    EmitIgnoredExpr(E->getBase());
  }

  return RValue::get(nullptr);
}
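
// Return the CXXRecordDecl for the record type of E, looking through a
// pointer type if E has one.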
static CXXRecordDecl *getCXXRecord(const Expr *E) {
  QualType T = E->getType();
  if (const PointerType *PTy = T->getAs<PointerType>())
    T = PTy->getPointeeType();
  const RecordType *Ty = T->castAs<RecordType>();
  return cast<CXXRecordDecl>(Ty->getDecl());
}

// Note: This function also emits constructor calls to support the MSVC
// extension that allows explicit constructor function calls.
RValue CodeGenFunction::EmitCXXMemberCallExpr(const CXXMemberCallExpr *CE,
                                              ReturnValueSlot ReturnValue) {
  const Expr *callee = CE->getCallee()->IgnoreParens();

  if (isa<BinaryOperator>(callee))
    return EmitCXXMemberPointerCallExpr(CE, ReturnValue);

  const MemberExpr *ME = cast<MemberExpr>(callee);
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(ME->getMemberDecl());

  if (MD->isStatic()) {
    // The method is static, emit it as we would a regular call.
    CGCallee callee =
        CGCallee::forDirect(CGM.GetAddrOfFunction(MD), GlobalDecl(MD));
    return EmitCall(getContext().getPointerType(MD->getType()), callee, CE,
                    ReturnValue);
  }

  bool HasQualifier = ME->hasQualifier();
  NestedNameSpecifier *Qualifier = HasQualifier ? ME->getQualifier() : nullptr;
  bool IsArrow = ME->isArrow();
  const Expr *Base = ME->getBase();

  return EmitCXXMemberOrOperatorMemberCallExpr(
      CE, MD, ReturnValue, HasQualifier, Qualifier, IsArrow, Base);
}

RValue CodeGenFunction::EmitCXXMemberOrOperatorMemberCallExpr(
    const CallExpr *CE, const CXXMethodDecl *MD, ReturnValueSlot ReturnValue,
    bool HasQualifier, NestedNameSpecifier *Qualifier, bool IsArrow,
    const Expr *Base) {
  assert(isa<CXXMemberCallExpr>(CE) || isa<CXXOperatorCallExpr>(CE));

  // Compute the object pointer.
  bool CanUseVirtualCall = MD->isVirtual() && !HasQualifier;

  const CXXMethodDecl *DevirtualizedMethod = nullptr;
  if (CanUseVirtualCall &&
      MD->getDevirtualizedMethod(Base, getLangOpts().AppleKext)) {
    const CXXRecordDecl *BestDynamicDecl = Base->getBestDynamicClassType();
    DevirtualizedMethod = MD->getCorrespondingMethodInClass(BestDynamicDecl);
    assert(DevirtualizedMethod);
    const CXXRecordDecl *DevirtualizedClass = DevirtualizedMethod->getParent();
    const Expr *Inner = Base->IgnoreParenBaseCasts();
    if (DevirtualizedMethod->getReturnType().getCanonicalType() !=
        MD->getReturnType().getCanonicalType())
      // If the return types are not the same, this might be a case where more
      // code needs to run to compensate for it. For example, the derived
      // method might return a type that inherits from the return type of MD
      // and has a prefix.
      // For now we just avoid devirtualizing these covariant cases.
      DevirtualizedMethod = nullptr;
    else if (getCXXRecord(Inner) == DevirtualizedClass)
      // If the class of the Inner expression is where the dynamic method
      // is defined, build the this pointer from it.
      Base = Inner;
    else if (getCXXRecord(Base) != DevirtualizedClass) {
      // If the method is defined in a class that is not the best dynamic
      // one or the one of the full expression, we would have to build
      // a derived-to-base cast to compute the correct this pointer, but
      // we don't have support for that yet, so do a virtual call.
      DevirtualizedMethod = nullptr;
    }
  }

  bool TrivialForCodegen =
      MD->isTrivial() || (MD->isDefaulted() && MD->getParent()->isUnion());
  bool TrivialAssignment =
      TrivialForCodegen &&
      (MD->isCopyAssignmentOperator() || MD->isMoveAssignmentOperator()) &&
      !MD->getParent()->mayInsertExtraPadding();

  // C++17 demands that we evaluate the RHS of a (possibly-compound) assignment
  // operator before the LHS.
  CallArgList RtlArgStorage;
  CallArgList *RtlArgs = nullptr;
  LValue TrivialAssignmentRHS;
  if (auto *OCE = dyn_cast<CXXOperatorCallExpr>(CE)) {
    if (OCE->isAssignmentOp()) {
      if (TrivialAssignment) {
        TrivialAssignmentRHS = EmitLValue(CE->getArg(1));
      } else {
        RtlArgs = &RtlArgStorage;
        EmitCallArgs(*RtlArgs, MD->getType()->castAs<FunctionProtoType>(),
                     drop_begin(CE->arguments(), 1), CE->getDirectCallee(),
                     /*ParamsToSkip*/0, EvaluationOrder::ForceRightToLeft);
      }
    }
  }

  LValue This;
  if (IsArrow) {
    LValueBaseInfo BaseInfo;
    TBAAAccessInfo TBAAInfo;
    Address ThisValue = EmitPointerWithAlignment(Base, &BaseInfo, &TBAAInfo);
    This = MakeAddrLValue(ThisValue, Base->getType(), BaseInfo, TBAAInfo);
  } else {
    This = EmitLValue(Base);
  }

  if (const CXXConstructorDecl *Ctor = dyn_cast<CXXConstructorDecl>(MD)) {
    // This is the MSVC p->Ctor::Ctor(...) extension. We assume that's
    // constructing a new complete object of type Ctor.
    assert(!RtlArgs);
    assert(ReturnValue.isNull() && "Constructor shouldn't have return value");
    CallArgList Args;
    commonEmitCXXMemberOrOperatorCall(
        *this, {Ctor, Ctor_Complete}, This.getPointer(*this),
        /*ImplicitParam=*/nullptr,
        /*ImplicitParamTy=*/QualType(), CE, Args, nullptr);

    EmitCXXConstructorCall(Ctor, Ctor_Complete, /*ForVirtualBase=*/false,
                           /*Delegating=*/false, This.getAddress(*this), Args,
                           AggValueSlot::DoesNotOverlap, CE->getExprLoc(),
                           /*NewPointerIsChecked=*/false);
    return RValue::get(nullptr);
  }

  if (TrivialForCodegen) {
    if (isa<CXXDestructorDecl>(MD))
      return RValue::get(nullptr);

    if (TrivialAssignment) {
      // We don't like to generate the trivial copy/move assignment operator
      // when it isn't necessary; just produce the proper effect here.
      // It's important that we use the result of EmitLValue here rather than
      // emitting call arguments, in order to preserve TBAA information from
      // the RHS.
      LValue RHS = isa<CXXOperatorCallExpr>(CE)
                       ? TrivialAssignmentRHS
                       : EmitLValue(*CE->arg_begin());
      EmitAggregateAssign(This, RHS, CE->getType());
      return RValue::get(This.getPointer(*this));
    }

    assert(MD->getParent()->mayInsertExtraPadding() &&
           "unknown trivial member function");
  }

  // Compute the function type we're calling.
  const CXXMethodDecl *CalleeDecl =
      DevirtualizedMethod ? DevirtualizedMethod : MD;
  const CGFunctionInfo *FInfo = nullptr;
  if (const auto *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl))
    FInfo = &CGM.getTypes().arrangeCXXStructorDeclaration(
        GlobalDecl(Dtor, Dtor_Complete));
  else
    FInfo = &CGM.getTypes().arrangeCXXMethodDeclaration(CalleeDecl);

  llvm::FunctionType *Ty = CGM.getTypes().GetFunctionType(*FInfo);

  // C++11 [class.mfct.non-static]p2:
  //   If a non-static member function of a class X is called for an object that
  //   is not of type X, or of a type derived from X, the behavior is undefined.
  SourceLocation CallLoc;
  ASTContext &C = getContext();
  if (CE)
    CallLoc = CE->getExprLoc();

  SanitizerSet SkippedChecks;
  if (const auto *CMCE = dyn_cast<CXXMemberCallExpr>(CE)) {
    auto *IOA = CMCE->getImplicitObjectArgument();
    bool IsImplicitObjectCXXThis = IsWrappedCXXThis(IOA);
    if (IsImplicitObjectCXXThis)
      SkippedChecks.set(SanitizerKind::Alignment, true);
    if (IsImplicitObjectCXXThis || isa<DeclRefExpr>(IOA))
      SkippedChecks.set(SanitizerKind::Null, true);
  }

  EmitTypeCheck(CodeGenFunction::TCK_MemberCall, CallLoc,
                This.getPointer(*this),
                C.getRecordType(CalleeDecl->getParent()),
                /*Alignment=*/CharUnits::Zero(), SkippedChecks);

  // C++ [class.virtual]p12:
  //   Explicit qualification with the scope operator (5.1) suppresses the
  //   virtual call mechanism.
  //
  // We also don't emit a virtual call if the base expression has a record type
  // because then we know what the type is.
  bool UseVirtualCall = CanUseVirtualCall && !DevirtualizedMethod;

  if (const CXXDestructorDecl *Dtor = dyn_cast<CXXDestructorDecl>(CalleeDecl)) {
    assert(CE->arg_begin() == CE->arg_end() &&
           "Destructor shouldn't have explicit parameters");
    assert(ReturnValue.isNull() && "Destructor shouldn't have return value");
    if (UseVirtualCall) {
      CGM.getCXXABI().EmitVirtualDestructorCall(*this, Dtor, Dtor_Complete,
                                                This.getAddress(*this),
                                                cast<CXXMemberCallExpr>(CE));
    } else {
      GlobalDecl GD(Dtor, Dtor_Complete);
      CGCallee Callee;
      if (getLangOpts().AppleKext && Dtor->isVirtual() && HasQualifier)
        Callee = BuildAppleKextVirtualCall(Dtor, Qualifier, Ty);
      else if (!DevirtualizedMethod)
        Callee =
            CGCallee::forDirect(CGM.getAddrOfCXXStructor(GD, FInfo, Ty), GD);
      else {
        Callee = CGCallee::forDirect(CGM.GetAddrOfFunction(GD, Ty), GD);
      }

      QualType ThisTy =
          IsArrow ? Base->getType()->getPointeeType() : Base->getType();
      EmitCXXDestructorCall(GD, Callee, This.getPointer(*this), ThisTy,
                            /*ImplicitParam=*/nullptr,
                            /*ImplicitParamTy=*/QualType(), CE);
    }
    return RValue::get(nullptr);
  }

  // FIXME: Uses of 'MD' past this point need to be audited. We may need to use
  // 'CalleeDecl' instead.

  CGCallee Callee;
  if (UseVirtualCall) {
    Callee = CGCallee::forVirtual(CE, MD, This.getAddress(*this), Ty);
  } else {
    if (SanOpts.has(SanitizerKind::CFINVCall) &&
        MD->getParent()->isDynamicClass()) {
      llvm::Value *VTable;
      const CXXRecordDecl *RD;
      std::tie(VTable, RD) = CGM.getCXXABI().LoadVTablePtr(
          *this, This.getAddress(*this), CalleeDecl->getParent());
      EmitVTablePtrCheckForCall(RD, VTable, CFITCK_NVCall, CE->getBeginLoc());
    }

    if (getLangOpts().AppleKext && MD->isVirtual() && HasQualifier)
      Callee = BuildAppleKextVirtualCall(MD, Qualifier, Ty);
    else if (!DevirtualizedMethod)
      Callee =
          CGCallee::forDirect(CGM.GetAddrOfFunction(MD, Ty), GlobalDecl(MD));
    else {
      Callee =
          CGCallee::forDirect(CGM.GetAddrOfFunction(DevirtualizedMethod, Ty),
                              GlobalDecl(DevirtualizedMethod));
    }
  }

  if (MD->isVirtual()) {
    Address NewThisAddr =
        CGM.getCXXABI().adjustThisArgumentForVirtualFunctionCall(
            *this, CalleeDecl, This.getAddress(*this), UseVirtualCall);
    This.setAddress(NewThisAddr);
  }

  return EmitCXXMemberOrOperatorCall(
      CalleeDecl, Callee, ReturnValue, This.getPointer(*this),
      /*ImplicitParam=*/nullptr, QualType(), CE, RtlArgs);
}

RValue
CodeGenFunction::EmitCXXMemberPointerCallExpr(const CXXMemberCallExpr *E,
                                              ReturnValueSlot ReturnValue) {
  const BinaryOperator *BO =
      cast<BinaryOperator>(E->getCallee()->IgnoreParens());
  const Expr *BaseExpr = BO->getLHS();
  const Expr *MemFnExpr = BO->getRHS();

  const auto *MPT = MemFnExpr->getType()->castAs<MemberPointerType>();
  const auto *FPT = MPT->getPointeeType()->castAs<FunctionProtoType>();
  const auto *RD =
      cast<CXXRecordDecl>(MPT->getClass()->castAs<RecordType>()->getDecl());

  // Emit the 'this' pointer.
  Address This = Address::invalid();
  if (BO->getOpcode() == BO_PtrMemI)
    This = EmitPointerWithAlignment(BaseExpr);
  else
    This = EmitLValue(BaseExpr).getAddress(*this);

  EmitTypeCheck(TCK_MemberCall, E->getExprLoc(), This.getPointer(),
                QualType(MPT->getClass(), 0));

  // Get the member function pointer.
  llvm::Value *MemFnPtr = EmitScalarExpr(MemFnExpr);

  // Ask the ABI to load the callee. Note that This is modified.
  llvm::Value *ThisPtrForCall = nullptr;
  CGCallee Callee =
      CGM.getCXXABI().EmitLoadOfMemberFunctionPointer(*this, BO, This,
                                                      ThisPtrForCall, MemFnPtr,
                                                      MPT);

  CallArgList Args;

  QualType ThisType =
      getContext().getPointerType(getContext().getTagDeclType(RD));

  // Push the this ptr.
  Args.add(RValue::get(ThisPtrForCall), ThisType);

  RequiredArgs required = RequiredArgs::forPrototypePlus(FPT, 1);

  // And the rest of the call args
  EmitCallArgs(Args, FPT, E->arguments());
  return EmitCall(CGM.getTypes().arrangeCXXMethodCall(Args, FPT, required,
                                                      /*PrefixSize=*/0),
                  Callee, ReturnValue, Args, nullptr, E == MustTailCall,
                  E->getExprLoc());
}

RValue
CodeGenFunction::EmitCXXOperatorMemberCallExpr(const CXXOperatorCallExpr *E,
                                               const CXXMethodDecl *MD,
                                               ReturnValueSlot ReturnValue) {
  assert(MD->isInstance() &&
         "Trying to emit a member call expr on a static method!");
  return EmitCXXMemberOrOperatorMemberCallExpr(
      E, MD, ReturnValue, /*HasQualifier=*/false, /*Qualifier=*/nullptr,
      /*IsArrow=*/false, E->getArg(0));
}

RValue CodeGenFunction::EmitCUDAKernelCallExpr(const CUDAKernelCallExpr *E,
                                               ReturnValueSlot ReturnValue) {
  return CGM.getCUDARuntime().EmitCUDAKernelCallExpr(*this, E, ReturnValue);
}
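
// Zero-initialize the non-virtual storage of a base-class subobject, skipping
// any vbptr slots: those are initialized by the most derived class, not here.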
static void EmitNullBaseClassInitialization(CodeGenFunction &CGF,
                                            Address DestPtr,
                                            const CXXRecordDecl *Base) {
  if (Base->isEmpty())
    return;

  DestPtr = CGF.Builder.CreateElementBitCast(DestPtr, CGF.Int8Ty);

  const ASTRecordLayout &Layout = CGF.getContext().getASTRecordLayout(Base);
  CharUnits NVSize = Layout.getNonVirtualSize();

  // We cannot simply zero-initialize the entire base sub-object if vbptrs are
  // present: they are initialized by the most derived class before calling
  // the constructor.
  SmallVector<std::pair<CharUnits, CharUnits>, 1> Stores;
  Stores.emplace_back(CharUnits::Zero(), NVSize);

  // Each store is split by the existence of a vbptr.
  CharUnits VBPtrWidth = CGF.getPointerSize();
  std::vector<CharUnits> VBPtrOffsets =
      CGF.CGM.getCXXABI().getVBPtrOffsets(Base);
  for (CharUnits VBPtrOffset : VBPtrOffsets) {
    // Stop before we hit any virtual base pointers located in virtual bases.
    if (VBPtrOffset >= NVSize)
      break;
    std::pair<CharUnits, CharUnits> LastStore = Stores.pop_back_val();
    CharUnits LastStoreOffset = LastStore.first;
    CharUnits LastStoreSize = LastStore.second;

    CharUnits SplitBeforeOffset = LastStoreOffset;
    CharUnits SplitBeforeSize = VBPtrOffset - SplitBeforeOffset;
    assert(!SplitBeforeSize.isNegative() && "negative store size!");
    if (!SplitBeforeSize.isZero())
      Stores.emplace_back(SplitBeforeOffset, SplitBeforeSize);

    CharUnits SplitAfterOffset = VBPtrOffset + VBPtrWidth;
    CharUnits SplitAfterSize = LastStoreSize - SplitAfterOffset;
    assert(!SplitAfterSize.isNegative() && "negative store size!");
    if (!SplitAfterSize.isZero())
      Stores.emplace_back(SplitAfterOffset, SplitAfterSize);
  }

  // If the type contains a pointer to data member we can't memset it to zero.
  // Instead, create a null constant and copy it to the destination.
  // TODO: there are other patterns besides zero that we can usefully memset,
  // like -1, which happens to be the pattern used by member-pointers.
  // TODO: isZeroInitializable can be over-conservative in the case where a
  // virtual base contains a member pointer.
  llvm::Constant *NullConstantForBase = CGF.CGM.EmitNullConstantForBase(Base);
  if (!NullConstantForBase->isNullValue()) {
    llvm::GlobalVariable *NullVariable = new llvm::GlobalVariable(
        CGF.CGM.getModule(), NullConstantForBase->getType(),
        /*isConstant=*/true, llvm::GlobalVariable::PrivateLinkage,
        NullConstantForBase, Twine());

    CharUnits Align =
        std::max(Layout.getNonVirtualAlignment(), DestPtr.getAlignment());
    NullVariable->setAlignment(Align.getAsAlign());

    Address SrcPtr =
        Address(CGF.EmitCastToVoidPtr(NullVariable), CGF.Int8Ty, Align);

    // Get and call the appropriate llvm.memcpy overload.
    for (std::pair<CharUnits, CharUnits> Store : Stores) {
      CharUnits StoreOffset = Store.first;
      CharUnits StoreSize = Store.second;
      llvm::Value *StoreSizeVal = CGF.CGM.getSize(StoreSize);
      CGF.Builder.CreateMemCpy(
          CGF.Builder.CreateConstInBoundsByteGEP(DestPtr, StoreOffset),
          CGF.Builder.CreateConstInBoundsByteGEP(SrcPtr, StoreOffset),
          StoreSizeVal);
    }

  // Otherwise, just memset the whole thing to zero. This is legal
  // because in LLVM, all default initializers (other than the ones we just
  // handled above) are guaranteed to have a bit pattern of all zeros.
  } else {
    for (std::pair<CharUnits, CharUnits> Store : Stores) {
      CharUnits StoreOffset = Store.first;
      CharUnits StoreSize = Store.second;
      llvm::Value *StoreSizeVal = CGF.CGM.getSize(StoreSize);
      CGF.Builder.CreateMemSet(
          CGF.Builder.CreateConstInBoundsByteGEP(DestPtr, StoreOffset),
          CGF.Builder.getInt8(0), StoreSizeVal);
    }
  }
}

void
CodeGenFunction::EmitCXXConstructExpr(const CXXConstructExpr *E,
                                      AggValueSlot Dest) {
  assert(!Dest.isIgnored() && "Must have a destination!");
  const CXXConstructorDecl *CD = E->getConstructor();

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now, unless the destination is
  // already zeroed.
  if (E->requiresZeroInitialization() && !Dest.isZeroed()) {
    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
    case CXXConstructExpr::CK_Complete:
      EmitNullInitialization(Dest.getAddress(), E->getType());
      break;
    case CXXConstructExpr::CK_VirtualBase:
    case CXXConstructExpr::CK_NonVirtualBase:
      EmitNullBaseClassInitialization(*this, Dest.getAddress(),
                                      CD->getParent());
      break;
    }
  }

  // If this is a call to a trivial default constructor, do nothing.
  if (CD->isTrivial() && CD->isDefaultConstructor())
    return;

  // Elide the constructor if we're constructing from a temporary.
  if (getLangOpts().ElideConstructors && E->isElidable()) {
    // FIXME: This only handles the simplest case, where the source object
    // is passed directly as the first argument to the constructor.
    // This should also handle stepping through implicit casts and
    // conversion sequences which involve two steps, with a
    // conversion operator followed by a converting constructor.
    const Expr *SrcObj = E->getArg(0);
    assert(SrcObj->isTemporaryObject(getContext(), CD->getParent()));
    assert(
        getContext().hasSameUnqualifiedType(E->getType(), SrcObj->getType()));
    EmitAggExpr(SrcObj, Dest);
    return;
  }

  if (const ArrayType *arrayType
        = getContext().getAsArrayType(E->getType())) {
    EmitCXXAggrConstructorCall(CD, arrayType, Dest.getAddress(), E,
                               Dest.isSanitizerChecked());
  } else {
    CXXCtorType Type = Ctor_Complete;
    bool ForVirtualBase = false;
    bool Delegating = false;

    switch (E->getConstructionKind()) {
    case CXXConstructExpr::CK_Delegating:
      // We should be emitting a constructor; GlobalDecl will assert this
      Type = CurGD.getCtorType();
      Delegating = true;
      break;

    case CXXConstructExpr::CK_Complete:
      Type = Ctor_Complete;
      break;

    case CXXConstructExpr::CK_VirtualBase:
      ForVirtualBase = true;
      [[fallthrough]];

    case CXXConstructExpr::CK_NonVirtualBase:
      Type = Ctor_Base;
    }

    // Call the constructor.
    EmitCXXConstructorCall(CD, Type, ForVirtualBase, Delegating, Dest, E);
  }
}

void CodeGenFunction::EmitSynthesizedCXXCopyCtor(Address Dest, Address Src,
                                                 const Expr *Exp) {
  if (const ExprWithCleanups *E = dyn_cast<ExprWithCleanups>(Exp))
    Exp = E->getSubExpr();
  assert(isa<CXXConstructExpr>(Exp) &&
         "EmitSynthesizedCXXCopyCtor - unknown copy ctor expr");
  const CXXConstructExpr *E = cast<CXXConstructExpr>(Exp);
  const CXXConstructorDecl *CD = E->getConstructor();
  RunCleanupsScope Scope(*this);

  // If we require zero initialization before (or instead of) calling the
  // constructor, as can be the case with a non-user-provided default
  // constructor, emit the zero initialization now.
  // FIXME: Do we still need this for copy ctor synthesis?
  if (E->requiresZeroInitialization())
    EmitNullInitialization(Dest, E->getType());

  assert(!getContext().getAsConstantArrayType(E->getType())
         && "EmitSynthesizedCXXCopyCtor - Copied-in Array");
  EmitSynthesizedCXXCopyCtorCall(CD, Dest, Src, E);
}

static CharUnits CalculateCookiePadding(CodeGenFunction &CGF,
                                        const CXXNewExpr *E) {
  if (!E->isArray())
    return CharUnits::Zero();

  // No cookie is required if the operator new[] being used is the
  // reserved placement operator new[].
  if (E->getOperatorNew()->isReservedGlobalPlacementOperator())
    return CharUnits::Zero();

  return CGF.CGM.getCXXABI().GetArrayCookieSize(E);
}
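
// Compute the total allocation size for a new-expression, emitting any
// overflow checks needed. On return, numElements holds the flattened element
// count and sizeWithoutCookie the size minus any array cookie; when the
// computation can overflow, the returned size is forced to all-ones so that
// operator new will fail.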
static llvm::Value *EmitCXXNewAllocSize(CodeGenFunction &CGF,
                                        const CXXNewExpr *e,
                                        unsigned minElements,
                                        llvm::Value *&numElements,
                                        llvm::Value *&sizeWithoutCookie) {
  QualType type = e->getAllocatedType();

  if (!e->isArray()) {
    CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
    sizeWithoutCookie
      = llvm::ConstantInt::get(CGF.SizeTy, typeSize.getQuantity());
    return sizeWithoutCookie;
  }

  // The width of size_t.
  unsigned sizeWidth = CGF.SizeTy->getBitWidth();

  // Figure out the cookie size.
  llvm::APInt cookieSize(sizeWidth,
                         CalculateCookiePadding(CGF, e).getQuantity());

  // Emit the array size expression.
  // We multiply the size of all dimensions for NumElements.
  // e.g. for 'int[2][3]', ElemType is 'int' and NumElements is 6.
  numElements =
    ConstantEmitter(CGF).tryEmitAbstract(*e->getArraySize(), e->getType());
  if (!numElements)
    numElements = CGF.EmitScalarExpr(*e->getArraySize());
  assert(isa<llvm::IntegerType>(numElements->getType()));

  // The number of elements can have an arbitrary integer type;
  // essentially, we need to multiply it by a constant factor, add a
  // cookie size, and verify that the result is representable as a
  // size_t. That's just a gloss, though, and it's wrong in one
  // important way: if the count is negative, it's an error even if
  // the cookie size would bring the total size >= 0.
  bool isSigned
    = (*e->getArraySize())->getType()->isSignedIntegerOrEnumerationType();
  llvm::IntegerType *numElementsType
    = cast<llvm::IntegerType>(numElements->getType());
  unsigned numElementsWidth = numElementsType->getBitWidth();

  // Compute the constant factor.
  llvm::APInt arraySizeMultiplier(sizeWidth, 1);
  while (const ConstantArrayType *CAT
             = CGF.getContext().getAsConstantArrayType(type)) {
    type = CAT->getElementType();
    arraySizeMultiplier *= CAT->getSize();
  }

  CharUnits typeSize = CGF.getContext().getTypeSizeInChars(type);
  llvm::APInt typeSizeMultiplier(sizeWidth, typeSize.getQuantity());
  typeSizeMultiplier *= arraySizeMultiplier;

  // This will be a size_t.
  llvm::Value *size;

  // If someone is doing 'new int[42]' there is no need to do a dynamic check.
  // Don't bloat the -O0 code.
  if (llvm::ConstantInt *numElementsC =
        dyn_cast<llvm::ConstantInt>(numElements)) {
    const llvm::APInt &count = numElementsC->getValue();

    bool hasAnyOverflow = false;

    // If 'count' was a negative number, it's an overflow.
    if (isSigned && count.isNegative())
      hasAnyOverflow = true;

    // We want to do all this arithmetic in size_t. If numElements is
    // wider than that, check whether it's already too big, and if so,
    // overflow.
    else if (numElementsWidth > sizeWidth &&
             numElementsWidth - sizeWidth > count.countLeadingZeros())
      hasAnyOverflow = true;

    // Okay, compute a count at the right width.
    llvm::APInt adjustedCount = count.zextOrTrunc(sizeWidth);

    // If there is a brace-initializer, we cannot allocate fewer elements than
    // there are initializers. If we do, that's treated like an overflow.
    if (adjustedCount.ult(minElements))
      hasAnyOverflow = true;

    // Scale numElements by that. This might overflow, but we don't
    // care because it only overflows if allocationSize does, too, and
    // if that overflows then we shouldn't use this.
    numElements = llvm::ConstantInt::get(CGF.SizeTy,
                                         adjustedCount * arraySizeMultiplier);

    // Compute the size before cookie, and track whether it overflowed.
    bool overflow;
    llvm::APInt allocationSize
      = adjustedCount.umul_ov(typeSizeMultiplier, overflow);
    hasAnyOverflow |= overflow;

    // Add in the cookie, and check whether it's overflowed.
    if (cookieSize != 0) {
      // Save the current size without a cookie. This shouldn't be
      // used if there was overflow.
      sizeWithoutCookie = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);

      allocationSize = allocationSize.uadd_ov(cookieSize, overflow);
      hasAnyOverflow |= overflow;
    }

    // On overflow, produce a -1 so operator new will fail.
    if (hasAnyOverflow) {
      size = llvm::Constant::getAllOnesValue(CGF.SizeTy);
    } else {
      size = llvm::ConstantInt::get(CGF.SizeTy, allocationSize);
    }

  // Otherwise, we might need to use the overflow intrinsics.
  } else {
    // There are up to five conditions we need to test for:
    // 1) if isSigned, we need to check whether numElements is negative;
    // 2) if numElementsWidth > sizeWidth, we need to check whether
    //    numElements is larger than something representable in size_t;
    // 3) if minElements > 0, we need to check whether numElements is smaller
    //    than that;
    // 4) we need to compute
    //      sizeWithoutCookie := numElements * typeSizeMultiplier
    //    and check whether it overflows; and
    // 5) if we need a cookie, we need to compute
    //      size := sizeWithoutCookie + cookieSize
    //    and check whether it overflows.

    llvm::Value *hasOverflow = nullptr;

    // If numElementsWidth > sizeWidth, then one way or another, we're
    // going to have to do a comparison for (2), and this happens to
    // take care of (1), too.
    if (numElementsWidth > sizeWidth) {
      llvm::APInt threshold(numElementsWidth, 1);
      threshold <<= sizeWidth;

      llvm::Value *thresholdV
        = llvm::ConstantInt::get(numElementsType, threshold);

      hasOverflow = CGF.Builder.CreateICmpUGE(numElements, thresholdV);
      numElements = CGF.Builder.CreateTrunc(numElements, CGF.SizeTy);

    // Otherwise, if we're signed, we want to sext up to size_t.
    } else if (isSigned) {
      if (numElementsWidth < sizeWidth)
        numElements = CGF.Builder.CreateSExt(numElements, CGF.SizeTy);

      // If there's a non-1 type size multiplier, then we can do the
      // signedness check at the same time as we do the multiply
      // because a negative number times anything will cause an
      // unsigned overflow. Otherwise, we have to do it here. But at least
      // in this case, we can subsume the >= minElements check.
      if (typeSizeMultiplier == 1)
        hasOverflow = CGF.Builder.CreateICmpSLT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));

    // Otherwise, zext up to size_t if necessary.
    } else if (numElementsWidth < sizeWidth) {
      numElements = CGF.Builder.CreateZExt(numElements, CGF.SizeTy);
    }

    assert(numElements->getType() == CGF.SizeTy);

    if (minElements) {
      // Don't allow allocation of fewer elements than we have initializers.
      if (!hasOverflow) {
        hasOverflow = CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements));
      } else if (numElementsWidth > sizeWidth) {
        // The other existing overflow subsumes this check.
        // We do an unsigned comparison, since any signed value < -1 is
        // taken care of either above or below.
        hasOverflow = CGF.Builder.CreateOr(hasOverflow,
                          CGF.Builder.CreateICmpULT(numElements,
                              llvm::ConstantInt::get(CGF.SizeTy, minElements)));
      }
    }

    size = numElements;

    // Multiply by the type size if necessary. This multiplier
    // includes all the factors for nested arrays.
    //
    // This step also causes numElements to be scaled up by the
    // nested-array factor if necessary. Overflow on this computation
    // can be ignored because the result shouldn't be used if
    // allocation fails.
    if (typeSizeMultiplier != 1) {
      llvm::Function *umul_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::umul_with_overflow, CGF.SizeTy);

      llvm::Value *tsmV =
        llvm::ConstantInt::get(CGF.SizeTy, typeSizeMultiplier);
      llvm::Value *result =
          CGF.Builder.CreateCall(umul_with_overflow, {size, tsmV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);

      // Also scale up numElements by the array size multiplier.
      if (arraySizeMultiplier != 1) {
        // If the base element type size is 1, then we can re-use the
        // multiply we just did.
        if (typeSize.isOne()) {
          assert(arraySizeMultiplier == typeSizeMultiplier);
          numElements = size;

        // Otherwise we need a separate multiply.
        } else {
          llvm::Value *asmV =
            llvm::ConstantInt::get(CGF.SizeTy, arraySizeMultiplier);
          numElements = CGF.Builder.CreateMul(numElements, asmV);
        }
      }
    } else {
      // numElements doesn't need to be scaled.
      assert(arraySizeMultiplier == 1);
    }

    // Add in the cookie size if necessary.
    if (cookieSize != 0) {
      sizeWithoutCookie = size;

      llvm::Function *uadd_with_overflow
        = CGF.CGM.getIntrinsic(llvm::Intrinsic::uadd_with_overflow, CGF.SizeTy);

      llvm::Value *cookieSizeV = llvm::ConstantInt::get(CGF.SizeTy, cookieSize);
      llvm::Value *result =
          CGF.Builder.CreateCall(uadd_with_overflow, {size, cookieSizeV});

      llvm::Value *overflowed = CGF.Builder.CreateExtractValue(result, 1);
      if (hasOverflow)
        hasOverflow = CGF.Builder.CreateOr(hasOverflow, overflowed);
      else
        hasOverflow = overflowed;

      size = CGF.Builder.CreateExtractValue(result, 0);
    }

    // If we had any possibility of dynamic overflow, make a select to
    // overwrite 'size' with an all-ones value, which should cause
    // operator new to throw.
    if (hasOverflow)
      size = CGF.Builder.CreateSelect(hasOverflow,
                                 llvm::Constant::getAllOnesValue(CGF.SizeTy),
                                      size);
  }

  if (cookieSize == 0)
    sizeWithoutCookie = size;
  else
    assert(sizeWithoutCookie && "didn't set sizeWithoutCookie?");

  return size;
}
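
// Emit Init into the single array element at NewPtr, dispatching on the
// evaluation kind (scalar, complex, or aggregate) of AllocType.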
static void StoreAnyExprIntoOneUnit(CodeGenFunction &CGF, const Expr *Init,
                                    QualType AllocType, Address NewPtr,
                                    AggValueSlot::Overlap_t MayOverlap) {
  // FIXME: Refactor with EmitExprAsInit.
  switch (CGF.getEvaluationKind(AllocType)) {
  case TEK_Scalar:
    CGF.EmitScalarInit(Init, nullptr,
                       CGF.MakeAddrLValue(NewPtr, AllocType), false);
    return;
  case TEK_Complex:
    CGF.EmitComplexExprIntoLValue(Init, CGF.MakeAddrLValue(NewPtr, AllocType),
                                  /*isInit*/ true);
    return;
  case TEK_Aggregate: {
    AggValueSlot Slot
      = AggValueSlot::forAddr(NewPtr, AllocType.getQualifiers(),
                              AggValueSlot::IsDestructed,
                              AggValueSlot::DoesNotNeedGCBarriers,
                              AggValueSlot::IsNotAliased,
                              MayOverlap, AggValueSlot::IsNotZeroed,
                              AggValueSlot::IsSanitizerChecked);
    CGF.EmitAggExpr(Init, Slot);
    return;
  }
  }
  llvm_unreachable("bad evaluation kind");
}
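
// Emit the initializer for an array new-expression: first any explicit
// init-list elements, then the trailing elements via a memset, a constructor
// call loop, or an element-by-element initialization loop.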
void CodeGenFunction::EmitNewArrayInitializer(
    const CXXNewExpr *E, QualType ElementType, llvm::Type *ElementTy,
    Address BeginPtr, llvm::Value *NumElements,
    llvm::Value *AllocSizeWithoutCookie) {
  // If we have a type with trivial initialization and no initializer,
  // there's nothing to do.
  if (!E->hasInitializer())
    return;

  Address CurPtr = BeginPtr;

  unsigned InitListElements = 0;

  const Expr *Init = E->getInitializer();
  Address EndOfInit = Address::invalid();
  QualType::DestructionKind DtorKind = ElementType.isDestructedType();
  EHScopeStack::stable_iterator Cleanup;
  llvm::Instruction *CleanupDominator = nullptr;

  CharUnits ElementSize = getContext().getTypeSizeInChars(ElementType);
  CharUnits ElementAlign =
    BeginPtr.getAlignment().alignmentOfArrayElement(ElementSize);

  // Attempt to perform zero-initialization using memset.
  auto TryMemsetInitialization = [&]() -> bool {
    // FIXME: If the type is a pointer-to-data-member under the Itanium ABI,
    // we can initialize with a memset to -1.
    if (!CGM.getTypes().isZeroInitializable(ElementType))
      return false;

    // Optimization: since zero initialization will just set the memory
    // to all zeroes, generate a single memset to do it in one shot.

    // Subtract out the size of any elements we've already initialized.
    auto *RemainingSize = AllocSizeWithoutCookie;
    if (InitListElements) {
      // We know this can't overflow; we check this when doing the allocation.
      auto *InitializedSize = llvm::ConstantInt::get(
          RemainingSize->getType(),
          getContext().getTypeSizeInChars(ElementType).getQuantity() *
              InitListElements);
      RemainingSize = Builder.CreateSub(RemainingSize, InitializedSize);
    }

    // Create the memset.
    Builder.CreateMemSet(CurPtr, Builder.getInt8(0), RemainingSize, false);
    return true;
  };

  // If the initializer is an initializer list, first do the explicit elements.
  if (const InitListExpr *ILE = dyn_cast<InitListExpr>(Init)) {
    // Initializing from a (braced) string literal is a special case; the init
    // list element does not initialize a (single) array element.
    if (ILE->isStringLiteralInit()) {
      // Initialize the initial portion of length equal to that of the string
      // literal. The allocation must be for at least this much; we emitted a
      // check for that earlier.
      AggValueSlot Slot =
          AggValueSlot::forAddr(CurPtr, ElementType.getQualifiers(),
                                AggValueSlot::IsDestructed,
                                AggValueSlot::DoesNotNeedGCBarriers,
                                AggValueSlot::IsNotAliased,
                                AggValueSlot::DoesNotOverlap,
                                AggValueSlot::IsNotZeroed,
                                AggValueSlot::IsSanitizerChecked);
      EmitAggExpr(ILE->getInit(0), Slot);

      // Move past these elements.
      InitListElements =
          cast<ConstantArrayType>(ILE->getType()->getAsArrayTypeUnsafe())
              ->getSize().getZExtValue();
      CurPtr = Builder.CreateConstInBoundsGEP(
          CurPtr, InitListElements, "string.init.end");

      // Zero out the rest, if any remain.
      llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
      if (!ConstNum || !ConstNum->equalsInt(InitListElements)) {
        bool OK = TryMemsetInitialization();
        (void)OK;
        assert(OK && "couldn't memset character type?");
      }
      return;
    }

    InitListElements = ILE->getNumInits();

    // If this is a multi-dimensional array new, we will initialize multiple
    // elements with each init list element.
    QualType AllocType = E->getAllocatedType();
    if (const ConstantArrayType *CAT = dyn_cast_or_null<ConstantArrayType>(
            AllocType->getAsArrayTypeUnsafe())) {
      ElementTy = ConvertTypeForMem(AllocType);
      CurPtr = Builder.CreateElementBitCast(CurPtr, ElementTy);
      InitListElements *= getContext().getConstantArrayElementCount(CAT);
    }

    // Enter a partial-destruction Cleanup if necessary.
    if (needsEHCleanup(DtorKind)) {
      // In principle we could tell the Cleanup where we are more
      // directly, but the control flow can get so varied here that it
      // would actually be quite complex. Therefore we go through an
      // alloca.
      EndOfInit = CreateTempAlloca(BeginPtr.getType(), getPointerAlign(),
                                   "array.init.end");
      CleanupDominator = Builder.CreateStore(BeginPtr.getPointer(), EndOfInit);
      pushIrregularPartialArrayCleanup(BeginPtr.getPointer(), EndOfInit,
                                       ElementType, ElementAlign,
                                       getDestroyer(DtorKind));
      Cleanup = EHStack.stable_begin();
    }

    CharUnits StartAlign = CurPtr.getAlignment();
    for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i) {
      // Tell the cleanup that it needs to destroy up to this
      // element. TODO: some of these stores can be trivially
      // observed to be unnecessary.
      if (EndOfInit.isValid()) {
        auto FinishedPtr =
          Builder.CreateBitCast(CurPtr.getPointer(), BeginPtr.getType());
        Builder.CreateStore(FinishedPtr, EndOfInit);
      }
      // FIXME: If the last initializer is an incomplete initializer list for
      // an array, and we have an array filler, we can fold together the two
      // initialization loops.
      StoreAnyExprIntoOneUnit(*this, ILE->getInit(i),
                              ILE->getInit(i)->getType(), CurPtr,
                              AggValueSlot::DoesNotOverlap);
      CurPtr = Address(Builder.CreateInBoundsGEP(
                           CurPtr.getElementType(), CurPtr.getPointer(),
                           Builder.getSize(1), "array.exp.next"),
                       CurPtr.getElementType(),
                       StartAlign.alignmentAtOffset((i + 1) * ElementSize));
    }

    // The remaining elements are filled with the array filler expression.
    Init = ILE->getArrayFiller();

    // Extract the initializer for the individual array elements by pulling
    // out the array filler from all the nested initializer lists. This avoids
    // generating a nested loop for the initialization.
    while (Init && Init->getType()->isConstantArrayType()) {
      auto *SubILE = dyn_cast<InitListExpr>(Init);
      if (!SubILE)
        break;
      assert(SubILE->getNumInits() == 0 && "explicit inits in array filler?");
      Init = SubILE->getArrayFiller();
    }

    // Switch back to initializing one base element at a time.
    CurPtr = Builder.CreateElementBitCast(CurPtr, BeginPtr.getElementType());
  }

  // If all elements have already been initialized, skip any further
  // initialization.
  llvm::ConstantInt *ConstNum = dyn_cast<llvm::ConstantInt>(NumElements);
  if (ConstNum && ConstNum->getZExtValue() <= InitListElements) {
    // If there was a Cleanup, deactivate it.
    if (CleanupDominator)
      DeactivateCleanupBlock(Cleanup, CleanupDominator);
    return;
  }

  assert(Init && "have trailing elements to initialize but no initializer");

  // If this is a constructor call, try to optimize it out, and failing that
  // emit a single loop to initialize all remaining elements.
  if (const CXXConstructExpr *CCE = dyn_cast<CXXConstructExpr>(Init)) {
    CXXConstructorDecl *Ctor = CCE->getConstructor();
    if (Ctor->isTrivial()) {
      // If the new-expression did not specify value-initialization, then there
      // is no initialization.
      if (!CCE->requiresZeroInitialization() || Ctor->getParent()->isEmpty())
        return;

      if (TryMemsetInitialization())
        return;
    }

    // Store the new Cleanup position for irregular Cleanups.
    //
    // FIXME: Share this cleanup with the constructor call emission rather than
    // having it create a cleanup of its own.
    if (EndOfInit.isValid())
      Builder.CreateStore(CurPtr.getPointer(), EndOfInit);

    // Emit a constructor call loop to initialize the remaining elements.
    if (InitListElements)
      NumElements = Builder.CreateSub(
          NumElements,
          llvm::ConstantInt::get(NumElements->getType(), InitListElements));
    EmitCXXAggrConstructorCall(Ctor, NumElements, CurPtr, CCE,
                               /*NewPointerIsChecked*/true,
                               CCE->requiresZeroInitialization());
    return;
  }

  // If this is value-initialization, we can usually use memset.
  ImplicitValueInitExpr IVIE(ElementType);
  if (isa<ImplicitValueInitExpr>(Init)) {
    if (TryMemsetInitialization())
      return;

    // Switch to an ImplicitValueInitExpr for the element type. This handles
    // only one case: multidimensional array new of pointers to members. In
    // all other cases, we already have an initializer for the array element.
    Init = &IVIE;
  }

  // At this point we should have found an initializer for the individual
  // elements of the array.
  assert(getContext().hasSameUnqualifiedType(ElementType, Init->getType()) &&
         "got wrong type of element to initialize");

  // If we have an empty initializer list, we can usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init))
    if (ILE->getNumInits() == 0 && TryMemsetInitialization())
      return;

  // If we have a struct whose every field is value-initialized, we can
  // usually use memset.
  if (auto *ILE = dyn_cast<InitListExpr>(Init)) {
    if (const RecordType *RType = ILE->getType()->getAs<RecordType>()) {
      if (RType->getDecl()->isStruct()) {
        unsigned NumElements = 0;
        if (auto *CXXRD = dyn_cast<CXXRecordDecl>(RType->getDecl()))
          NumElements = CXXRD->getNumBases();
        for (auto *Field : RType->getDecl()->fields())
          if (!Field->isUnnamedBitfield())
            ++NumElements;
        // FIXME: Recurse into nested InitListExprs.
        if (ILE->getNumInits() == NumElements)
          for (unsigned i = 0, e = ILE->getNumInits(); i != e; ++i)
            if (!isa<ImplicitValueInitExpr>(ILE->getInit(i)))
              --NumElements;
        if (ILE->getNumInits() == NumElements && TryMemsetInitialization())
          return;
      }
    }
  }

  // Create the loop blocks.
  llvm::BasicBlock *EntryBB = Builder.GetInsertBlock();
  llvm::BasicBlock *LoopBB = createBasicBlock("new.loop");
  llvm::BasicBlock *ContBB = createBasicBlock("new.loop.end");

  // Find the end of the array, hoisted out of the loop.
  llvm::Value *EndPtr =
    Builder.CreateInBoundsGEP(BeginPtr.getElementType(), BeginPtr.getPointer(),
                              NumElements, "array.end");

  // If the number of elements isn't constant, we have to now check if there is
  // anything left to initialize.
  if (!ConstNum) {
    llvm::Value *IsEmpty =
      Builder.CreateICmpEQ(CurPtr.getPointer(), EndPtr, "array.isempty");
    Builder.CreateCondBr(IsEmpty, ContBB, LoopBB);
  }

  // Enter the loop.
  EmitBlock(LoopBB);

  // Set up the current-element phi.
  llvm::PHINode *CurPtrPhi =
    Builder.CreatePHI(CurPtr.getType(), 2, "array.cur");
  CurPtrPhi->addIncoming(CurPtr.getPointer(), EntryBB);

  CurPtr = Address(CurPtrPhi, CurPtr.getElementType(), ElementAlign);

  // Store the new Cleanup position for irregular Cleanups.
  if (EndOfInit.isValid())
    Builder.CreateStore(CurPtr.getPointer(), EndOfInit);

  // Enter a partial-destruction Cleanup if necessary.
  if (!CleanupDominator && needsEHCleanup(DtorKind)) {
    pushRegularPartialArrayCleanup(BeginPtr.getPointer(), CurPtr.getPointer(),
                                   ElementType, ElementAlign,
                                   getDestroyer(DtorKind));
    Cleanup = EHStack.stable_begin();
    CleanupDominator = Builder.CreateUnreachable();
  }

  // Emit the initializer into this element.
  StoreAnyExprIntoOneUnit(*this, Init, Init->getType(), CurPtr,
                          AggValueSlot::DoesNotOverlap);

  // Leave the Cleanup if we entered one.
  if (CleanupDominator) {
    DeactivateCleanupBlock(Cleanup, CleanupDominator);
    CleanupDominator->eraseFromParent();
  1100. }
  1101. // Advance to the next element by adjusting the pointer type as necessary.
  1102. llvm::Value *NextPtr =
  1103. Builder.CreateConstInBoundsGEP1_32(ElementTy, CurPtr.getPointer(), 1,
  1104. "array.next");
  1105. // Check whether we've gotten to the end of the array and, if so,
  1106. // exit the loop.
  1107. llvm::Value *IsEnd = Builder.CreateICmpEQ(NextPtr, EndPtr, "array.atend");
  1108. Builder.CreateCondBr(IsEnd, ContBB, LoopBB);
  1109. CurPtrPhi->addIncoming(NextPtr, Builder.GetInsertBlock());
  1110. EmitBlock(ContBB);
  1111. }
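
// An informal sketch of what the logic above produces: for
//
//   int *p = new int[n]{1, 2, 3};   // n is only known at runtime
//
// the explicit-initializer loop stores 1, 2 and 3 into the first three
// elements, and the trailing elements are then either filled with a single
// memset (value-initializing int is a zero pattern, so
// TryMemsetInitialization usually succeeds) or initialized one per iteration
// of the "new.loop"/"new.loop.end" structure emitted above.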

static void EmitNewInitializer(CodeGenFunction &CGF, const CXXNewExpr *E,
                               QualType ElementType, llvm::Type *ElementTy,
                               Address NewPtr, llvm::Value *NumElements,
                               llvm::Value *AllocSizeWithoutCookie) {
  ApplyDebugLocation DL(CGF, E);
  if (E->isArray())
    CGF.EmitNewArrayInitializer(E, ElementType, ElementTy, NewPtr, NumElements,
                                AllocSizeWithoutCookie);
  else if (const Expr *Init = E->getInitializer())
    StoreAnyExprIntoOneUnit(CGF, Init, E->getAllocatedType(), NewPtr,
                            AggValueSlot::DoesNotOverlap);
}

/// Emit a call to an operator new or operator delete function, as implicitly
/// created by new-expressions and delete-expressions.
static RValue EmitNewDeleteCall(CodeGenFunction &CGF,
                                const FunctionDecl *CalleeDecl,
                                const FunctionProtoType *CalleeType,
                                const CallArgList &Args) {
  llvm::CallBase *CallOrInvoke;
  llvm::Constant *CalleePtr = CGF.CGM.GetAddrOfFunction(CalleeDecl);
  CGCallee Callee = CGCallee::forDirect(CalleePtr, GlobalDecl(CalleeDecl));
  RValue RV =
      CGF.EmitCall(CGF.CGM.getTypes().arrangeFreeFunctionCall(
                       Args, CalleeType, /*ChainCall=*/false),
                   Callee, ReturnValueSlot(), Args, &CallOrInvoke);

  /// C++1y [expr.new]p10:
  ///   [In a new-expression,] an implementation is allowed to omit a call
  ///   to a replaceable global allocation function.
  ///
  /// We model such elidable calls with the 'builtin' attribute.
  llvm::Function *Fn = dyn_cast<llvm::Function>(CalleePtr);
  if (CalleeDecl->isReplaceableGlobalAllocationFunction() &&
      Fn && Fn->hasFnAttribute(llvm::Attribute::NoBuiltin)) {
    CallOrInvoke->addFnAttr(llvm::Attribute::Builtin);
  }

  return RV;
}
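
// The 'builtin' attribute added above is what lets LLVM treat the call as
// elidable even when the declaration itself carries 'nobuiltin' (e.g. under
// -fno-builtin). For example, a fully paired
//
//   int *p = new int(42);
//   delete p;
//
// may be removed entirely by the optimizer, as [expr.new]p10 permits.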

RValue CodeGenFunction::EmitBuiltinNewDeleteCall(const FunctionProtoType *Type,
                                                 const CallExpr *TheCall,
                                                 bool IsDelete) {
  CallArgList Args;
  EmitCallArgs(Args, Type, TheCall->arguments());
  // Find the allocation or deallocation function that we're calling.
  ASTContext &Ctx = getContext();
  DeclarationName Name = Ctx.DeclarationNames
      .getCXXOperatorName(IsDelete ? OO_Delete : OO_New);

  for (auto *Decl : Ctx.getTranslationUnitDecl()->lookup(Name))
    if (auto *FD = dyn_cast<FunctionDecl>(Decl))
      if (Ctx.hasSameType(FD->getType(), QualType(Type, 0)))
        return EmitNewDeleteCall(*this, FD, Type, Args);
  llvm_unreachable("predeclared global operator new/delete is missing");
}
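
// This entry point serves direct calls to the builtins, e.g.
//
//   void *p = __builtin_operator_new(32);
//   __builtin_operator_delete(p);
//
// which lower to the predeclared global operators looked up above while
// keeping the elidable-call semantics of a new-expression.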

namespace {
/// The parameters to pass to a usual operator delete.
struct UsualDeleteParams {
  bool DestroyingDelete = false;
  bool Size = false;
  bool Alignment = false;
};
}

static UsualDeleteParams getUsualDeleteParams(const FunctionDecl *FD) {
  UsualDeleteParams Params;

  const FunctionProtoType *FPT = FD->getType()->castAs<FunctionProtoType>();
  auto AI = FPT->param_type_begin(), AE = FPT->param_type_end();

  // The first argument is always a void*.
  ++AI;

  // The next parameter may be a std::destroying_delete_t.
  if (FD->isDestroyingOperatorDelete()) {
    Params.DestroyingDelete = true;
    assert(AI != AE);
    ++AI;
  }

  // Figure out what other parameters we should be implicitly passing.
  if (AI != AE && (*AI)->isIntegerType()) {
    Params.Size = true;
    ++AI;
  }

  if (AI != AE && (*AI)->isAlignValT()) {
    Params.Alignment = true;
    ++AI;
  }

  assert(AI == AE && "unexpected usual deallocation function parameter");
  return Params;
}
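
// For example, for the usual deallocation functions
//
//   void operator delete(void *);                                // { }
//   void operator delete(void *, std::size_t);                   // {Size}
//   void operator delete(void *, std::align_val_t);              // {Alignment}
//   void operator delete(void *, std::size_t, std::align_val_t); // {Size, Alignment}
//
// the scan above sets the corresponding flags, and a destroying operator
// delete additionally sets DestroyingDelete while skipping over its
// std::destroying_delete_t tag parameter.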

namespace {
  /// A cleanup to call the given 'operator delete' function upon abnormal
  /// exit from a new expression. Templated on a traits type that deals with
  /// ensuring that the arguments dominate the cleanup if necessary.
  template<typename Traits>
  class CallDeleteDuringNew final : public EHScopeStack::Cleanup {
    /// Type used to hold llvm::Value*s.
    typedef typename Traits::ValueTy ValueTy;
    /// Type used to hold RValues.
    typedef typename Traits::RValueTy RValueTy;
    struct PlacementArg {
      RValueTy ArgValue;
      QualType ArgType;
    };

    unsigned NumPlacementArgs : 31;
    unsigned PassAlignmentToPlacementDelete : 1;
    const FunctionDecl *OperatorDelete;
    ValueTy Ptr;
    ValueTy AllocSize;
    CharUnits AllocAlign;

    PlacementArg *getPlacementArgs() {
      return reinterpret_cast<PlacementArg *>(this + 1);
    }

  public:
    static size_t getExtraSize(size_t NumPlacementArgs) {
      return NumPlacementArgs * sizeof(PlacementArg);
    }

    CallDeleteDuringNew(size_t NumPlacementArgs,
                        const FunctionDecl *OperatorDelete, ValueTy Ptr,
                        ValueTy AllocSize, bool PassAlignmentToPlacementDelete,
                        CharUnits AllocAlign)
      : NumPlacementArgs(NumPlacementArgs),
        PassAlignmentToPlacementDelete(PassAlignmentToPlacementDelete),
        OperatorDelete(OperatorDelete), Ptr(Ptr), AllocSize(AllocSize),
        AllocAlign(AllocAlign) {}

    void setPlacementArg(unsigned I, RValueTy Arg, QualType Type) {
      assert(I < NumPlacementArgs && "index out of range");
      getPlacementArgs()[I] = {Arg, Type};
    }

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      const auto *FPT = OperatorDelete->getType()->castAs<FunctionProtoType>();
      CallArgList DeleteArgs;

      // The first argument is always a void* (or C* for a destroying operator
      // delete for class type C).
      DeleteArgs.add(Traits::get(CGF, Ptr), FPT->getParamType(0));

      // Figure out what other parameters we should be implicitly passing.
      UsualDeleteParams Params;
      if (NumPlacementArgs) {
        // A placement deallocation function is implicitly passed an alignment
        // if the placement allocation function was, but is never passed a size.
        Params.Alignment = PassAlignmentToPlacementDelete;
      } else {
        // For a non-placement new-expression, 'operator delete' can take a
        // size and/or an alignment if it has the right parameters.
        Params = getUsualDeleteParams(OperatorDelete);
      }

      assert(!Params.DestroyingDelete &&
             "should not call destroying delete in a new-expression");

      // The second argument can be a std::size_t (for non-placement delete).
      if (Params.Size)
        DeleteArgs.add(Traits::get(CGF, AllocSize),
                       CGF.getContext().getSizeType());

      // The next (second or third) argument can be a std::align_val_t, which
      // is an enum whose underlying type is std::size_t.
      // FIXME: Use the right type as the parameter type. Note that in a call
      // to operator delete(size_t, ...), we may not have it available.
      if (Params.Alignment)
        DeleteArgs.add(RValue::get(llvm::ConstantInt::get(
                           CGF.SizeTy, AllocAlign.getQuantity())),
                       CGF.getContext().getSizeType());

      // Pass the rest of the arguments, which must match exactly.
      for (unsigned I = 0; I != NumPlacementArgs; ++I) {
        auto Arg = getPlacementArgs()[I];
        DeleteArgs.add(Traits::get(CGF, Arg.ArgValue), Arg.ArgType);
      }

      // Call 'operator delete'.
      EmitNewDeleteCall(CGF, OperatorDelete, FPT, DeleteArgs);
    }
  };
}
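
// For a placement new-expression such as
//
//   T *t = new (a, b) T(/*...*/);
//
// an exception thrown during initialization must be unwound by calling the
// matching placement deallocation function, 'operator delete(void*, A, B)',
// with the same placement arguments; the Emit method above rebuilds exactly
// that call from the saved pointer and placement arguments.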

/// Enter a cleanup to call 'operator delete' if the initializer in a
/// new-expression throws.
static void EnterNewDeleteCleanup(CodeGenFunction &CGF,
                                  const CXXNewExpr *E,
                                  Address NewPtr,
                                  llvm::Value *AllocSize,
                                  CharUnits AllocAlign,
                                  const CallArgList &NewArgs) {
  unsigned NumNonPlacementArgs = E->passAlignment() ? 2 : 1;

  // If we're not inside a conditional branch, then the cleanup will
  // dominate and we can do the easier (and more efficient) thing.
  if (!CGF.isInConditionalBranch()) {
    struct DirectCleanupTraits {
      typedef llvm::Value *ValueTy;
      typedef RValue RValueTy;
      static RValue get(CodeGenFunction &, ValueTy V) { return RValue::get(V); }
      static RValue get(CodeGenFunction &, RValueTy V) { return V; }
    };

    typedef CallDeleteDuringNew<DirectCleanupTraits> DirectCleanup;

    DirectCleanup *Cleanup = CGF.EHStack
      .pushCleanupWithExtra<DirectCleanup>(EHCleanup,
                                           E->getNumPlacementArgs(),
                                           E->getOperatorDelete(),
                                           NewPtr.getPointer(),
                                           AllocSize,
                                           E->passAlignment(),
                                           AllocAlign);
    for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I) {
      auto &Arg = NewArgs[I + NumNonPlacementArgs];
      Cleanup->setPlacementArg(I, Arg.getRValue(CGF), Arg.Ty);
    }

    return;
  }

  // Otherwise, we need to save all this stuff.
  DominatingValue<RValue>::saved_type SavedNewPtr =
    DominatingValue<RValue>::save(CGF, RValue::get(NewPtr.getPointer()));
  DominatingValue<RValue>::saved_type SavedAllocSize =
    DominatingValue<RValue>::save(CGF, RValue::get(AllocSize));

  struct ConditionalCleanupTraits {
    typedef DominatingValue<RValue>::saved_type ValueTy;
    typedef DominatingValue<RValue>::saved_type RValueTy;
    static RValue get(CodeGenFunction &CGF, ValueTy V) {
      return V.restore(CGF);
    }
  };
  typedef CallDeleteDuringNew<ConditionalCleanupTraits> ConditionalCleanup;

  ConditionalCleanup *Cleanup = CGF.EHStack
    .pushCleanupWithExtra<ConditionalCleanup>(EHCleanup,
                                              E->getNumPlacementArgs(),
                                              E->getOperatorDelete(),
                                              SavedNewPtr,
                                              SavedAllocSize,
                                              E->passAlignment(),
                                              AllocAlign);
  for (unsigned I = 0, N = E->getNumPlacementArgs(); I != N; ++I) {
    auto &Arg = NewArgs[I + NumNonPlacementArgs];
    Cleanup->setPlacementArg(
        I, DominatingValue<RValue>::save(CGF, Arg.getRValue(CGF)), Arg.Ty);
  }

  CGF.initFullExprCleanup();
}
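
// The conditional path matters when the new-expression is itself only
// conditionally evaluated, e.g.
//
//   T *t = cond ? new T(/*...*/) : nullptr;
//
// There the cleanup can run in a block that the allocation does not
// dominate, so the pointer, size and placement arguments are first saved
// via DominatingValue<RValue>::save and restored when the cleanup fires.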

llvm::Value *CodeGenFunction::EmitCXXNewExpr(const CXXNewExpr *E) {
  // The element type being allocated.
  QualType allocType = getContext().getBaseElementType(E->getAllocatedType());

  // 1. Build a call to the allocation function.
  FunctionDecl *allocator = E->getOperatorNew();

  // If there is a brace-initializer, we cannot allocate fewer elements than
  // there are initializers.
  unsigned minElements = 0;
  if (E->isArray() && E->hasInitializer()) {
    const InitListExpr *ILE = dyn_cast<InitListExpr>(E->getInitializer());
    if (ILE && ILE->isStringLiteralInit())
      minElements =
          cast<ConstantArrayType>(ILE->getType()->getAsArrayTypeUnsafe())
              ->getSize().getZExtValue();
    else if (ILE)
      minElements = ILE->getNumInits();
  }

  llvm::Value *numElements = nullptr;
  llvm::Value *allocSizeWithoutCookie = nullptr;
  llvm::Value *allocSize =
    EmitCXXNewAllocSize(*this, E, minElements, numElements,
                        allocSizeWithoutCookie);
  CharUnits allocAlign = getContext().getTypeAlignInChars(allocType);

  // Emit the allocation call. If the allocator is a global placement
  // operator, just "inline" it directly.
  Address allocation = Address::invalid();
  CallArgList allocatorArgs;
  if (allocator->isReservedGlobalPlacementOperator()) {
    assert(E->getNumPlacementArgs() == 1);
    const Expr *arg = *E->placement_arguments().begin();

    LValueBaseInfo BaseInfo;
    allocation = EmitPointerWithAlignment(arg, &BaseInfo);

    // The pointer expression will, in many cases, be an opaque void*.
    // In these cases, discard the computed alignment and use the
    // formal alignment of the allocated type.
    if (BaseInfo.getAlignmentSource() != AlignmentSource::Decl)
      allocation = allocation.withAlignment(allocAlign);

    // Set up allocatorArgs for the call to operator delete if it's not
    // the reserved global operator.
    if (E->getOperatorDelete() &&
        !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
      allocatorArgs.add(RValue::get(allocSize), getContext().getSizeType());
      allocatorArgs.add(RValue::get(allocation.getPointer()), arg->getType());
    }
  } else {
    const FunctionProtoType *allocatorType =
      allocator->getType()->castAs<FunctionProtoType>();
    unsigned ParamsToSkip = 0;

    // The allocation size is the first argument.
    QualType sizeType = getContext().getSizeType();
    allocatorArgs.add(RValue::get(allocSize), sizeType);
    ++ParamsToSkip;

    if (allocSize != allocSizeWithoutCookie) {
      CharUnits cookieAlign = getSizeAlign(); // FIXME: Ask the ABI.
      allocAlign = std::max(allocAlign, cookieAlign);
    }

    // The allocation alignment may be passed as the second argument.
    if (E->passAlignment()) {
      QualType AlignValT = sizeType;
      if (allocatorType->getNumParams() > 1) {
        AlignValT = allocatorType->getParamType(1);
        assert(getContext().hasSameUnqualifiedType(
                   AlignValT->castAs<EnumType>()->getDecl()->getIntegerType(),
                   sizeType) &&
               "wrong type for alignment parameter");
        ++ParamsToSkip;
      } else {
        // Corner case, passing alignment to 'operator new(size_t, ...)'.
        assert(allocator->isVariadic() && "can't pass alignment to allocator");
      }
      allocatorArgs.add(
          RValue::get(llvm::ConstantInt::get(SizeTy, allocAlign.getQuantity())),
          AlignValT);
    }

    // FIXME: Why do we not pass a CalleeDecl here?
    EmitCallArgs(allocatorArgs, allocatorType, E->placement_arguments(),
                 /*AC*/AbstractCallee(), /*ParamsToSkip*/ParamsToSkip);

    RValue RV =
      EmitNewDeleteCall(*this, allocator, allocatorType, allocatorArgs);

    // Set !heapallocsite metadata on the call to operator new.
    if (getDebugInfo())
      if (auto *newCall = dyn_cast<llvm::CallBase>(RV.getScalarVal()))
        getDebugInfo()->addHeapAllocSiteMetadata(newCall, allocType,
                                                 E->getExprLoc());

    // If this was a call to a global replaceable allocation function that does
    // not take an alignment argument, the allocator is known to produce
    // storage that's suitably aligned for any object that fits, up to a known
    // threshold. Otherwise assume it's suitably aligned for the allocated type.
    CharUnits allocationAlign = allocAlign;
    if (!E->passAlignment() &&
        allocator->isReplaceableGlobalAllocationFunction()) {
      unsigned AllocatorAlign = llvm::PowerOf2Floor(std::min<uint64_t>(
          Target.getNewAlign(), getContext().getTypeSize(allocType)));
      allocationAlign = std::max(
          allocationAlign, getContext().toCharUnitsFromBits(AllocatorAlign));
    }

    allocation = Address(RV.getScalarVal(), Int8Ty, allocationAlign);
  }

  // Emit a null check on the allocation result if the allocation
  // function is allowed to return null (because it has a non-throwing
  // exception spec or is the reserved placement new) and we have an
  // interesting initializer or will be running sanitizers on the
  // initialization.
  bool nullCheck = E->shouldNullCheckAllocation() &&
                   (!allocType.isPODType(getContext()) || E->hasInitializer() ||
                    sanitizePerformTypeCheck());

  llvm::BasicBlock *nullCheckBB = nullptr;
  llvm::BasicBlock *contBB = nullptr;

  // The null-check means that the initializer is conditionally
  // evaluated.
  ConditionalEvaluation conditional(*this);

  if (nullCheck) {
    conditional.begin(*this);

    nullCheckBB = Builder.GetInsertBlock();
    llvm::BasicBlock *notNullBB = createBasicBlock("new.notnull");
    contBB = createBasicBlock("new.cont");

    llvm::Value *isNull =
      Builder.CreateIsNull(allocation.getPointer(), "new.isnull");
    Builder.CreateCondBr(isNull, contBB, notNullBB);
    EmitBlock(notNullBB);
  }

  // If there's an operator delete, enter a cleanup to call it if an
  // exception is thrown.
  EHScopeStack::stable_iterator operatorDeleteCleanup;
  llvm::Instruction *cleanupDominator = nullptr;
  if (E->getOperatorDelete() &&
      !E->getOperatorDelete()->isReservedGlobalPlacementOperator()) {
    EnterNewDeleteCleanup(*this, E, allocation, allocSize, allocAlign,
                          allocatorArgs);
    operatorDeleteCleanup = EHStack.stable_begin();
    cleanupDominator = Builder.CreateUnreachable();
  }

  assert((allocSize == allocSizeWithoutCookie) ==
         CalculateCookiePadding(*this, E).isZero());
  if (allocSize != allocSizeWithoutCookie) {
    assert(E->isArray());
    allocation = CGM.getCXXABI().InitializeArrayCookie(*this, allocation,
                                                       numElements,
                                                       E, allocType);
  }

  llvm::Type *elementTy = ConvertTypeForMem(allocType);
  Address result = Builder.CreateElementBitCast(allocation, elementTy);

  // Pass the pointer through launder.invariant.group to avoid propagating
  // vptr information that may be embedded in the previous type. So as not to
  // break LTO between different optimization levels, we do this regardless
  // of the optimization level.
  if (CGM.getCodeGenOpts().StrictVTablePointers &&
      allocator->isReservedGlobalPlacementOperator())
    result = Builder.CreateLaunderInvariantGroup(result);

  // Emit sanitizer checks for the pointer value now, so that in the case of
  // an array it is checked only once and not at each constructor call. We
  // may have already checked that the pointer is non-null.
  // FIXME: If we have an array cookie and a potentially-throwing allocator,
  // we'll null check the wrong pointer here.
  SanitizerSet SkippedChecks;
  SkippedChecks.set(SanitizerKind::Null, nullCheck);
  EmitTypeCheck(CodeGenFunction::TCK_ConstructorCall,
                E->getAllocatedTypeSourceInfo()->getTypeLoc().getBeginLoc(),
                result.getPointer(), allocType, result.getAlignment(),
                SkippedChecks, numElements);

  EmitNewInitializer(*this, E, allocType, elementTy, result, numElements,
                     allocSizeWithoutCookie);
  llvm::Value *resultPtr = result.getPointer();
  if (E->isArray()) {
    // NewPtr is a pointer to the base element type. If we're
    // allocating an array of arrays, we'll need to cast back to the
    // array pointer type.
    llvm::Type *resultType = ConvertTypeForMem(E->getType());
    if (resultPtr->getType() != resultType)
      resultPtr = Builder.CreateBitCast(resultPtr, resultType);
  }

  // Deactivate the 'operator delete' cleanup if we finished
  // initialization.
  if (operatorDeleteCleanup.isValid()) {
    DeactivateCleanupBlock(operatorDeleteCleanup, cleanupDominator);
    cleanupDominator->eraseFromParent();
  }

  if (nullCheck) {
    conditional.end(*this);

    llvm::BasicBlock *notNullBB = Builder.GetInsertBlock();
    EmitBlock(contBB);

    llvm::PHINode *PHI = Builder.CreatePHI(resultPtr->getType(), 2);
    PHI->addIncoming(resultPtr, notNullBB);
    PHI->addIncoming(llvm::Constant::getNullValue(resultPtr->getType()),
                     nullCheckBB);

    resultPtr = PHI;
  }

  return resultPtr;
}
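
// The null check emitted above is what makes a non-throwing allocator safe
// to use with an initializer; for example, in
//
//   T *t = new (std::nothrow) T(/*...*/);
//
// 'operator new(std::size_t, const std::nothrow_t&)' may return null, so
// initialization runs only on the "new.notnull" path and the final result
// is a phi of the constructed pointer and null.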

void CodeGenFunction::EmitDeleteCall(const FunctionDecl *DeleteFD,
                                     llvm::Value *Ptr, QualType DeleteTy,
                                     llvm::Value *NumElements,
                                     CharUnits CookieSize) {
  assert((!NumElements && CookieSize.isZero()) ||
         DeleteFD->getOverloadedOperator() == OO_Array_Delete);

  const auto *DeleteFTy = DeleteFD->getType()->castAs<FunctionProtoType>();
  CallArgList DeleteArgs;

  auto Params = getUsualDeleteParams(DeleteFD);
  auto ParamTypeIt = DeleteFTy->param_type_begin();

  // Pass the pointer itself.
  QualType ArgTy = *ParamTypeIt++;
  llvm::Value *DeletePtr = Builder.CreateBitCast(Ptr, ConvertType(ArgTy));
  DeleteArgs.add(RValue::get(DeletePtr), ArgTy);

  // Pass the std::destroying_delete tag if present.
  llvm::AllocaInst *DestroyingDeleteTag = nullptr;
  if (Params.DestroyingDelete) {
    QualType DDTag = *ParamTypeIt++;
    llvm::Type *Ty = getTypes().ConvertType(DDTag);
    CharUnits Align = CGM.getNaturalTypeAlignment(DDTag);
    DestroyingDeleteTag = CreateTempAlloca(Ty, "destroying.delete.tag");
    DestroyingDeleteTag->setAlignment(Align.getAsAlign());
    DeleteArgs.add(
        RValue::getAggregate(Address(DestroyingDeleteTag, Ty, Align)), DDTag);
  }

  // Pass the size if the delete function has a size_t parameter.
  if (Params.Size) {
    QualType SizeType = *ParamTypeIt++;
    CharUnits DeleteTypeSize = getContext().getTypeSizeInChars(DeleteTy);
    llvm::Value *Size = llvm::ConstantInt::get(ConvertType(SizeType),
                                               DeleteTypeSize.getQuantity());

    // For array new, multiply by the number of elements.
    if (NumElements)
      Size = Builder.CreateMul(Size, NumElements);

    // If there is a cookie, add the cookie size.
    if (!CookieSize.isZero())
      Size = Builder.CreateAdd(
          Size, llvm::ConstantInt::get(SizeTy, CookieSize.getQuantity()));

    DeleteArgs.add(RValue::get(Size), SizeType);
  }

  // Pass the alignment if the delete function has an align_val_t parameter.
  if (Params.Alignment) {
    QualType AlignValType = *ParamTypeIt++;
    CharUnits DeleteTypeAlign =
        getContext().toCharUnitsFromBits(getContext().getTypeAlignIfKnown(
            DeleteTy, true /* NeedsPreferredAlignment */));
    llvm::Value *Align = llvm::ConstantInt::get(ConvertType(AlignValType),
                                                DeleteTypeAlign.getQuantity());
    DeleteArgs.add(RValue::get(Align), AlignValType);
  }

  assert(ParamTypeIt == DeleteFTy->param_type_end() &&
         "unknown parameter to usual delete function");

  // Emit the call to delete.
  EmitNewDeleteCall(*this, DeleteFD, DeleteFTy, DeleteArgs);

  // If call argument lowering didn't use the destroying_delete_t alloca,
  // remove it again.
  if (DestroyingDeleteTag && DestroyingDeleteTag->use_empty())
    DestroyingDeleteTag->eraseFromParent();
}
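
// As an example of the size computation above, for a sized usual array
// deallocation
//
//   void operator delete[](void *, std::size_t);
//   delete[] p;   // where p came from 'new T[n]'
//
// the size argument is sizeof(T) * n plus the array cookie size, i.e. the
// full size originally requested from 'operator new[]'.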

namespace {
  /// Calls the given 'operator delete' on a single object.
  struct CallObjectDelete final : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    QualType ElementType;

    CallObjectDelete(llvm::Value *Ptr,
                     const FunctionDecl *OperatorDelete,
                     QualType ElementType)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), ElementType(ElementType) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType);
    }
  };
}

void
CodeGenFunction::pushCallObjectDeleteCleanup(const FunctionDecl *OperatorDelete,
                                             llvm::Value *CompletePtr,
                                             QualType ElementType) {
  EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup, CompletePtr,
                                        OperatorDelete, ElementType);
}

/// Emit the code for deleting a single object with a destroying operator
/// delete. If the element type has a non-virtual destructor, Ptr has already
/// been converted to the type of the parameter of 'operator delete'. Otherwise
/// Ptr points to an object of the static type.
static void EmitDestroyingObjectDelete(CodeGenFunction &CGF,
                                       const CXXDeleteExpr *DE, Address Ptr,
                                       QualType ElementType) {
  auto *Dtor = ElementType->getAsCXXRecordDecl()->getDestructor();
  if (Dtor && Dtor->isVirtual())
    CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                Dtor);
  else
    CGF.EmitDeleteCall(DE->getOperatorDelete(), Ptr.getPointer(), ElementType);
}
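
// A destroying operator delete (C++20) is declared as, e.g.,
//
//   struct T {
//     void operator delete(T *, std::destroying_delete_t);
//   };
//
// and is responsible for running the destructor itself, which is why no
// separate destructor call is emitted here; a virtual destructor still
// requires the vcall above so the most derived object can be located.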

/// Emit the code for deleting a single object.
/// \return \c true if we started emitting UnconditionalDeleteBlock, \c false
/// if not.
static bool EmitObjectDelete(CodeGenFunction &CGF,
                             const CXXDeleteExpr *DE,
                             Address Ptr,
                             QualType ElementType,
                             llvm::BasicBlock *UnconditionalDeleteBlock) {
  // C++11 [expr.delete]p3:
  //   If the static type of the object to be deleted is different from its
  //   dynamic type, the static type shall be a base class of the dynamic type
  //   of the object to be deleted and the static type shall have a virtual
  //   destructor or the behavior is undefined.
  CGF.EmitTypeCheck(CodeGenFunction::TCK_MemberCall,
                    DE->getExprLoc(), Ptr.getPointer(),
                    ElementType);

  const FunctionDecl *OperatorDelete = DE->getOperatorDelete();
  assert(!OperatorDelete->isDestroyingOperatorDelete());

  // Find the destructor for the type, if applicable. If the
  // destructor is virtual, we'll just emit the vcall and return.
  const CXXDestructorDecl *Dtor = nullptr;
  if (const RecordType *RT = ElementType->getAs<RecordType>()) {
    CXXRecordDecl *RD = cast<CXXRecordDecl>(RT->getDecl());
    if (RD->hasDefinition() && !RD->hasTrivialDestructor()) {
      Dtor = RD->getDestructor();

      if (Dtor->isVirtual()) {
        bool UseVirtualCall = true;
        const Expr *Base = DE->getArgument();
        if (auto *DevirtualizedDtor =
                dyn_cast_or_null<const CXXDestructorDecl>(
                    Dtor->getDevirtualizedMethod(
                        Base, CGF.CGM.getLangOpts().AppleKext))) {
          UseVirtualCall = false;
          const CXXRecordDecl *DevirtualizedClass =
              DevirtualizedDtor->getParent();
          if (declaresSameEntity(getCXXRecord(Base), DevirtualizedClass)) {
            // Devirtualized to the class of the base type (the type of the
            // whole expression).
            Dtor = DevirtualizedDtor;
          } else {
            // Devirtualized to some other type. Would need to cast the this
            // pointer to that type but we don't have support for that yet, so
            // do a virtual call. FIXME: handle the case where it is
            // devirtualized to the derived type (the type of the inner
            // expression) as in EmitCXXMemberOrOperatorMemberCallExpr.
            UseVirtualCall = true;
          }
        }
        if (UseVirtualCall) {
          CGF.CGM.getCXXABI().emitVirtualObjectDelete(CGF, DE, Ptr, ElementType,
                                                      Dtor);
          return false;
        }
      }
    }
  }

  // Make sure that we call delete even if the dtor throws.
  // This doesn't have to be a conditional cleanup because we're going
  // to pop it off in a second.
  CGF.EHStack.pushCleanup<CallObjectDelete>(NormalAndEHCleanup,
                                            Ptr.getPointer(),
                                            OperatorDelete, ElementType);

  if (Dtor)
    CGF.EmitCXXDestructorCall(Dtor, Dtor_Complete,
                              /*ForVirtualBase=*/false,
                              /*Delegating=*/false,
                              Ptr, ElementType);
  else if (auto Lifetime = ElementType.getObjCLifetime()) {
    switch (Lifetime) {
    case Qualifiers::OCL_None:
    case Qualifiers::OCL_ExplicitNone:
    case Qualifiers::OCL_Autoreleasing:
      break;

    case Qualifiers::OCL_Strong:
      CGF.EmitARCDestroyStrong(Ptr, ARCPreciseLifetime);
      break;

    case Qualifiers::OCL_Weak:
      CGF.EmitARCDestroyWeak(Ptr);
      break;
    }
  }

  // When optimizing for size, call 'operator delete' unconditionally.
  if (CGF.CGM.getCodeGenOpts().OptimizeSize > 1) {
    CGF.EmitBlock(UnconditionalDeleteBlock);
    CGF.PopCleanupBlock();
    return true;
  }

  CGF.PopCleanupBlock();
  return false;
}

namespace {
  /// Calls the given 'operator delete' on an array of objects.
  struct CallArrayDelete final : EHScopeStack::Cleanup {
    llvm::Value *Ptr;
    const FunctionDecl *OperatorDelete;
    llvm::Value *NumElements;
    QualType ElementType;
    CharUnits CookieSize;

    CallArrayDelete(llvm::Value *Ptr,
                    const FunctionDecl *OperatorDelete,
                    llvm::Value *NumElements,
                    QualType ElementType,
                    CharUnits CookieSize)
      : Ptr(Ptr), OperatorDelete(OperatorDelete), NumElements(NumElements),
        ElementType(ElementType), CookieSize(CookieSize) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitDeleteCall(OperatorDelete, Ptr, ElementType, NumElements,
                         CookieSize);
    }
  };
}

/// Emit the code for deleting an array of objects.
static void EmitArrayDelete(CodeGenFunction &CGF,
                            const CXXDeleteExpr *E,
                            Address deletedPtr,
                            QualType elementType) {
  llvm::Value *numElements = nullptr;
  llvm::Value *allocatedPtr = nullptr;
  CharUnits cookieSize;
  CGF.CGM.getCXXABI().ReadArrayCookie(CGF, deletedPtr, E, elementType,
                                      numElements, allocatedPtr, cookieSize);

  assert(allocatedPtr && "ReadArrayCookie didn't set allocated pointer");

  // Make sure that we call delete even if one of the dtors throws.
  const FunctionDecl *operatorDelete = E->getOperatorDelete();
  CGF.EHStack.pushCleanup<CallArrayDelete>(NormalAndEHCleanup,
                                           allocatedPtr, operatorDelete,
                                           numElements, elementType,
                                           cookieSize);

  // Destroy the elements.
  if (QualType::DestructionKind dtorKind = elementType.isDestructedType()) {
    assert(numElements && "no element count for a type with a destructor!");

    CharUnits elementSize = CGF.getContext().getTypeSizeInChars(elementType);
    CharUnits elementAlign =
      deletedPtr.getAlignment().alignmentOfArrayElement(elementSize);

    llvm::Value *arrayBegin = deletedPtr.getPointer();
    llvm::Value *arrayEnd = CGF.Builder.CreateInBoundsGEP(
        deletedPtr.getElementType(), arrayBegin, numElements, "delete.end");

    // Note that it is legal to allocate a zero-length array, and we
    // can never fold the check away because the length should always
    // come from a cookie.
    CGF.emitArrayDestroy(arrayBegin, arrayEnd, elementType, elementAlign,
                         CGF.getDestroyer(dtorKind),
                         /*checkZeroLength*/ true,
                         CGF.needsEHCleanup(dtorKind));
  }

  // Pop the cleanup block.
  CGF.PopCleanupBlock();
}
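
// The array cookie consulted above is the header that array new emits in
// front of the elements when the element count is needed at delete time.
// In the Itanium C++ ABI, for example, it holds the element count, so a
// 'new T[n]' allocation looks roughly like
//
//   [ cookie: n ][ T[0] ][ T[1] ] ... [ T[n-1] ]
//                 ^--- pointer produced by the new-expression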

void CodeGenFunction::EmitCXXDeleteExpr(const CXXDeleteExpr *E) {
  const Expr *Arg = E->getArgument();
  Address Ptr = EmitPointerWithAlignment(Arg);

  // Null check the pointer.
  //
  // We could avoid this null check if we can determine that the object
  // destruction is trivial and doesn't require an array cookie; we can
  // unconditionally perform the operator delete call in that case. For now, we
  // assume that deleted pointers are null rarely enough that it's better to
  // keep the branch. This might be worth revisiting for a -O0 code size win.
  llvm::BasicBlock *DeleteNotNull = createBasicBlock("delete.notnull");
  llvm::BasicBlock *DeleteEnd = createBasicBlock("delete.end");

  llvm::Value *IsNull = Builder.CreateIsNull(Ptr.getPointer(), "isnull");

  Builder.CreateCondBr(IsNull, DeleteEnd, DeleteNotNull);
  EmitBlock(DeleteNotNull);

  QualType DeleteTy = E->getDestroyedType();

  // A destroying operator delete overrides the entire operation of the
  // delete expression.
  if (E->getOperatorDelete()->isDestroyingOperatorDelete()) {
    EmitDestroyingObjectDelete(*this, E, Ptr, DeleteTy);
    EmitBlock(DeleteEnd);
    return;
  }

  // We might be deleting a pointer to array. If so, GEP down to the
  // first non-array element.
  // (this assumes that A(*)[3][7] is converted to [3 x [7 x %A]]*)
  if (DeleteTy->isConstantArrayType()) {
    llvm::Value *Zero = Builder.getInt32(0);
    SmallVector<llvm::Value*,8> GEP;

    GEP.push_back(Zero); // point at the outermost array

    // For each layer of array type we're pointing at:
    while (const ConstantArrayType *Arr
             = getContext().getAsConstantArrayType(DeleteTy)) {
      // 1. Unpeel the array type.
      DeleteTy = Arr->getElementType();

      // 2. GEP to the first element of the array.
      GEP.push_back(Zero);
    }

    Ptr = Address(Builder.CreateInBoundsGEP(Ptr.getElementType(),
                                            Ptr.getPointer(), GEP, "del.first"),
                  ConvertTypeForMem(DeleteTy), Ptr.getAlignment());
  }

  assert(ConvertTypeForMem(DeleteTy) == Ptr.getElementType());

  if (E->isArrayForm()) {
    EmitArrayDelete(*this, E, Ptr, DeleteTy);
    EmitBlock(DeleteEnd);
  } else {
    if (!EmitObjectDelete(*this, E, Ptr, DeleteTy, DeleteEnd))
      EmitBlock(DeleteEnd);
  }
}

static bool isGLValueFromPointerDeref(const Expr *E) {
  E = E->IgnoreParens();

  if (const auto *CE = dyn_cast<CastExpr>(E)) {
    if (!CE->getSubExpr()->isGLValue())
      return false;
    return isGLValueFromPointerDeref(CE->getSubExpr());
  }

  if (const auto *OVE = dyn_cast<OpaqueValueExpr>(E))
    return isGLValueFromPointerDeref(OVE->getSourceExpr());

  if (const auto *BO = dyn_cast<BinaryOperator>(E))
    if (BO->getOpcode() == BO_Comma)
      return isGLValueFromPointerDeref(BO->getRHS());

  if (const auto *ACO = dyn_cast<AbstractConditionalOperator>(E))
    return isGLValueFromPointerDeref(ACO->getTrueExpr()) ||
           isGLValueFromPointerDeref(ACO->getFalseExpr());

  // C++11 [expr.sub]p1:
  //   The expression E1[E2] is identical (by definition) to *((E1)+(E2))
  if (isa<ArraySubscriptExpr>(E))
    return true;

  if (const auto *UO = dyn_cast<UnaryOperator>(E))
    if (UO->getOpcode() == UO_Deref)
      return true;

  return false;
}
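
// Under this interpretation, all of the following typeid operands are
// treated as being obtained from a pointer dereference and therefore get a
// null check on the pointer:
//
//   typeid(*p);
//   typeid(p[0]);
//   typeid((void(), *p));
//   typeid(cond ? *p : *q);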

static llvm::Value *EmitTypeidFromVTable(CodeGenFunction &CGF, const Expr *E,
                                         llvm::Type *StdTypeInfoPtrTy) {
  // Get the vtable pointer.
  Address ThisPtr = CGF.EmitLValue(E).getAddress(CGF);

  QualType SrcRecordTy = E->getType();

  // C++ [class.cdtor]p4:
  //   If the operand of typeid refers to the object under construction or
  //   destruction and the static type of the operand is neither the constructor
  //   or destructor’s class nor one of its bases, the behavior is undefined.
  CGF.EmitTypeCheck(CodeGenFunction::TCK_DynamicOperation, E->getExprLoc(),
                    ThisPtr.getPointer(), SrcRecordTy);

  // C++ [expr.typeid]p2:
  //   If the glvalue expression is obtained by applying the unary * operator to
  //   a pointer and the pointer is a null pointer value, the typeid expression
  //   throws the std::bad_typeid exception.
  //
  // However, this paragraph's intent is not clear. We choose a very generous
  // interpretation which implores us to consider comma operators, conditional
  // operators, parentheses and other such constructs.
  if (CGF.CGM.getCXXABI().shouldTypeidBeNullChecked(
          isGLValueFromPointerDeref(E), SrcRecordTy)) {
    llvm::BasicBlock *BadTypeidBlock =
        CGF.createBasicBlock("typeid.bad_typeid");
    llvm::BasicBlock *EndBlock = CGF.createBasicBlock("typeid.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ThisPtr.getPointer());
    CGF.Builder.CreateCondBr(IsNull, BadTypeidBlock, EndBlock);

    CGF.EmitBlock(BadTypeidBlock);
    CGF.CGM.getCXXABI().EmitBadTypeidCall(CGF);
    CGF.EmitBlock(EndBlock);
  }

  return CGF.CGM.getCXXABI().EmitTypeid(CGF, SrcRecordTy, ThisPtr,
                                        StdTypeInfoPtrTy);
}

llvm::Value *CodeGenFunction::EmitCXXTypeidExpr(const CXXTypeidExpr *E) {
  llvm::Type *StdTypeInfoPtrTy =
    ConvertType(E->getType())->getPointerTo();

  if (E->isTypeOperand()) {
    llvm::Constant *TypeInfo =
        CGM.GetAddrOfRTTIDescriptor(E->getTypeOperand(getContext()));
    return Builder.CreateBitCast(TypeInfo, StdTypeInfoPtrTy);
  }

  // C++ [expr.typeid]p2:
  //   When typeid is applied to a glvalue expression whose type is a
  //   polymorphic class type, the result refers to a std::type_info object
  //   representing the type of the most derived object (that is, the dynamic
  //   type) to which the glvalue refers.
  // If the operand is already the most derived object, there is no need to
  // look up the vtable.
  if (E->isPotentiallyEvaluated() && !E->isMostDerived(getContext()))
    return EmitTypeidFromVTable(*this, E->getExprOperand(),
                                StdTypeInfoPtrTy);

  QualType OperandTy = E->getExprOperand()->getType();
  return Builder.CreateBitCast(CGM.GetAddrOfRTTIDescriptor(OperandTy),
                               StdTypeInfoPtrTy);
}

static llvm::Value *EmitDynamicCastToNull(CodeGenFunction &CGF,
                                          QualType DestTy) {
  llvm::Type *DestLTy = CGF.ConvertType(DestTy);
  if (DestTy->isPointerType())
    return llvm::Constant::getNullValue(DestLTy);

  /// C++ [expr.dynamic.cast]p9:
  ///   A failed cast to reference type throws std::bad_cast
  if (!CGF.CGM.getCXXABI().EmitBadCastCall(CGF))
    return nullptr;

  CGF.EmitBlock(CGF.createBasicBlock("dynamic_cast.end"));
  return llvm::UndefValue::get(DestLTy);
}

llvm::Value *CodeGenFunction::EmitDynamicCast(Address ThisAddr,
                                              const CXXDynamicCastExpr *DCE) {
  CGM.EmitExplicitCastExprType(DCE, this);
  QualType DestTy = DCE->getTypeAsWritten();

  QualType SrcTy = DCE->getSubExpr()->getType();

  // C++ [expr.dynamic.cast]p7:
  //   If T is "pointer to cv void," then the result is a pointer to the most
  //   derived object pointed to by v.
  const PointerType *DestPTy = DestTy->getAs<PointerType>();

  bool isDynamicCastToVoid;
  QualType SrcRecordTy;
  QualType DestRecordTy;
  if (DestPTy) {
    isDynamicCastToVoid = DestPTy->getPointeeType()->isVoidType();
    SrcRecordTy = SrcTy->castAs<PointerType>()->getPointeeType();
    DestRecordTy = DestPTy->getPointeeType();
  } else {
    isDynamicCastToVoid = false;
    SrcRecordTy = SrcTy;
    DestRecordTy = DestTy->castAs<ReferenceType>()->getPointeeType();
  }

  // C++ [class.cdtor]p5:
  //   If the operand of the dynamic_cast refers to the object under
  //   construction or destruction and the static type of the operand is not a
  //   pointer to or object of the constructor or destructor’s own class or one
  //   of its bases, the dynamic_cast results in undefined behavior.
  EmitTypeCheck(TCK_DynamicOperation, DCE->getExprLoc(), ThisAddr.getPointer(),
                SrcRecordTy);

  if (DCE->isAlwaysNull())
    if (llvm::Value *T = EmitDynamicCastToNull(*this, DestTy))
      return T;

  assert(SrcRecordTy->isRecordType() && "source type must be a record type!");

  // C++ [expr.dynamic.cast]p4:
  //   If the value of v is a null pointer value in the pointer case, the result
  //   is the null pointer value of type T.
  bool ShouldNullCheckSrcValue =
      CGM.getCXXABI().shouldDynamicCastCallBeNullChecked(SrcTy->isPointerType(),
                                                         SrcRecordTy);

  llvm::BasicBlock *CastNull = nullptr;
  llvm::BasicBlock *CastNotNull = nullptr;
  llvm::BasicBlock *CastEnd = createBasicBlock("dynamic_cast.end");

  if (ShouldNullCheckSrcValue) {
    CastNull = createBasicBlock("dynamic_cast.null");
    CastNotNull = createBasicBlock("dynamic_cast.notnull");

    llvm::Value *IsNull = Builder.CreateIsNull(ThisAddr.getPointer());
    Builder.CreateCondBr(IsNull, CastNull, CastNotNull);
    EmitBlock(CastNotNull);
  }

  llvm::Value *Value;
  if (isDynamicCastToVoid) {
    Value = CGM.getCXXABI().EmitDynamicCastToVoid(*this, ThisAddr, SrcRecordTy,
                                                  DestTy);
  } else {
    assert(DestRecordTy->isRecordType() &&
           "destination type must be a record type!");
    Value = CGM.getCXXABI().EmitDynamicCastCall(*this, ThisAddr, SrcRecordTy,
                                                DestTy, DestRecordTy, CastEnd);
    CastNotNull = Builder.GetInsertBlock();
  }

  if (ShouldNullCheckSrcValue) {
    EmitBranch(CastEnd);
    EmitBlock(CastNull);
    EmitBranch(CastEnd);
  }

  EmitBlock(CastEnd);

  if (ShouldNullCheckSrcValue) {
    llvm::PHINode *PHI = Builder.CreatePHI(Value->getType(), 2);
    PHI->addIncoming(Value, CastNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(Value->getType()), CastNull);
    Value = PHI;
  }

  return Value;
}
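
// For example, for a pointer cast
//
//   Derived *d = dynamic_cast<Derived*>(b);   // b is a Base*
//
// the code above null-checks b ("dynamic_cast.null"/"dynamic_cast.notnull"),
// performs the ABI's cast on the non-null path, and merges the result with
// null in a phi at "dynamic_cast.end". A dynamic_cast to (cv) void* instead
// takes the EmitDynamicCastToVoid path, which typically just adjusts the
// pointer to the most derived object using vtable information.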