
//===---- CGObjC.cpp - Emit LLVM Code for Objective-C ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code to emit Objective-C code as LLVM code.
//
//===----------------------------------------------------------------------===//

#include "CGDebugInfo.h"
#include "CGObjCRuntime.h"
#include "CodeGenFunction.h"
#include "CodeGenModule.h"
#include "ConstantEmitter.h"
#include "TargetInfo.h"
#include "clang/AST/ASTContext.h"
#include "clang/AST/Attr.h"
#include "clang/AST/DeclObjC.h"
#include "clang/AST/StmtObjC.h"
#include "clang/Basic/Diagnostic.h"
#include "clang/CodeGen/CGFunctionInfo.h"
#include "clang/CodeGen/CodeGenABITypes.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Analysis/ObjCARCUtil.h"
#include "llvm/BinaryFormat/MachO.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/InlineAsm.h"
#include <optional>

using namespace clang;
using namespace CodeGen;

typedef llvm::PointerIntPair<llvm::Value*,1,bool> TryEmitResult;
static TryEmitResult
tryEmitARCRetainScalarExpr(CodeGenFunction &CGF, const Expr *e);

static RValue AdjustObjCObjectType(CodeGenFunction &CGF,
                                   QualType ET,
                                   RValue Result);

/// Given the address of a variable of pointer type, find the correct
/// null to store into it.
static llvm::Constant *getNullForVariable(Address addr) {
  llvm::Type *type = addr.getElementType();
  return llvm::ConstantPointerNull::get(cast<llvm::PointerType>(type));
}

/// Emits an instance of NSConstantString representing the object.
llvm::Value *CodeGenFunction::EmitObjCStringLiteral(const ObjCStringLiteral *E)
{
  llvm::Constant *C =
      CGM.getObjCRuntime().GenerateConstantString(E->getString()).getPointer();
  // FIXME: This bitcast should just be made an invariant on the Runtime.
  return llvm::ConstantExpr::getBitCast(C, ConvertType(E->getType()));
}

/// EmitObjCBoxedExpr - This routine generates code to call
/// the appropriate expression boxing method. This will either be
/// one of +[NSNumber numberWith<Type>:], or +[NSString stringWithUTF8String:],
/// or +[NSValue valueWithBytes:objCType:].
///
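// For illustration, some example lowerings (ignoring any constant folding
// performed when the literal is expressible as a constant initializer):
//   @'a', @42, @3.14, @YES -> the matching +[NSNumber numberWith<Type>:]
//   @("hello")             -> +[NSString stringWithUTF8String:]
//   @(someBoxableStruct)   -> +[NSValue valueWithBytes:objCType:]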
llvm::Value *
CodeGenFunction::EmitObjCBoxedExpr(const ObjCBoxedExpr *E) {
  // Generate the correct selector for this literal's concrete type.
  // Get the method.
  const ObjCMethodDecl *BoxingMethod = E->getBoxingMethod();
  const Expr *SubExpr = E->getSubExpr();

  if (E->isExpressibleAsConstantInitializer()) {
    ConstantEmitter ConstEmitter(CGM);
    return ConstEmitter.tryEmitAbstract(E, E->getType());
  }

  assert(BoxingMethod->isClassMethod() && "BoxingMethod must be a class method");
  Selector Sel = BoxingMethod->getSelector();

  // Generate a reference to the class pointer, which will be the receiver.
  // Assumes that the method was introduced in the class that should be
  // messaged (avoids pulling it out of the result type).
  CGObjCRuntime &Runtime = CGM.getObjCRuntime();
  const ObjCInterfaceDecl *ClassDecl = BoxingMethod->getClassInterface();
  llvm::Value *Receiver = Runtime.GetClass(*this, ClassDecl);

  CallArgList Args;
  const ParmVarDecl *ArgDecl = *BoxingMethod->param_begin();
  QualType ArgQT = ArgDecl->getType().getUnqualifiedType();

  // ObjCBoxedExpr supports boxing of structs and unions
  // via [NSValue valueWithBytes:objCType:]
  const QualType ValueType(SubExpr->getType().getCanonicalType());
  if (ValueType->isObjCBoxableRecordType()) {
    // Emit CodeGen for first parameter
    // and cast value to correct type
    Address Temporary = CreateMemTemp(SubExpr->getType());
    EmitAnyExprToMem(SubExpr, Temporary, Qualifiers(), /*isInit*/ true);
    llvm::Value *BitCast =
        Builder.CreateBitCast(Temporary.getPointer(), ConvertType(ArgQT));
    Args.add(RValue::get(BitCast), ArgQT);

    // Create char array to store type encoding
    std::string Str;
    getContext().getObjCEncodingForType(ValueType, Str);
    llvm::Constant *GV = CGM.GetAddrOfConstantCString(Str).getPointer();

    // Cast type encoding to correct type
    const ParmVarDecl *EncodingDecl = BoxingMethod->parameters()[1];
    QualType EncodingQT = EncodingDecl->getType().getUnqualifiedType();
    llvm::Value *Cast = Builder.CreateBitCast(GV, ConvertType(EncodingQT));

    Args.add(RValue::get(Cast), EncodingQT);
  } else {
    Args.add(EmitAnyExpr(SubExpr), ArgQT);
  }

  RValue result = Runtime.GenerateMessageSend(
      *this, ReturnValueSlot(), BoxingMethod->getReturnType(), Sel, Receiver,
      Args, ClassDecl, BoxingMethod);
  return Builder.CreateBitCast(result.getScalarVal(),
                               ConvertType(E->getType()));
}
llvm::Value *CodeGenFunction::EmitObjCCollectionLiteral(const Expr *E,
                                    const ObjCMethodDecl *MethodWithObjects) {
  ASTContext &Context = CGM.getContext();
  const ObjCDictionaryLiteral *DLE = nullptr;
  const ObjCArrayLiteral *ALE = dyn_cast<ObjCArrayLiteral>(E);
  if (!ALE)
    DLE = cast<ObjCDictionaryLiteral>(E);

  // Optimize empty collections by referencing constants, when available.
  uint64_t NumElements =
      ALE ? ALE->getNumElements() : DLE->getNumElements();
  if (NumElements == 0 && CGM.getLangOpts().ObjCRuntime.hasEmptyCollections()) {
    StringRef ConstantName = ALE ? "__NSArray0__" : "__NSDictionary0__";
    QualType IdTy(CGM.getContext().getObjCIdType());
    llvm::Constant *Constant =
        CGM.CreateRuntimeVariable(ConvertType(IdTy), ConstantName);
    LValue LV = MakeNaturalAlignAddrLValue(Constant, IdTy);
    llvm::Value *Ptr = EmitLoadOfScalar(LV, E->getBeginLoc());
    cast<llvm::LoadInst>(Ptr)->setMetadata(
        CGM.getModule().getMDKindID("invariant.load"),
        llvm::MDNode::get(getLLVMContext(), std::nullopt));
    return Builder.CreateBitCast(Ptr, ConvertType(E->getType()));
  }

  // Compute the type of the array we're initializing.
  llvm::APInt APNumElements(Context.getTypeSize(Context.getSizeType()),
                            NumElements);
  QualType ElementType = Context.getObjCIdType().withConst();
  QualType ElementArrayType
    = Context.getConstantArrayType(ElementType, APNumElements, nullptr,
                                   ArrayType::Normal, /*IndexTypeQuals=*/0);

  // Allocate the temporary array(s).
  Address Objects = CreateMemTemp(ElementArrayType, "objects");
  Address Keys = Address::invalid();
  if (DLE)
    Keys = CreateMemTemp(ElementArrayType, "keys");

  // In ARC, we may need to do extra work to keep all the keys and
  // values alive until after the call.
  SmallVector<llvm::Value *, 16> NeededObjects;
  bool TrackNeededObjects =
      (getLangOpts().ObjCAutoRefCount &&
       CGM.getCodeGenOpts().OptimizationLevel != 0);

  // Perform the actual initialization of the array(s).
  for (uint64_t i = 0; i < NumElements; i++) {
    if (ALE) {
      // Emit the element and store it to the appropriate array slot.
      const Expr *Rhs = ALE->getElement(i);
      LValue LV = MakeAddrLValue(Builder.CreateConstArrayGEP(Objects, i),
                                 ElementType, AlignmentSource::Decl);

      llvm::Value *value = EmitScalarExpr(Rhs);
      EmitStoreThroughLValue(RValue::get(value), LV, true);
      if (TrackNeededObjects) {
        NeededObjects.push_back(value);
      }
    } else {
      // Emit the key and store it to the appropriate array slot.
      const Expr *Key = DLE->getKeyValueElement(i).Key;
      LValue KeyLV = MakeAddrLValue(Builder.CreateConstArrayGEP(Keys, i),
                                    ElementType, AlignmentSource::Decl);
      llvm::Value *keyValue = EmitScalarExpr(Key);
      EmitStoreThroughLValue(RValue::get(keyValue), KeyLV, /*isInit=*/true);

      // Emit the value and store it to the appropriate array slot.
      const Expr *Value = DLE->getKeyValueElement(i).Value;
      LValue ValueLV = MakeAddrLValue(Builder.CreateConstArrayGEP(Objects, i),
                                      ElementType, AlignmentSource::Decl);
      llvm::Value *valueValue = EmitScalarExpr(Value);
      EmitStoreThroughLValue(RValue::get(valueValue), ValueLV, /*isInit=*/true);
      if (TrackNeededObjects) {
        NeededObjects.push_back(keyValue);
        NeededObjects.push_back(valueValue);
      }
    }
  }

  // Generate the argument list.
  CallArgList Args;
  ObjCMethodDecl::param_const_iterator PI = MethodWithObjects->param_begin();
  const ParmVarDecl *argDecl = *PI++;
  QualType ArgQT = argDecl->getType().getUnqualifiedType();
  Args.add(RValue::get(Objects.getPointer()), ArgQT);
  if (DLE) {
    argDecl = *PI++;
    ArgQT = argDecl->getType().getUnqualifiedType();
    Args.add(RValue::get(Keys.getPointer()), ArgQT);
  }
  argDecl = *PI;
  ArgQT = argDecl->getType().getUnqualifiedType();
  llvm::Value *Count =
    llvm::ConstantInt::get(CGM.getTypes().ConvertType(ArgQT), NumElements);
  Args.add(RValue::get(Count), ArgQT);

  // Generate a reference to the class pointer, which will be the receiver.
  Selector Sel = MethodWithObjects->getSelector();
  QualType ResultType = E->getType();
  const ObjCObjectPointerType *InterfacePointerType
    = ResultType->getAsObjCInterfacePointerType();
  ObjCInterfaceDecl *Class
    = InterfacePointerType->getObjectType()->getInterface();
  CGObjCRuntime &Runtime = CGM.getObjCRuntime();
  llvm::Value *Receiver = Runtime.GetClass(*this, Class);

  // Generate the message send.
  RValue result = Runtime.GenerateMessageSend(
      *this, ReturnValueSlot(), MethodWithObjects->getReturnType(), Sel,
      Receiver, Args, Class, MethodWithObjects);

  // The above message send needs these objects, but in ARC they are
  // passed in a buffer that is essentially __unsafe_unretained.
  // Therefore we must prevent the optimizer from releasing them until
  // after the call.
  if (TrackNeededObjects) {
    EmitARCIntrinsicUse(NeededObjects);
  }

  return Builder.CreateBitCast(result.getScalarVal(),
                               ConvertType(E->getType()));
}

llvm::Value *CodeGenFunction::EmitObjCArrayLiteral(const ObjCArrayLiteral *E) {
  return EmitObjCCollectionLiteral(E, E->getArrayWithObjectsMethod());
}

llvm::Value *CodeGenFunction::EmitObjCDictionaryLiteral(
                                            const ObjCDictionaryLiteral *E) {
  return EmitObjCCollectionLiteral(E, E->getDictWithObjectsMethod());
}
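// For illustration, roughly how the literals are lowered:
//   @[ a, b ]  -> id objs[2] = {a, b};
//                 [NSArray arrayWithObjects:objs count:2]
//   @{ k : v } -> id keys[1] = {k}; id objs[1] = {v};
//                 [NSDictionary dictionaryWithObjects:objs forKeys:keys count:1]
// with the __NSArray0__/__NSDictionary0__ constants short-circuiting the
// empty cases on runtimes that provide them.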
/// Emit a selector.
llvm::Value *CodeGenFunction::EmitObjCSelectorExpr(const ObjCSelectorExpr *E) {
  // Untyped selector.
  // Note that this implementation allows for non-constant strings to be passed
  // as arguments to @selector(). Currently, the only thing preventing this
  // behaviour is the type checking in the front end.
  return CGM.getObjCRuntime().GetSelector(*this, E->getSelector());
}

llvm::Value *CodeGenFunction::EmitObjCProtocolExpr(const ObjCProtocolExpr *E) {
  // FIXME: This should pass the Decl not the name.
  return CGM.getObjCRuntime().GenerateProtocolRef(*this, E->getProtocol());
}

/// Adjust the type of an Objective-C object that doesn't match up due
/// to type erasure at various points, e.g., related result types or the use
/// of parameterized classes.
static RValue AdjustObjCObjectType(CodeGenFunction &CGF, QualType ExpT,
                                   RValue Result) {
  if (!ExpT->isObjCRetainableType())
    return Result;

  // If the converted types are the same, we're done.
  llvm::Type *ExpLLVMTy = CGF.ConvertType(ExpT);
  if (ExpLLVMTy == Result.getScalarVal()->getType())
    return Result;

  // We have applied a substitution. Cast the rvalue appropriately.
  return RValue::get(CGF.Builder.CreateBitCast(Result.getScalarVal(),
                                               ExpLLVMTy));
}
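// For example, -init has the related result type 'instancetype', so
// '[[NSString alloc] init]' is statically typed as 'NSString *' while the
// method's IR-level return type is 'id'; the bitcast above reconciles the
// two representations.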
/// Decide whether to extend the lifetime of the receiver of a
/// returns-inner-pointer message.
static bool
shouldExtendReceiverForInnerPointerMessage(const ObjCMessageExpr *message) {
  switch (message->getReceiverKind()) {
  // For a normal instance message, we should extend unless the
  // receiver is loaded from a variable with precise lifetime.
  case ObjCMessageExpr::Instance: {
    const Expr *receiver = message->getInstanceReceiver();

    // Look through OVEs.
    if (auto opaque = dyn_cast<OpaqueValueExpr>(receiver)) {
      if (opaque->getSourceExpr())
        receiver = opaque->getSourceExpr()->IgnoreParens();
    }

    const ImplicitCastExpr *ice = dyn_cast<ImplicitCastExpr>(receiver);
    if (!ice || ice->getCastKind() != CK_LValueToRValue) return true;
    receiver = ice->getSubExpr()->IgnoreParens();

    // Look through OVEs.
    if (auto opaque = dyn_cast<OpaqueValueExpr>(receiver)) {
      if (opaque->getSourceExpr())
        receiver = opaque->getSourceExpr()->IgnoreParens();
    }

    // Only __strong variables.
    if (receiver->getType().getObjCLifetime() != Qualifiers::OCL_Strong)
      return true;

    // All ivars and fields have precise lifetime.
    if (isa<MemberExpr>(receiver) || isa<ObjCIvarRefExpr>(receiver))
      return false;

    // Otherwise, check for variables.
    const DeclRefExpr *declRef = dyn_cast<DeclRefExpr>(ice->getSubExpr());
    if (!declRef) return true;
    const VarDecl *var = dyn_cast<VarDecl>(declRef->getDecl());
    if (!var) return true;

    // All variables have precise lifetime except local variables with
    // automatic storage duration that aren't specially marked.
    return (var->hasLocalStorage() &&
            !var->hasAttr<ObjCPreciseLifetimeAttr>());
  }

  case ObjCMessageExpr::Class:
  case ObjCMessageExpr::SuperClass:
    // It's never necessary for class objects.
    return false;

  case ObjCMessageExpr::SuperInstance:
    // We generally assume that 'self' lives throughout a method call.
    return false;
  }

  llvm_unreachable("invalid receiver kind");
}
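// For example, in 'const char *p = [localStr UTF8String];' where 'localStr'
// is an ordinary __strong local, the result points into the receiver's
// storage, so the receiver gets retained+autoreleased to keep that interior
// pointer valid past the last formal use of 'localStr'.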
/// Given an expression of ObjC pointer type, check whether it was
/// immediately loaded from an ARC __weak l-value.
static const Expr *findWeakLValue(const Expr *E) {
  assert(E->getType()->isObjCRetainableType());
  E = E->IgnoreParens();
  if (auto CE = dyn_cast<CastExpr>(E)) {
    if (CE->getCastKind() == CK_LValueToRValue) {
      if (CE->getSubExpr()->getType().getObjCLifetime() == Qualifiers::OCL_Weak)
        return CE->getSubExpr();
    }
  }
  return nullptr;
}

/// The ObjC runtime may provide entrypoints that are likely to be faster
/// than an ordinary message send of the appropriate selector.
///
/// The entrypoints are guaranteed to be equivalent to just sending the
/// corresponding message. If the entrypoint is implemented naively as just a
/// message send, using it is a trade-off: it sacrifices a few cycles of
/// overhead to save a small amount of code. However, it's possible for
/// runtimes to detect and special-case classes that use "standard"
/// behavior; if that's dynamically a large proportion of all objects, using
/// the entrypoint will also be faster than using a message send.
///
/// If the runtime does support a required entrypoint, then this method will
/// generate a call and return the resulting value. Otherwise it will return
/// std::nullopt and the caller can generate a msgSend instead.
static std::optional<llvm::Value *> tryGenerateSpecializedMessageSend(
    CodeGenFunction &CGF, QualType ResultType, llvm::Value *Receiver,
    const CallArgList &Args, Selector Sel, const ObjCMethodDecl *method,
    bool isClassMessage) {
  auto &CGM = CGF.CGM;
  if (!CGM.getCodeGenOpts().ObjCConvertMessagesToRuntimeCalls)
    return std::nullopt;

  auto &Runtime = CGM.getLangOpts().ObjCRuntime;
  switch (Sel.getMethodFamily()) {
  case OMF_alloc:
    if (isClassMessage &&
        Runtime.shouldUseRuntimeFunctionsForAlloc() &&
        ResultType->isObjCObjectPointerType()) {
      // [Foo alloc] -> objc_alloc(Foo) or
      // [self alloc] -> objc_alloc(self)
      if (Sel.isUnarySelector() && Sel.getNameForSlot(0) == "alloc")
        return CGF.EmitObjCAlloc(Receiver, CGF.ConvertType(ResultType));
      // [Foo allocWithZone:nil] -> objc_allocWithZone(Foo) or
      // [self allocWithZone:nil] -> objc_allocWithZone(self)
      if (Sel.isKeywordSelector() && Sel.getNumArgs() == 1 &&
          Args.size() == 1 && Args.front().getType()->isPointerType() &&
          Sel.getNameForSlot(0) == "allocWithZone") {
        const llvm::Value* arg = Args.front().getKnownRValue().getScalarVal();
        if (isa<llvm::ConstantPointerNull>(arg))
          return CGF.EmitObjCAllocWithZone(Receiver,
                                           CGF.ConvertType(ResultType));
        return std::nullopt;
      }
    }
    break;

  case OMF_autorelease:
    if (ResultType->isObjCObjectPointerType() &&
        CGM.getLangOpts().getGC() == LangOptions::NonGC &&
        Runtime.shouldUseARCFunctionsForRetainRelease())
      return CGF.EmitObjCAutorelease(Receiver, CGF.ConvertType(ResultType));
    break;

  case OMF_retain:
    if (ResultType->isObjCObjectPointerType() &&
        CGM.getLangOpts().getGC() == LangOptions::NonGC &&
        Runtime.shouldUseARCFunctionsForRetainRelease())
      return CGF.EmitObjCRetainNonBlock(Receiver, CGF.ConvertType(ResultType));
    break;

  case OMF_release:
    if (ResultType->isVoidType() &&
        CGM.getLangOpts().getGC() == LangOptions::NonGC &&
        Runtime.shouldUseARCFunctionsForRetainRelease()) {
      CGF.EmitObjCRelease(Receiver, ARCPreciseLifetime);
      // The call was emitted but -release returns void, so hand back a null
      // value (distinct from std::nullopt, which would tell the caller to
      // emit an ordinary message send).
      return nullptr;
    }
    break;

  default:
    break;
  }
  return std::nullopt;
}

CodeGen::RValue CGObjCRuntime::GeneratePossiblySpecializedMessageSend(
    CodeGenFunction &CGF, ReturnValueSlot Return, QualType ResultType,
    Selector Sel, llvm::Value *Receiver, const CallArgList &Args,
    const ObjCInterfaceDecl *OID, const ObjCMethodDecl *Method,
    bool isClassMessage) {
  if (std::optional<llvm::Value *> SpecializedResult =
          tryGenerateSpecializedMessageSend(CGF, ResultType, Receiver, Args,
                                            Sel, Method, isClassMessage)) {
    return RValue::get(*SpecializedResult);
  }
  return GenerateMessageSend(CGF, Return, ResultType, Sel, Receiver, Args, OID,
                             Method);
}

static void AppendFirstImpliedRuntimeProtocols(
    const ObjCProtocolDecl *PD,
    llvm::UniqueVector<const ObjCProtocolDecl *> &PDs) {
  if (!PD->isNonRuntimeProtocol()) {
    const auto *Can = PD->getCanonicalDecl();
    PDs.insert(Can);
    return;
  }

  for (const auto *ParentPD : PD->protocols())
    AppendFirstImpliedRuntimeProtocols(ParentPD, PDs);
}

std::vector<const ObjCProtocolDecl *>
CGObjCRuntime::GetRuntimeProtocolList(ObjCProtocolDecl::protocol_iterator begin,
                                      ObjCProtocolDecl::protocol_iterator end) {
  std::vector<const ObjCProtocolDecl *> RuntimePds;
  llvm::DenseSet<const ObjCProtocolDecl *> NonRuntimePDs;

  for (; begin != end; ++begin) {
    const auto *It = *begin;
    const auto *Can = It->getCanonicalDecl();
    if (Can->isNonRuntimeProtocol())
      NonRuntimePDs.insert(Can);
    else
      RuntimePds.push_back(Can);
  }

  // If there are no non-runtime protocols then we can just stop now.
  if (NonRuntimePDs.empty())
    return RuntimePds;

  // Otherwise we have to search through the non-runtime protocols' inheritance
  // hierarchy DAG, stopping whenever a branch either finds a runtime protocol
  // or a non-runtime protocol without any parents. These are the
  // "first-implied" protocols from a non-runtime protocol.
  llvm::UniqueVector<const ObjCProtocolDecl *> FirstImpliedProtos;
  for (const auto *PD : NonRuntimePDs)
    AppendFirstImpliedRuntimeProtocols(PD, FirstImpliedProtos);

  // Walk the runtime list to get all protocols implied via the inclusion of
  // this protocol, i.e. all protocols it inherits from, including itself.
  llvm::DenseSet<const ObjCProtocolDecl *> AllImpliedProtocols;
  for (const auto *PD : RuntimePds) {
    const auto *Can = PD->getCanonicalDecl();
    AllImpliedProtocols.insert(Can);
    Can->getImpliedProtocols(AllImpliedProtocols);
  }

  // Similar to above, walk the list of first-implied protocols to find the
  // set of all protocols implied, excluding the listed protocols themselves
  // since they are not yet a part of the `RuntimePds` list.
  for (const auto *PD : FirstImpliedProtos) {
    PD->getImpliedProtocols(AllImpliedProtocols);
  }

  // From the first-implied list we have to finish building the final protocol
  // list. If a protocol in the first-implied list was already implied via some
  // inheritance path through some other protocols then it would be redundant
  // to add it here and so we skip over it.
  for (const auto *PD : FirstImpliedProtos) {
    if (!AllImpliedProtocols.contains(PD)) {
      RuntimePds.push_back(PD);
    }
  }

  return RuntimePds;
}
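// For example, given a non-runtime protocol P declared as
// '@protocol P <Q, R>' with Q and R both runtime protocols, a reference to
// P contributes Q and R to the runtime list; if Q already inherits from R,
// then R is implied by Q and is skipped as redundant.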
/// Instead of '[[MyClass alloc] init]', try to generate
/// 'objc_alloc_init(MyClass)'. This provides a code size improvement on the
/// caller side, as well as the optimized objc_alloc.
static std::optional<llvm::Value *>
tryEmitSpecializedAllocInit(CodeGenFunction &CGF, const ObjCMessageExpr *OME) {
  auto &Runtime = CGF.getLangOpts().ObjCRuntime;
  if (!Runtime.shouldUseRuntimeFunctionForCombinedAllocInit())
    return std::nullopt;

  // Match the exact pattern '[[MyClass alloc] init]'.
  Selector Sel = OME->getSelector();
  if (OME->getReceiverKind() != ObjCMessageExpr::Instance ||
      !OME->getType()->isObjCObjectPointerType() || !Sel.isUnarySelector() ||
      Sel.getNameForSlot(0) != "init")
    return std::nullopt;

  // Okay, this is '[receiver init]', check if 'receiver' is '[cls alloc]'
  // with 'cls' a Class.
  auto *SubOME =
      dyn_cast<ObjCMessageExpr>(OME->getInstanceReceiver()->IgnoreParenCasts());
  if (!SubOME)
    return std::nullopt;
  Selector SubSel = SubOME->getSelector();

  if (!SubOME->getType()->isObjCObjectPointerType() ||
      !SubSel.isUnarySelector() || SubSel.getNameForSlot(0) != "alloc")
    return std::nullopt;

  llvm::Value *Receiver = nullptr;
  switch (SubOME->getReceiverKind()) {
  case ObjCMessageExpr::Instance:
    if (!SubOME->getInstanceReceiver()->getType()->isObjCClassType())
      return std::nullopt;
    Receiver = CGF.EmitScalarExpr(SubOME->getInstanceReceiver());
    break;

  case ObjCMessageExpr::Class: {
    QualType ReceiverType = SubOME->getClassReceiver();
    const ObjCObjectType *ObjTy = ReceiverType->castAs<ObjCObjectType>();
    const ObjCInterfaceDecl *ID = ObjTy->getInterface();
    assert(ID && "null interface should be impossible here");
    Receiver = CGF.CGM.getObjCRuntime().GetClass(CGF, ID);
    break;
  }
  case ObjCMessageExpr::SuperInstance:
  case ObjCMessageExpr::SuperClass:
    return std::nullopt;
  }

  return CGF.EmitObjCAllocInit(Receiver, CGF.ConvertType(OME->getType()));
}

RValue CodeGenFunction::EmitObjCMessageExpr(const ObjCMessageExpr *E,
                                            ReturnValueSlot Return) {
  // Only the lookup mechanism and first two arguments of the method
  // implementation vary between runtimes. We can get the receiver and
  // arguments in generic code.

  bool isDelegateInit = E->isDelegateInitCall();

  const ObjCMethodDecl *method = E->getMethodDecl();

  // If the method is -retain, and the receiver's being loaded from
  // a __weak variable, peephole the entire operation to objc_loadWeakRetained.
  if (method && E->getReceiverKind() == ObjCMessageExpr::Instance &&
      method->getMethodFamily() == OMF_retain) {
    if (auto lvalueExpr = findWeakLValue(E->getInstanceReceiver())) {
      LValue lvalue = EmitLValue(lvalueExpr);
      llvm::Value *result = EmitARCLoadWeakRetained(lvalue.getAddress(*this));
      return AdjustObjCObjectType(*this, E->getType(), RValue::get(result));
    }
  }

  if (std::optional<llvm::Value *> Val = tryEmitSpecializedAllocInit(*this, E))
    return AdjustObjCObjectType(*this, E->getType(), RValue::get(*Val));

  // We don't retain the receiver in delegate init calls, and this is
  // safe because the receiver value is always loaded from 'self',
  // which we zero out. We don't want to Block_copy block receivers,
  // though.
  bool retainSelf =
      (!isDelegateInit &&
       CGM.getLangOpts().ObjCAutoRefCount &&
       method &&
       method->hasAttr<NSConsumesSelfAttr>());

  CGObjCRuntime &Runtime = CGM.getObjCRuntime();
  bool isSuperMessage = false;
  bool isClassMessage = false;
  ObjCInterfaceDecl *OID = nullptr;
  // Find the receiver
  QualType ReceiverType;
  llvm::Value *Receiver = nullptr;
  switch (E->getReceiverKind()) {
  case ObjCMessageExpr::Instance:
    ReceiverType = E->getInstanceReceiver()->getType();
    isClassMessage = ReceiverType->isObjCClassType();
    if (retainSelf) {
      TryEmitResult ter = tryEmitARCRetainScalarExpr(*this,
                                                   E->getInstanceReceiver());
      Receiver = ter.getPointer();
      if (ter.getInt()) retainSelf = false;
    } else
      Receiver = EmitScalarExpr(E->getInstanceReceiver());
    break;

  case ObjCMessageExpr::Class: {
    ReceiverType = E->getClassReceiver();
    OID = ReceiverType->castAs<ObjCObjectType>()->getInterface();
    assert(OID && "Invalid Objective-C class message send");
    Receiver = Runtime.GetClass(*this, OID);
    isClassMessage = true;
    break;
  }

  case ObjCMessageExpr::SuperInstance:
    ReceiverType = E->getSuperType();
    Receiver = LoadObjCSelf();
    isSuperMessage = true;
    break;

  case ObjCMessageExpr::SuperClass:
    ReceiverType = E->getSuperType();
    Receiver = LoadObjCSelf();
    isSuperMessage = true;
    isClassMessage = true;
    break;
  }

  if (retainSelf)
    Receiver = EmitARCRetainNonBlock(Receiver);

  // In ARC, we sometimes want to "extend the lifetime"
  // (i.e. retain+autorelease) of receivers of returns-inner-pointer
  // messages.
  if (getLangOpts().ObjCAutoRefCount && method &&
      method->hasAttr<ObjCReturnsInnerPointerAttr>() &&
      shouldExtendReceiverForInnerPointerMessage(E))
    Receiver = EmitARCRetainAutorelease(ReceiverType, Receiver);

  QualType ResultType = method ? method->getReturnType() : E->getType();

  CallArgList Args;
  EmitCallArgs(Args, method, E->arguments(), /*AC*/AbstractCallee(method));

  // For delegate init calls in ARC, do an unsafe store of null into
  // self. This represents the call taking direct ownership of that
  // value. We have to do this after emitting the other call
  // arguments because they might also reference self, but we don't
  // have to worry about any of them modifying self because that would
  // be an undefined read and write of an object in unordered
  // expressions.
  if (isDelegateInit) {
    assert(getLangOpts().ObjCAutoRefCount &&
           "delegate init calls should only be marked in ARC");

    // Do an unsafe store of null into self.
    Address selfAddr =
        GetAddrOfLocalVar(cast<ObjCMethodDecl>(CurCodeDecl)->getSelfDecl());
    Builder.CreateStore(getNullForVariable(selfAddr), selfAddr);
  }

  RValue result;
  if (isSuperMessage) {
    // super is only valid in an Objective-C method
    const ObjCMethodDecl *OMD = cast<ObjCMethodDecl>(CurFuncDecl);
    bool isCategoryImpl = isa<ObjCCategoryImplDecl>(OMD->getDeclContext());
    result = Runtime.GenerateMessageSendSuper(*this, Return, ResultType,
                                              E->getSelector(),
                                              OMD->getClassInterface(),
                                              isCategoryImpl,
                                              Receiver,
                                              isClassMessage,
                                              Args,
                                              method);
  } else {
    // Call runtime methods directly if we can.
    result = Runtime.GeneratePossiblySpecializedMessageSend(
        *this, Return, ResultType, E->getSelector(), Receiver, Args, OID,
        method, isClassMessage);
  }

  // For delegate init calls in ARC, implicitly store the result of
  // the call back into self. This takes ownership of the value.
  if (isDelegateInit) {
    Address selfAddr =
        GetAddrOfLocalVar(cast<ObjCMethodDecl>(CurCodeDecl)->getSelfDecl());
    llvm::Value *newSelf = result.getScalarVal();

    // The delegate return type isn't necessarily a matching type; in
    // fact, it's quite likely to be 'id'.
    llvm::Type *selfTy = selfAddr.getElementType();
    newSelf = Builder.CreateBitCast(newSelf, selfTy);

    Builder.CreateStore(newSelf, selfAddr);
  }

  return AdjustObjCObjectType(*this, E->getType(), result);
}
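// ARC forbids writing '[super dealloc]' explicitly, so the cleanup below
// emits the superclass dealloc implicitly at the end of a -dealloc method;
// it is pushed from StartObjCMethod when compiling dealloc under ARC.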
namespace {
struct FinishARCDealloc final : EHScopeStack::Cleanup {
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    const ObjCMethodDecl *method = cast<ObjCMethodDecl>(CGF.CurCodeDecl);

    const ObjCImplDecl *impl = cast<ObjCImplDecl>(method->getDeclContext());
    const ObjCInterfaceDecl *iface = impl->getClassInterface();
    if (!iface->getSuperClass()) return;

    bool isCategory = isa<ObjCCategoryImplDecl>(impl);

    // Call [super dealloc] if we have a superclass.
    llvm::Value *self = CGF.LoadObjCSelf();

    CallArgList args;
    CGF.CGM.getObjCRuntime().GenerateMessageSendSuper(CGF, ReturnValueSlot(),
                                                      CGF.getContext().VoidTy,
                                                      method->getSelector(),
                                                      iface,
                                                      isCategory,
                                                      self,
                                                      /*is class msg*/ false,
                                                      args,
                                                      method);
  }
};
}

/// StartObjCMethod - Begin emission of an ObjCMethod. This generates
/// the LLVM function and sets the other context used by
/// CodeGenFunction.
void CodeGenFunction::StartObjCMethod(const ObjCMethodDecl *OMD,
                                      const ObjCContainerDecl *CD) {
  SourceLocation StartLoc = OMD->getBeginLoc();
  FunctionArgList args;
  // Check if we should generate debug info for this method.
  if (OMD->hasAttr<NoDebugAttr>())
    DebugInfo = nullptr; // disable debug info indefinitely for this function

  llvm::Function *Fn = CGM.getObjCRuntime().GenerateMethod(OMD, CD);

  const CGFunctionInfo &FI = CGM.getTypes().arrangeObjCMethodDeclaration(OMD);
  if (OMD->isDirectMethod()) {
    Fn->setVisibility(llvm::Function::HiddenVisibility);
    CGM.SetLLVMFunctionAttributes(OMD, FI, Fn, /*IsThunk=*/false);
    CGM.SetLLVMFunctionAttributesForDefinition(OMD, Fn);
  } else {
    CGM.SetInternalFunctionAttributes(OMD, Fn, FI);
  }

  args.push_back(OMD->getSelfDecl());
  if (!OMD->isDirectMethod())
    args.push_back(OMD->getCmdDecl());

  args.append(OMD->param_begin(), OMD->param_end());

  CurGD = OMD;
  CurEHLocation = OMD->getEndLoc();

  StartFunction(OMD, OMD->getReturnType(), Fn, FI, args,
                OMD->getLocation(), StartLoc);

  if (OMD->isDirectMethod()) {
    // This function is a direct call, it has to implement a nil check
    // on entry.
    //
    // TODO: possibly have several entry points to elide the check
    CGM.getObjCRuntime().GenerateDirectMethodPrologue(*this, Fn, OMD, CD);
  }

  // In ARC, certain methods get an extra cleanup.
  if (CGM.getLangOpts().ObjCAutoRefCount &&
      OMD->isInstanceMethod() &&
      OMD->getSelector().isUnarySelector()) {
    const IdentifierInfo *ident =
        OMD->getSelector().getIdentifierInfoForSlot(0);
    if (ident->isStr("dealloc"))
      EHStack.pushCleanup<FinishARCDealloc>(getARCCleanupKind());
  }
}

static llvm::Value *emitARCRetainLoadOfScalar(CodeGenFunction &CGF,
                                              LValue lvalue, QualType type);

/// Generate an Objective-C method. An Objective-C method is a C function with
/// its pointer, name, and types registered in the class structure.
void CodeGenFunction::GenerateObjCMethod(const ObjCMethodDecl *OMD) {
  StartObjCMethod(OMD, OMD->getClassInterface());
  PGO.assignRegionCounters(GlobalDecl(OMD), CurFn);
  assert(isa<CompoundStmt>(OMD->getBody()));
  incrementProfileCounter(OMD->getBody());
  EmitCompoundStmtWithoutScope(*cast<CompoundStmt>(OMD->getBody()));
  FinishFunction(OMD->getBodyRBrace());
}

/// emitStructGetterCall - Call the runtime function to load a property
/// into the return value slot.
static void emitStructGetterCall(CodeGenFunction &CGF, ObjCIvarDecl *ivar,
                                 bool isAtomic, bool hasStrong) {
  ASTContext &Context = CGF.getContext();

  llvm::Value *src =
      CGF.EmitLValueForIvar(CGF.TypeOfSelfObject(), CGF.LoadObjCSelf(), ivar, 0)
          .getPointer(CGF);

  // objc_copyStruct (ReturnValue, &structIvar,
  //                  sizeof (Type of Ivar), isAtomic, false);
  CallArgList args;

  llvm::Value *dest =
      CGF.Builder.CreateBitCast(CGF.ReturnValue.getPointer(), CGF.VoidPtrTy);
  args.add(RValue::get(dest), Context.VoidPtrTy);

  src = CGF.Builder.CreateBitCast(src, CGF.VoidPtrTy);
  args.add(RValue::get(src), Context.VoidPtrTy);

  CharUnits size = CGF.getContext().getTypeSizeInChars(ivar->getType());
  args.add(RValue::get(CGF.CGM.getSize(size)), Context.getSizeType());
  args.add(RValue::get(CGF.Builder.getInt1(isAtomic)), Context.BoolTy);
  args.add(RValue::get(CGF.Builder.getInt1(hasStrong)), Context.BoolTy);

  llvm::FunctionCallee fn = CGF.CGM.getObjCRuntime().GetGetStructFunction();
  CGCallee callee = CGCallee::forDirect(fn);
  CGF.EmitCall(CGF.getTypes().arrangeBuiltinFunctionCall(Context.VoidTy, args),
               callee, ReturnValueSlot(), args);
}

/// Determine whether the given architecture supports unaligned atomic
/// accesses. They don't have to be fast, just faster than a function
/// call and a mutex.
static bool hasUnalignedAtomics(llvm::Triple::ArchType arch) {
  // FIXME: Allow unaligned atomic load/store on x86. (It is not
  // currently supported by the backend.)
  return false;
}

/// Return the maximum size that permits atomic accesses for the given
/// architecture.
static CharUnits getMaxAtomicAccessSize(CodeGenModule &CGM,
                                        llvm::Triple::ArchType arch) {
  // ARM has 8-byte atomic accesses, but it's not clear whether we
  // want to rely on them here.

  // In the default case, just assume that any size up to a pointer is
  // fine given adequate alignment.
  return CharUnits::fromQuantity(CGM.PointerSizeInBytes);
}
  749. namespace {
  750. class PropertyImplStrategy {
  751. public:
  752. enum StrategyKind {
  753. /// The 'native' strategy is to use the architecture's provided
  754. /// reads and writes.
  755. Native,
  756. /// Use objc_setProperty and objc_getProperty.
  757. GetSetProperty,
  758. /// Use objc_setProperty for the setter, but use expression
  759. /// evaluation for the getter.
  760. SetPropertyAndExpressionGet,
  761. /// Use objc_copyStruct.
  762. CopyStruct,
  763. /// The 'expression' strategy is to emit normal assignment or
  764. /// lvalue-to-rvalue expressions.
  765. Expression
  766. };
  767. StrategyKind getKind() const { return StrategyKind(Kind); }
  768. bool hasStrongMember() const { return HasStrong; }
  769. bool isAtomic() const { return IsAtomic; }
  770. bool isCopy() const { return IsCopy; }
  771. CharUnits getIvarSize() const { return IvarSize; }
  772. CharUnits getIvarAlignment() const { return IvarAlignment; }
  773. PropertyImplStrategy(CodeGenModule &CGM,
  774. const ObjCPropertyImplDecl *propImpl);
  775. private:
  776. unsigned Kind : 8;
  777. unsigned IsAtomic : 1;
  778. unsigned IsCopy : 1;
  779. unsigned HasStrong : 1;
  780. CharUnits IvarSize;
  781. CharUnits IvarAlignment;
  782. };
  783. }
  784. /// Pick an implementation strategy for the given property synthesis.
  785. PropertyImplStrategy::PropertyImplStrategy(CodeGenModule &CGM,
  786. const ObjCPropertyImplDecl *propImpl) {
  787. const ObjCPropertyDecl *prop = propImpl->getPropertyDecl();
  788. ObjCPropertyDecl::SetterKind setterKind = prop->getSetterKind();
  789. IsCopy = (setterKind == ObjCPropertyDecl::Copy);
  790. IsAtomic = prop->isAtomic();
  791. HasStrong = false; // doesn't matter here.
  792. // Evaluate the ivar's size and alignment.
  793. ObjCIvarDecl *ivar = propImpl->getPropertyIvarDecl();
  794. QualType ivarType = ivar->getType();
  auto TInfo = CGM.getContext().getTypeInfoInChars(ivarType);
  IvarSize = TInfo.Width;
  IvarAlignment = TInfo.Align;

  // If we have a copy property, we always have to use setProperty.
  // If the property is atomic we need to use getProperty, but in
  // the nonatomic case we can just use expression.
  if (IsCopy) {
    Kind = IsAtomic ? GetSetProperty : SetPropertyAndExpressionGet;
    return;
  }

  // Handle retain.
  if (setterKind == ObjCPropertyDecl::Retain) {
    // In GC-only, there's nothing special that needs to be done.
    if (CGM.getLangOpts().getGC() == LangOptions::GCOnly) {
      // fallthrough

    // In ARC, if the property is non-atomic, use expression emission,
    // which translates to objc_storeStrong.  This isn't required, but
    // it's slightly nicer.
    } else if (CGM.getLangOpts().ObjCAutoRefCount && !IsAtomic) {
      // Using standard expression emission for the setter is only
      // acceptable if the ivar is __strong, which won't be true if
      // the property is annotated with __attribute__((NSObject)).
      // TODO: falling all the way back to objc_setProperty here is
      // just laziness, though; we could still use objc_storeStrong
      // if we hacked it right.
      if (ivarType.getObjCLifetime() == Qualifiers::OCL_Strong)
        Kind = Expression;
      else
        Kind = SetPropertyAndExpressionGet;
      return;

    // Otherwise, we need to at least use setProperty.  However, if
    // the property isn't atomic, we can use normal expression
    // emission for the getter.
    } else if (!IsAtomic) {
      Kind = SetPropertyAndExpressionGet;
      return;

    // Otherwise, we have to use both setProperty and getProperty.
    } else {
      Kind = GetSetProperty;
      return;
    }
  }

  // If we're not atomic, just use expression accesses.
  if (!IsAtomic) {
    Kind = Expression;
    return;
  }

  // Properties on bitfield ivars need to be emitted using expression
  // accesses even if they're nominally atomic.
  if (ivar->isBitField()) {
    Kind = Expression;
    return;
  }

  // GC-qualified or ARC-qualified ivars need to be emitted as
  // expressions.  This actually works out to being atomic anyway,
  // except for ARC __strong, but that should trigger the above code.
  if (ivarType.hasNonTrivialObjCLifetime() ||
      (CGM.getLangOpts().getGC() &&
       CGM.getContext().getObjCGCAttrKind(ivarType))) {
    Kind = Expression;
    return;
  }

  // Compute whether the ivar has strong members.
  if (CGM.getLangOpts().getGC())
    if (const RecordType *recordType = ivarType->getAs<RecordType>())
      HasStrong = recordType->getDecl()->hasObjectMember();

  // We can never access structs with object members with a native
  // access, because we need to use write barriers.  This is what
  // objc_copyStruct is for.
  if (HasStrong) {
    Kind = CopyStruct;
    return;
  }

  // Otherwise, this is target-dependent and based on the size and
  // alignment of the ivar.

  // If the size of the ivar is not a power of two, give up.  We don't
  // want to get into the business of doing compare-and-swaps.
  if (!IvarSize.isPowerOfTwo()) {
    Kind = CopyStruct;
    return;
  }

  llvm::Triple::ArchType arch =
      CGM.getTarget().getTriple().getArch();

  // Most architectures require memory to fit within a single cache
  // line, so the alignment has to be at least the size of the access.
  // Otherwise we have to grab a lock.
  if (IvarAlignment < IvarSize && !hasUnalignedAtomics(arch)) {
    Kind = CopyStruct;
    return;
  }

  // If the ivar's size exceeds the architecture's maximum atomic
  // access size, we have to use CopyStruct.
  if (IvarSize > getMaxAtomicAccessSize(CGM, arch)) {
    Kind = CopyStruct;
    return;
  }

  // Otherwise, we can use native loads and stores.
  Kind = Native;
}
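
// A few concrete selections implied by the logic above (illustrative only;
// the property declarations are hypothetical examples, not taken from any
// particular codebase):
//   @property (atomic, copy) NSString *title;      -> GetSetProperty
//   @property (nonatomic, copy) NSString *title;   -> SetPropertyAndExpressionGet
//   @property (nonatomic, strong) id obj;  // ARC  -> Expression
//   an atomic struct ivar wider than the target's maximum atomic access
//                                                  -> CopyStruct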
/// Generate an Objective-C property getter function.
///
/// The given Decl must be an ObjCImplementationDecl. \@synthesize
/// is illegal within a category.
void CodeGenFunction::GenerateObjCGetter(ObjCImplementationDecl *IMP,
                                         const ObjCPropertyImplDecl *PID) {
  llvm::Constant *AtomicHelperFn =
      CodeGenFunction(CGM).GenerateObjCAtomicGetterCopyHelperFunction(PID);
  ObjCMethodDecl *OMD = PID->getGetterMethodDecl();
  assert(OMD && "Invalid call to generate getter (empty method)");
  StartObjCMethod(OMD, IMP->getClassInterface());

  generateObjCGetterBody(IMP, PID, OMD, AtomicHelperFn);

  FinishFunction(OMD->getEndLoc());
}
static bool hasTrivialGetExpr(const ObjCPropertyImplDecl *propImpl) {
  const Expr *getter = propImpl->getGetterCXXConstructor();
  if (!getter) return true;

  // Sema only makes one of these when the ivar has a C++ class type,
  // so the form is pretty constrained.

  // If the property has a reference type, we might just be binding a
  // reference, in which case the result will be a gl-value.  We should
  // treat this as a non-trivial operation.
  if (getter->isGLValue())
    return false;

  // If we selected a trivial copy-constructor, we're okay.
  if (const CXXConstructExpr *construct = dyn_cast<CXXConstructExpr>(getter))
    return (construct->getConstructor()->isTrivial());

  // The constructor might require cleanups (in which case it's never
  // trivial).
  assert(isa<ExprWithCleanups>(getter));
  return false;
}
/// emitCPPObjectAtomicGetterCall - Call the runtime function to
/// copy the ivar into the return slot.
static void emitCPPObjectAtomicGetterCall(CodeGenFunction &CGF,
                                          llvm::Value *returnAddr,
                                          ObjCIvarDecl *ivar,
                                          llvm::Constant *AtomicHelperFn) {
  // objc_copyCppObjectAtomic (&returnSlot, &CppObjectIvar,
  //                           AtomicHelperFn);
  CallArgList args;

  // The first argument is the return slot.
  args.add(RValue::get(returnAddr), CGF.getContext().VoidPtrTy);

  // The second argument is the address of the ivar.
  llvm::Value *ivarAddr =
      CGF.EmitLValueForIvar(CGF.TypeOfSelfObject(), CGF.LoadObjCSelf(), ivar, 0)
          .getPointer(CGF);
  ivarAddr = CGF.Builder.CreateBitCast(ivarAddr, CGF.Int8PtrTy);
  args.add(RValue::get(ivarAddr), CGF.getContext().VoidPtrTy);

  // The third argument is the helper function.
  args.add(RValue::get(AtomicHelperFn), CGF.getContext().VoidPtrTy);

  llvm::FunctionCallee copyCppAtomicObjectFn =
      CGF.CGM.getObjCRuntime().GetCppAtomicObjectGetFunction();
  CGCallee callee = CGCallee::forDirect(copyCppAtomicObjectFn);
  CGF.EmitCall(
      CGF.getTypes().arrangeBuiltinFunctionCall(CGF.getContext().VoidTy, args),
      callee, ReturnValueSlot(), args);
}
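
// For reference, the entry point obtained via GetCppAtomicObjectGetFunction()
// is expected to have roughly this shape (the exact declaration may vary by
// runtime):
//   void objc_copyCppObjectAtomic(void *dest, const void *src,
//                                 void (*copyHelper)(void *dest,
//                                                    const void *source));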
// emitCmdValueForGetterSetterBody - Handle emitting the load necessary for
// the `_cmd` selector argument for getter/setter bodies. For direct methods,
// this returns an undefined/poison value; this matches behavior prior to
// `_cmd` being removed from the direct method ABI, as the getter/setter
// caller would never load one. For non-direct methods, this emits a load of
// the implicit `_cmd` storage.
static llvm::Value *emitCmdValueForGetterSetterBody(CodeGenFunction &CGF,
                                                    ObjCMethodDecl *MD) {
  if (MD->isDirectMethod()) {
    // Direct methods do not have a `_cmd` argument. Emit an undefined/poison
    // value. This will be passed to objc_getProperty/objc_setProperty, which
    // has never appeared to be bothered by `_cmd` being undefined.
    llvm::Type *selType = CGF.ConvertType(CGF.getContext().getObjCSelType());
    return llvm::PoisonValue::get(selType);
  }

  return CGF.Builder.CreateLoad(CGF.GetAddrOfLocalVar(MD->getCmdDecl()), "cmd");
}
void
CodeGenFunction::generateObjCGetterBody(const ObjCImplementationDecl *classImpl,
                                        const ObjCPropertyImplDecl *propImpl,
                                        const ObjCMethodDecl *GetterMethodDecl,
                                        llvm::Constant *AtomicHelperFn) {
  ObjCIvarDecl *ivar = propImpl->getPropertyIvarDecl();
  if (ivar->getType().isNonTrivialToPrimitiveCopy() == QualType::PCK_Struct) {
    if (!AtomicHelperFn) {
      LValue Src =
          EmitLValueForIvar(TypeOfSelfObject(), LoadObjCSelf(), ivar, 0);
      LValue Dst = MakeAddrLValue(ReturnValue, ivar->getType());
      callCStructCopyConstructor(Dst, Src);
    } else {
      ObjCIvarDecl *ivar = propImpl->getPropertyIvarDecl();
      emitCPPObjectAtomicGetterCall(*this, ReturnValue.getPointer(), ivar,
                                    AtomicHelperFn);
    }
    return;
  }

  // If there's a non-trivial 'get' expression, we just have to emit that.
  if (!hasTrivialGetExpr(propImpl)) {
    if (!AtomicHelperFn) {
      auto *ret = ReturnStmt::Create(getContext(), SourceLocation(),
                                     propImpl->getGetterCXXConstructor(),
                                     /* NRVOCandidate=*/nullptr);
      EmitReturnStmt(*ret);
    } else {
      ObjCIvarDecl *ivar = propImpl->getPropertyIvarDecl();
      emitCPPObjectAtomicGetterCall(*this, ReturnValue.getPointer(),
                                    ivar, AtomicHelperFn);
    }
    return;
  }

  const ObjCPropertyDecl *prop = propImpl->getPropertyDecl();
  QualType propType = prop->getType();
  ObjCMethodDecl *getterMethod = propImpl->getGetterMethodDecl();

  // Pick an implementation strategy.
  PropertyImplStrategy strategy(CGM, propImpl);
  switch (strategy.getKind()) {
  case PropertyImplStrategy::Native: {
    // We don't need to do anything for a zero-size struct.
    if (strategy.getIvarSize().isZero())
      return;

    LValue LV = EmitLValueForIvar(TypeOfSelfObject(), LoadObjCSelf(), ivar, 0);

    // Currently, all atomic accesses have to be through integer
    // types, so there's no point in trying to pick a prettier type.
    uint64_t ivarSize = getContext().toBits(strategy.getIvarSize());
    llvm::Type *bitcastType = llvm::Type::getIntNTy(getLLVMContext(), ivarSize);

    // Perform an atomic load.  This does not impose ordering constraints.
    Address ivarAddr = LV.getAddress(*this);
    ivarAddr = Builder.CreateElementBitCast(ivarAddr, bitcastType);
    llvm::LoadInst *load = Builder.CreateLoad(ivarAddr, "load");
    load->setAtomic(llvm::AtomicOrdering::Unordered);

    // Store that value into the return address.  Doing this with a
    // bitcast is likely to produce some pretty ugly IR, but it's not
    // the *most* terrible thing in the world.
    llvm::Type *retTy = ConvertType(getterMethod->getReturnType());
    uint64_t retTySize = CGM.getDataLayout().getTypeSizeInBits(retTy);
    llvm::Value *ivarVal = load;
    if (ivarSize > retTySize) {
      bitcastType = llvm::Type::getIntNTy(getLLVMContext(), retTySize);
      ivarVal = Builder.CreateTrunc(load, bitcastType);
    }
    Builder.CreateStore(ivarVal,
                        Builder.CreateElementBitCast(ReturnValue, bitcastType));

    // Make sure we don't do an autorelease.
    AutoreleaseResult = false;
    return;
  }

  case PropertyImplStrategy::GetSetProperty: {
    llvm::FunctionCallee getPropertyFn =
        CGM.getObjCRuntime().GetPropertyGetFunction();
    if (!getPropertyFn) {
      CGM.ErrorUnsupported(propImpl, "Obj-C getter requiring atomic copy");
      return;
    }
    CGCallee callee = CGCallee::forDirect(getPropertyFn);

    // Return (ivar-type) objc_getProperty((id) self, _cmd, offset, true).
    // FIXME: Can't this be simpler? This might even be worse than the
    // corresponding gcc code.
    llvm::Value *cmd = emitCmdValueForGetterSetterBody(*this, getterMethod);
    llvm::Value *self = Builder.CreateBitCast(LoadObjCSelf(), VoidPtrTy);
    llvm::Value *ivarOffset =
        EmitIvarOffsetAsPointerDiff(classImpl->getClassInterface(), ivar);

    CallArgList args;
    args.add(RValue::get(self), getContext().getObjCIdType());
    args.add(RValue::get(cmd), getContext().getObjCSelType());
    args.add(RValue::get(ivarOffset), getContext().getPointerDiffType());
    args.add(RValue::get(Builder.getInt1(strategy.isAtomic())),
             getContext().BoolTy);

    // FIXME: We shouldn't need to get the function info here, the
    // runtime already should have computed it to build the function.
    llvm::CallBase *CallInstruction;
    RValue RV = EmitCall(getTypes().arrangeBuiltinFunctionCall(
                             getContext().getObjCIdType(), args),
                         callee, ReturnValueSlot(), args, &CallInstruction);
    if (llvm::CallInst *call = dyn_cast<llvm::CallInst>(CallInstruction))
      call->setTailCall();

    // We need to fix the type here. Ivars with copy & retain are
    // always objects so we don't need to worry about complex or
    // aggregates.
    RV = RValue::get(Builder.CreateBitCast(
        RV.getScalarVal(),
        getTypes().ConvertType(getterMethod->getReturnType())));

    EmitReturnOfRValue(RV, propType);

    // objc_getProperty does an autorelease, so we should suppress ours.
    AutoreleaseResult = false;

    return;
  }

  case PropertyImplStrategy::CopyStruct:
    emitStructGetterCall(*this, ivar, strategy.isAtomic(),
                         strategy.hasStrongMember());
    return;

  case PropertyImplStrategy::Expression:
  case PropertyImplStrategy::SetPropertyAndExpressionGet: {
    LValue LV = EmitLValueForIvar(TypeOfSelfObject(), LoadObjCSelf(), ivar, 0);
    QualType ivarType = ivar->getType();
    switch (getEvaluationKind(ivarType)) {
    case TEK_Complex: {
      ComplexPairTy pair = EmitLoadOfComplex(LV, SourceLocation());
      EmitStoreOfComplex(pair, MakeAddrLValue(ReturnValue, ivarType),
                         /*init*/ true);
      return;
    }
    case TEK_Aggregate: {
      // The return value slot is guaranteed to not be aliased, but
      // that's not necessarily the same as "on the stack", so
      // we still potentially need objc_memmove_collectable.
      EmitAggregateCopy(/* Dest= */ MakeAddrLValue(ReturnValue, ivarType),
                        /* Src= */ LV, ivarType, getOverlapForReturnValue());
      return;
    }
    case TEK_Scalar: {
      llvm::Value *value;
      if (propType->isReferenceType()) {
        value = LV.getAddress(*this).getPointer();
      } else {
        // We want to load and autoreleaseReturnValue ARC __weak ivars.
        if (LV.getQuals().getObjCLifetime() == Qualifiers::OCL_Weak) {
          if (getLangOpts().ObjCAutoRefCount) {
            value = emitARCRetainLoadOfScalar(*this, LV, ivarType);
          } else {
            value = EmitARCLoadWeak(LV.getAddress(*this));
          }

        // Otherwise we want to do a simple load, suppressing the
        // final autorelease.
        } else {
          value = EmitLoadOfLValue(LV, SourceLocation()).getScalarVal();
          AutoreleaseResult = false;
        }

        value = Builder.CreateBitCast(
            value, ConvertType(GetterMethodDecl->getReturnType()));
      }

      EmitReturnOfRValue(RValue::get(value), propType);
      return;
    }
    }
    llvm_unreachable("bad evaluation kind");
  }
  }
  llvm_unreachable("bad @property implementation strategy!");
}
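
// For reference, the GetSetProperty strategy above calls the runtime's
// general-purpose getter, declared by the Objective-C runtime as:
//   id objc_getProperty(id self, SEL _cmd, ptrdiff_t offset, BOOL atomic);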
/// emitStructSetterCall - Call the runtime function to store the value
/// from the first formal parameter into the given ivar.
static void emitStructSetterCall(CodeGenFunction &CGF, ObjCMethodDecl *OMD,
                                 ObjCIvarDecl *ivar) {
  // objc_copyStruct (&structIvar, &Arg,
  //                  sizeof (struct something), true, false);
  CallArgList args;

  // The first argument is the address of the ivar.
  llvm::Value *ivarAddr =
      CGF.EmitLValueForIvar(CGF.TypeOfSelfObject(), CGF.LoadObjCSelf(), ivar, 0)
          .getPointer(CGF);
  ivarAddr = CGF.Builder.CreateBitCast(ivarAddr, CGF.Int8PtrTy);
  args.add(RValue::get(ivarAddr), CGF.getContext().VoidPtrTy);

  // The second argument is the address of the parameter variable.
  ParmVarDecl *argVar = *OMD->param_begin();
  DeclRefExpr argRef(CGF.getContext(), argVar, false,
                     argVar->getType().getNonReferenceType(), VK_LValue,
                     SourceLocation());
  llvm::Value *argAddr = CGF.EmitLValue(&argRef).getPointer(CGF);
  argAddr = CGF.Builder.CreateBitCast(argAddr, CGF.Int8PtrTy);
  args.add(RValue::get(argAddr), CGF.getContext().VoidPtrTy);

  // The third argument is the size of the type.
  llvm::Value *size =
      CGF.CGM.getSize(CGF.getContext().getTypeSizeInChars(ivar->getType()));
  args.add(RValue::get(size), CGF.getContext().getSizeType());

  // The fourth argument is the 'isAtomic' flag.
  args.add(RValue::get(CGF.Builder.getTrue()), CGF.getContext().BoolTy);

  // The fifth argument is the 'hasStrong' flag.
  // FIXME: should this really always be false?
  args.add(RValue::get(CGF.Builder.getFalse()), CGF.getContext().BoolTy);

  llvm::FunctionCallee fn = CGF.CGM.getObjCRuntime().GetSetStructFunction();
  CGCallee callee = CGCallee::forDirect(fn);
  CGF.EmitCall(
      CGF.getTypes().arrangeBuiltinFunctionCall(CGF.getContext().VoidTy, args),
      callee, ReturnValueSlot(), args);
}
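
// For reference, GetSetStructFunction() resolves to the runtime's
// struct-copy helper, declared as:
//   void objc_copyStruct(void *dest, const void *src, ptrdiff_t size,
//                        BOOL atomic, BOOL hasStrong);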
/// emitCPPObjectAtomicSetterCall - Call the runtime function to store
/// the value from the first formal parameter into the given ivar, using
/// the C++ API for atomic C++ objects with non-trivial copy assignment.
static void emitCPPObjectAtomicSetterCall(CodeGenFunction &CGF,
                                          ObjCMethodDecl *OMD,
                                          ObjCIvarDecl *ivar,
                                          llvm::Constant *AtomicHelperFn) {
  // objc_copyCppObjectAtomic (&CppObjectIvar, &Arg,
  //                           AtomicHelperFn);
  CallArgList args;

  // The first argument is the address of the ivar.
  llvm::Value *ivarAddr =
      CGF.EmitLValueForIvar(CGF.TypeOfSelfObject(), CGF.LoadObjCSelf(), ivar, 0)
          .getPointer(CGF);
  ivarAddr = CGF.Builder.CreateBitCast(ivarAddr, CGF.Int8PtrTy);
  args.add(RValue::get(ivarAddr), CGF.getContext().VoidPtrTy);

  // The second argument is the address of the parameter variable.
  ParmVarDecl *argVar = *OMD->param_begin();
  DeclRefExpr argRef(CGF.getContext(), argVar, false,
                     argVar->getType().getNonReferenceType(), VK_LValue,
                     SourceLocation());
  llvm::Value *argAddr = CGF.EmitLValue(&argRef).getPointer(CGF);
  argAddr = CGF.Builder.CreateBitCast(argAddr, CGF.Int8PtrTy);
  args.add(RValue::get(argAddr), CGF.getContext().VoidPtrTy);

  // The third argument is the helper function.
  args.add(RValue::get(AtomicHelperFn), CGF.getContext().VoidPtrTy);

  llvm::FunctionCallee fn =
      CGF.CGM.getObjCRuntime().GetCppAtomicObjectSetFunction();
  CGCallee callee = CGCallee::forDirect(fn);
  CGF.EmitCall(
      CGF.getTypes().arrangeBuiltinFunctionCall(CGF.getContext().VoidTy, args),
      callee, ReturnValueSlot(), args);
}
static bool hasTrivialSetExpr(const ObjCPropertyImplDecl *PID) {
  Expr *setter = PID->getSetterCXXAssignment();
  if (!setter) return true;

  // Sema only makes one of these when the ivar has a C++ class type,
  // so the form is pretty constrained.

  // An operator call is trivial if the function it calls is trivial.
  // This also implies that there's nothing non-trivial going on with
  // the arguments, because operator= can only be trivial if it's a
  // synthesized assignment operator and therefore both parameters are
  // references.
  if (CallExpr *call = dyn_cast<CallExpr>(setter)) {
    if (const FunctionDecl *callee
          = dyn_cast_or_null<FunctionDecl>(call->getCalleeDecl()))
      if (callee->isTrivial())
        return true;
    return false;
  }

  assert(isa<ExprWithCleanups>(setter));
  return false;
}
static bool UseOptimizedSetter(CodeGenModule &CGM) {
  if (CGM.getLangOpts().getGC() != LangOptions::NonGC)
    return false;
  return CGM.getLangOpts().ObjCRuntime.hasOptimizedSetter();
}
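
// When UseOptimizedSetter() holds, the setter emission below can call one of
// the specialized runtime setters added in macOS 10.8 / iOS 6.0 instead of
// objc_setProperty, e.g. (the atomic/copy variants follow the same shape):
//   void objc_setProperty_nonatomic_copy(id self, SEL _cmd, id newValue,
//                                        ptrdiff_t offset);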
void
CodeGenFunction::generateObjCSetterBody(const ObjCImplementationDecl *classImpl,
                                        const ObjCPropertyImplDecl *propImpl,
                                        llvm::Constant *AtomicHelperFn) {
  ObjCIvarDecl *ivar = propImpl->getPropertyIvarDecl();
  ObjCMethodDecl *setterMethod = propImpl->getSetterMethodDecl();

  if (ivar->getType().isNonTrivialToPrimitiveCopy() == QualType::PCK_Struct) {
    ParmVarDecl *PVD = *setterMethod->param_begin();
    if (!AtomicHelperFn) {
      // Call the move assignment operator instead of calling the copy
      // assignment operator and destructor.
      LValue Dst = EmitLValueForIvar(TypeOfSelfObject(), LoadObjCSelf(), ivar,
                                     /*quals*/ 0);
      LValue Src = MakeAddrLValue(GetAddrOfLocalVar(PVD), ivar->getType());
      callCStructMoveAssignmentOperator(Dst, Src);
    } else {
      // If atomic, assignment is called via a locking api.
      emitCPPObjectAtomicSetterCall(*this, setterMethod, ivar, AtomicHelperFn);
    }
    // Deactivate the destructor for the setter parameter.
    DeactivateCleanupBlock(CalleeDestructedParamCleanups[PVD], AllocaInsertPt);
    return;
  }

  // Just use the setter expression if Sema gave us one and it's
  // non-trivial.
  if (!hasTrivialSetExpr(propImpl)) {
    if (!AtomicHelperFn)
      // If non-atomic, assignment is called directly.
      EmitStmt(propImpl->getSetterCXXAssignment());
    else
      // If atomic, assignment is called via a locking api.
      emitCPPObjectAtomicSetterCall(*this, setterMethod, ivar,
                                    AtomicHelperFn);
    return;
  }

  PropertyImplStrategy strategy(CGM, propImpl);
  switch (strategy.getKind()) {
  case PropertyImplStrategy::Native: {
    // We don't need to do anything for a zero-size struct.
    if (strategy.getIvarSize().isZero())
      return;

    Address argAddr = GetAddrOfLocalVar(*setterMethod->param_begin());

    LValue ivarLValue =
        EmitLValueForIvar(TypeOfSelfObject(), LoadObjCSelf(), ivar, /*quals*/ 0);
    Address ivarAddr = ivarLValue.getAddress(*this);

    // Currently, all atomic accesses have to be through integer
    // types, so there's no point in trying to pick a prettier type.
    llvm::Type *bitcastType =
        llvm::Type::getIntNTy(getLLVMContext(),
                              getContext().toBits(strategy.getIvarSize()));

    // Cast both arguments to the chosen operation type.
    argAddr = Builder.CreateElementBitCast(argAddr, bitcastType);
    ivarAddr = Builder.CreateElementBitCast(ivarAddr, bitcastType);

    // This bitcast load is likely to cause some nasty IR.
    llvm::Value *load = Builder.CreateLoad(argAddr);

    // Perform an atomic store.  There are no memory ordering requirements.
    llvm::StoreInst *store = Builder.CreateStore(load, ivarAddr);
    store->setAtomic(llvm::AtomicOrdering::Unordered);
    return;
  }

  case PropertyImplStrategy::GetSetProperty:
  case PropertyImplStrategy::SetPropertyAndExpressionGet: {
    llvm::FunctionCallee setOptimizedPropertyFn = nullptr;
    llvm::FunctionCallee setPropertyFn = nullptr;
    if (UseOptimizedSetter(CGM)) {
      // Requires macOS 10.8 / iOS 6.0 or later, with GC off.
      setOptimizedPropertyFn =
          CGM.getObjCRuntime().GetOptimizedPropertySetFunction(
              strategy.isAtomic(), strategy.isCopy());
      if (!setOptimizedPropertyFn) {
        CGM.ErrorUnsupported(propImpl, "Obj-C optimized setter - NYI");
        return;
      }
    } else {
      setPropertyFn = CGM.getObjCRuntime().GetPropertySetFunction();
      if (!setPropertyFn) {
        CGM.ErrorUnsupported(propImpl, "Obj-C setter requiring atomic copy");
        return;
      }
    }

    // Emit objc_setProperty((id) self, _cmd, offset, arg,
    //                       <is-atomic>, <is-copy>).
    llvm::Value *cmd = emitCmdValueForGetterSetterBody(*this, setterMethod);
    llvm::Value *self =
        Builder.CreateBitCast(LoadObjCSelf(), VoidPtrTy);
    llvm::Value *ivarOffset =
        EmitIvarOffsetAsPointerDiff(classImpl->getClassInterface(), ivar);
    Address argAddr = GetAddrOfLocalVar(*setterMethod->param_begin());
    llvm::Value *arg = Builder.CreateLoad(argAddr, "arg");
    arg = Builder.CreateBitCast(arg, VoidPtrTy);

    CallArgList args;
    args.add(RValue::get(self), getContext().getObjCIdType());
    args.add(RValue::get(cmd), getContext().getObjCSelType());
    if (setOptimizedPropertyFn) {
      args.add(RValue::get(arg), getContext().getObjCIdType());
      args.add(RValue::get(ivarOffset), getContext().getPointerDiffType());
      CGCallee callee = CGCallee::forDirect(setOptimizedPropertyFn);
      EmitCall(getTypes().arrangeBuiltinFunctionCall(getContext().VoidTy, args),
               callee, ReturnValueSlot(), args);
    } else {
      args.add(RValue::get(ivarOffset), getContext().getPointerDiffType());
      args.add(RValue::get(arg), getContext().getObjCIdType());
      args.add(RValue::get(Builder.getInt1(strategy.isAtomic())),
               getContext().BoolTy);
      args.add(RValue::get(Builder.getInt1(strategy.isCopy())),
               getContext().BoolTy);
      // FIXME: We shouldn't need to get the function info here, the runtime
      // already should have computed it to build the function.
      CGCallee callee = CGCallee::forDirect(setPropertyFn);
      EmitCall(getTypes().arrangeBuiltinFunctionCall(getContext().VoidTy, args),
               callee, ReturnValueSlot(), args);
    }

    return;
  }

  case PropertyImplStrategy::CopyStruct:
    emitStructSetterCall(*this, setterMethod, ivar);
    return;

  case PropertyImplStrategy::Expression:
    break;
  }

  // Otherwise, fake up some ASTs and emit a normal assignment.
  ValueDecl *selfDecl = setterMethod->getSelfDecl();
  DeclRefExpr self(getContext(), selfDecl, false, selfDecl->getType(),
                   VK_LValue, SourceLocation());
  ImplicitCastExpr selfLoad(ImplicitCastExpr::OnStack, selfDecl->getType(),
                            CK_LValueToRValue, &self, VK_PRValue,
                            FPOptionsOverride());
  ObjCIvarRefExpr ivarRef(ivar, ivar->getType().getNonReferenceType(),
                          SourceLocation(), SourceLocation(),
                          &selfLoad, true, true);

  ParmVarDecl *argDecl = *setterMethod->param_begin();
  QualType argType = argDecl->getType().getNonReferenceType();
  DeclRefExpr arg(getContext(), argDecl, false, argType, VK_LValue,
                  SourceLocation());
  ImplicitCastExpr argLoad(ImplicitCastExpr::OnStack,
                           argType.getUnqualifiedType(), CK_LValueToRValue,
                           &arg, VK_PRValue, FPOptionsOverride());

  // The property type can differ from the ivar type in some situations with
  // Objective-C pointer types; we can always bit cast the RHS in these cases.
  // The following absurdity is just to ensure well-formed IR.
  CastKind argCK = CK_NoOp;
  if (ivarRef.getType()->isObjCObjectPointerType()) {
    if (argLoad.getType()->isObjCObjectPointerType())
      argCK = CK_BitCast;
    else if (argLoad.getType()->isBlockPointerType())
      argCK = CK_BlockPointerToObjCPointerCast;
    else
      argCK = CK_CPointerToObjCPointerCast;
  } else if (ivarRef.getType()->isBlockPointerType()) {
    if (argLoad.getType()->isBlockPointerType())
      argCK = CK_BitCast;
    else
      argCK = CK_AnyPointerToBlockPointerCast;
  } else if (ivarRef.getType()->isPointerType()) {
    argCK = CK_BitCast;
  } else if (argLoad.getType()->isAtomicType() &&
             !ivarRef.getType()->isAtomicType()) {
    argCK = CK_AtomicToNonAtomic;
  } else if (!argLoad.getType()->isAtomicType() &&
             ivarRef.getType()->isAtomicType()) {
    argCK = CK_NonAtomicToAtomic;
  }
  ImplicitCastExpr argCast(ImplicitCastExpr::OnStack, ivarRef.getType(), argCK,
                           &argLoad, VK_PRValue, FPOptionsOverride());
  Expr *finalArg = &argLoad;
  if (!getContext().hasSameUnqualifiedType(ivarRef.getType(),
                                           argLoad.getType()))
    finalArg = &argCast;

  BinaryOperator *assign = BinaryOperator::Create(
      getContext(), &ivarRef, finalArg, BO_Assign, ivarRef.getType(),
      VK_PRValue, OK_Ordinary, SourceLocation(), FPOptionsOverride());
  EmitStmt(assign);
}
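
// For reference, the non-optimized path above calls the runtime's
// general-purpose setter, declared by the Objective-C runtime as:
//   void objc_setProperty(id self, SEL _cmd, ptrdiff_t offset, id newValue,
//                         BOOL atomic, BOOL shouldCopy);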
/// Generate an Objective-C property setter function.
///
/// The given Decl must be an ObjCImplementationDecl. \@synthesize
/// is illegal within a category.
void CodeGenFunction::GenerateObjCSetter(ObjCImplementationDecl *IMP,
                                         const ObjCPropertyImplDecl *PID) {
  llvm::Constant *AtomicHelperFn =
      CodeGenFunction(CGM).GenerateObjCAtomicSetterCopyHelperFunction(PID);
  ObjCMethodDecl *OMD = PID->getSetterMethodDecl();
  assert(OMD && "Invalid call to generate setter (empty method)");
  StartObjCMethod(OMD, IMP->getClassInterface());

  generateObjCSetterBody(IMP, PID, AtomicHelperFn);

  FinishFunction(OMD->getEndLoc());
}
namespace {
  struct DestroyIvar final : EHScopeStack::Cleanup {
  private:
    llvm::Value *addr;
    const ObjCIvarDecl *ivar;
    CodeGenFunction::Destroyer *destroyer;
    bool useEHCleanupForArray;
  public:
    DestroyIvar(llvm::Value *addr, const ObjCIvarDecl *ivar,
                CodeGenFunction::Destroyer *destroyer,
                bool useEHCleanupForArray)
      : addr(addr), ivar(ivar), destroyer(destroyer),
        useEHCleanupForArray(useEHCleanupForArray) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      LValue lvalue
        = CGF.EmitLValueForIvar(CGF.TypeOfSelfObject(), addr, ivar, /*CVR*/ 0);
      CGF.emitDestroy(lvalue.getAddress(CGF), ivar->getType(), destroyer,
                      flags.isForNormalCleanup() && useEHCleanupForArray);
    }
  };
}
/// Like CodeGenFunction::destroyARCStrong, but do it with a call.
static void destroyARCStrongWithStore(CodeGenFunction &CGF,
                                      Address addr,
                                      QualType type) {
  llvm::Value *null = getNullForVariable(addr);
  CGF.EmitARCStoreStrongCall(addr, null, /*ignored*/ true);
}
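
// Storing nil through objc_storeStrong releases the previous value, so the
// call above is semantically a destroy; routing it through the runtime keeps
// the operation visible to tools that interpose on objc_storeStrong.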
static void emitCXXDestructMethod(CodeGenFunction &CGF,
                                  ObjCImplementationDecl *impl) {
  CodeGenFunction::RunCleanupsScope scope(CGF);

  llvm::Value *self = CGF.LoadObjCSelf();

  const ObjCInterfaceDecl *iface = impl->getClassInterface();
  for (const ObjCIvarDecl *ivar = iface->all_declared_ivar_begin();
       ivar; ivar = ivar->getNextIvar()) {
    QualType type = ivar->getType();

    // Check whether the ivar is a destructible type.
    QualType::DestructionKind dtorKind = type.isDestructedType();
    if (!dtorKind) continue;

    CodeGenFunction::Destroyer *destroyer = nullptr;

    // Use a call to objc_storeStrong to destroy strong ivars, for the
    // general benefit of the tools.
    if (dtorKind == QualType::DK_objc_strong_lifetime) {
      destroyer = destroyARCStrongWithStore;

    // Otherwise use the default for the destruction kind.
    } else {
      destroyer = CGF.getDestroyer(dtorKind);
    }

    CleanupKind cleanupKind = CGF.getCleanupKind(dtorKind);

    CGF.EHStack.pushCleanup<DestroyIvar>(cleanupKind, self, ivar, destroyer,
                                         cleanupKind & EHCleanup);
  }

  assert(scope.requiresCleanups() && "nothing to do in .cxx_destruct?");
}
void CodeGenFunction::GenerateObjCCtorDtorMethod(ObjCImplementationDecl *IMP,
                                                 ObjCMethodDecl *MD,
                                                 bool ctor) {
  MD->createImplicitParams(CGM.getContext(), IMP->getClassInterface());
  StartObjCMethod(MD, IMP->getClassInterface());

  // Emit .cxx_construct.
  if (ctor) {
    // Suppress the final autorelease in ARC.
    AutoreleaseResult = false;

    for (const auto *IvarInit : IMP->inits()) {
      FieldDecl *Field = IvarInit->getAnyMember();
      ObjCIvarDecl *Ivar = cast<ObjCIvarDecl>(Field);
      LValue LV = EmitLValueForIvar(TypeOfSelfObject(),
                                    LoadObjCSelf(), Ivar, 0);
      EmitAggExpr(IvarInit->getInit(),
                  AggValueSlot::forLValue(LV, *this, AggValueSlot::IsDestructed,
                                          AggValueSlot::DoesNotNeedGCBarriers,
                                          AggValueSlot::IsNotAliased,
                                          AggValueSlot::DoesNotOverlap));
    }
    // constructor returns 'self'.
    CodeGenTypes &Types = CGM.getTypes();
    QualType IdTy(CGM.getContext().getObjCIdType());
    llvm::Value *SelfAsId =
        Builder.CreateBitCast(LoadObjCSelf(), Types.ConvertType(IdTy));
    EmitReturnOfRValue(RValue::get(SelfAsId), IdTy);

  // Emit .cxx_destruct.
  } else {
    emitCXXDestructMethod(*this, IMP);
  }
  FinishFunction();
}
llvm::Value *CodeGenFunction::LoadObjCSelf() {
  VarDecl *Self = cast<ObjCMethodDecl>(CurFuncDecl)->getSelfDecl();
  DeclRefExpr DRE(getContext(), Self,
                  /*is enclosing local*/ (CurFuncDecl != CurCodeDecl),
                  Self->getType(), VK_LValue, SourceLocation());
  return EmitLoadOfScalar(EmitDeclRefLValue(&DRE), SourceLocation());
}

QualType CodeGenFunction::TypeOfSelfObject() {
  const ObjCMethodDecl *OMD = cast<ObjCMethodDecl>(CurFuncDecl);
  ImplicitParamDecl *selfDecl = OMD->getSelfDecl();
  const ObjCObjectPointerType *PTy = cast<ObjCObjectPointerType>(
      getContext().getCanonicalType(selfDecl->getType()));
  return PTy->getPointeeType();
}
void CodeGenFunction::EmitObjCForCollectionStmt(const ObjCForCollectionStmt &S){
  llvm::FunctionCallee EnumerationMutationFnPtr =
      CGM.getObjCRuntime().EnumerationMutationFunction();
  if (!EnumerationMutationFnPtr) {
    CGM.ErrorUnsupported(&S, "Obj-C fast enumeration for this runtime");
    return;
  }
  CGCallee EnumerationMutationFn =
      CGCallee::forDirect(EnumerationMutationFnPtr);

  CGDebugInfo *DI = getDebugInfo();
  if (DI)
    DI->EmitLexicalBlockStart(Builder, S.getSourceRange().getBegin());

  RunCleanupsScope ForScope(*this);

  // The local variable comes into scope immediately.
  AutoVarEmission variable = AutoVarEmission::invalid();
  if (const DeclStmt *SD = dyn_cast<DeclStmt>(S.getElement()))
    variable = EmitAutoVarAlloca(*cast<VarDecl>(SD->getSingleDecl()));

  JumpDest LoopEnd = getJumpDestInCurrentScope("forcoll.end");

  // Fast enumeration state.
  QualType StateTy = CGM.getObjCFastEnumerationStateType();
  Address StatePtr = CreateMemTemp(StateTy, "state.ptr");
  EmitNullInitialization(StatePtr, StateTy);

  // Number of elements in the items array.
  static const unsigned NumItems = 16;

  // Fetch the countByEnumeratingWithState:objects:count: selector.
  IdentifierInfo *II[] = {
    &CGM.getContext().Idents.get("countByEnumeratingWithState"),
    &CGM.getContext().Idents.get("objects"),
    &CGM.getContext().Idents.get("count")
  };
  Selector FastEnumSel =
      CGM.getContext().Selectors.getSelector(std::size(II), &II[0]);

  QualType ItemsTy =
      getContext().getConstantArrayType(getContext().getObjCIdType(),
                                        llvm::APInt(32, NumItems), nullptr,
                                        ArrayType::Normal, 0);
  Address ItemsPtr = CreateMemTemp(ItemsTy, "items.ptr");

  // Emit the collection pointer.  In ARC, we do a retain.
  llvm::Value *Collection;
  if (getLangOpts().ObjCAutoRefCount) {
    Collection = EmitARCRetainScalarExpr(S.getCollection());

    // Enter a cleanup to do the release.
    EmitObjCConsumeObject(S.getCollection()->getType(), Collection);
  } else {
    Collection = EmitScalarExpr(S.getCollection());
  }

  // The 'continue' label needs to appear within the cleanup for the
  // collection object.
  JumpDest AfterBody = getJumpDestInCurrentScope("forcoll.next");

  // Send it our message:
  CallArgList Args;

  // The first argument is a temporary of the enumeration-state type.
  Args.add(RValue::get(StatePtr.getPointer()),
           getContext().getPointerType(StateTy));

  // The second argument is a temporary array with space for NumItems
  // pointers.  We'll actually be loading elements from the array
  // pointer written into the control state; this buffer is so that
  // collections that *aren't* backed by arrays can still queue up
  // batches of elements.
  Args.add(RValue::get(ItemsPtr.getPointer()),
           getContext().getPointerType(ItemsTy));

  // The third argument is the capacity of that temporary array.
  llvm::Type *NSUIntegerTy = ConvertType(getContext().getNSUIntegerType());
  llvm::Constant *Count = llvm::ConstantInt::get(NSUIntegerTy, NumItems);
  Args.add(RValue::get(Count), getContext().getNSUIntegerType());

  // Start the enumeration.
  RValue CountRV =
      CGM.getObjCRuntime().GenerateMessageSend(*this, ReturnValueSlot(),
                                               getContext().getNSUIntegerType(),
                                               FastEnumSel, Collection, Args);

  // The initial number of objects that were returned in the buffer.
  llvm::Value *initialBufferLimit = CountRV.getScalarVal();

  llvm::BasicBlock *EmptyBB = createBasicBlock("forcoll.empty");
  llvm::BasicBlock *LoopInitBB = createBasicBlock("forcoll.loopinit");

  llvm::Value *zero = llvm::Constant::getNullValue(NSUIntegerTy);

  // If the limit pointer was zero to begin with, the collection is
  // empty; skip all this.  Set the branch weight assuming this has the same
  // probability of exiting the loop as any other loop exit.
  uint64_t EntryCount = getCurrentProfileCount();
  Builder.CreateCondBr(
      Builder.CreateICmpEQ(initialBufferLimit, zero, "iszero"), EmptyBB,
      LoopInitBB,
      createProfileWeights(EntryCount, getProfileCount(S.getBody())));

  // Otherwise, initialize the loop.
  EmitBlock(LoopInitBB);

  // Save the initial mutations value.  This is the value at an
  // address that was written into the state object by
  // countByEnumeratingWithState:objects:count:.
  Address StateMutationsPtrPtr =
      Builder.CreateStructGEP(StatePtr, 2, "mutationsptr.ptr");
  llvm::Value *StateMutationsPtr
    = Builder.CreateLoad(StateMutationsPtrPtr, "mutationsptr");

  llvm::Type *UnsignedLongTy = ConvertType(getContext().UnsignedLongTy);
  llvm::Value *initialMutations =
      Builder.CreateAlignedLoad(UnsignedLongTy, StateMutationsPtr,
                                getPointerAlign(), "forcoll.initial-mutations");

  // Start looping.  This is the point we return to whenever we have a
  // fresh, non-empty batch of objects.
  llvm::BasicBlock *LoopBodyBB = createBasicBlock("forcoll.loopbody");
  EmitBlock(LoopBodyBB);

  // The current index into the buffer.
  llvm::PHINode *index = Builder.CreatePHI(NSUIntegerTy, 3, "forcoll.index");
  index->addIncoming(zero, LoopInitBB);

  // The current buffer size.
  llvm::PHINode *count = Builder.CreatePHI(NSUIntegerTy, 3, "forcoll.count");
  count->addIncoming(initialBufferLimit, LoopInitBB);

  incrementProfileCounter(&S);

  // Check whether the mutations value has changed from where it was
  // at start.  StateMutationsPtr should actually be invariant between
  // refreshes.
  StateMutationsPtr = Builder.CreateLoad(StateMutationsPtrPtr, "mutationsptr");
  llvm::Value *currentMutations
    = Builder.CreateAlignedLoad(UnsignedLongTy, StateMutationsPtr,
                                getPointerAlign(), "statemutations");

  llvm::BasicBlock *WasMutatedBB = createBasicBlock("forcoll.mutated");
  llvm::BasicBlock *WasNotMutatedBB = createBasicBlock("forcoll.notmutated");

  Builder.CreateCondBr(Builder.CreateICmpEQ(currentMutations, initialMutations),
                       WasNotMutatedBB, WasMutatedBB);

  // If the mutations value has changed, call the enumeration-mutation
  // function.
  EmitBlock(WasMutatedBB);
  llvm::Type *ObjCIdType = ConvertType(getContext().getObjCIdType());
  llvm::Value *V =
      Builder.CreateBitCast(Collection, ObjCIdType);
  CallArgList Args2;
  Args2.add(RValue::get(V), getContext().getObjCIdType());
  // FIXME: We shouldn't need to get the function info here, the runtime already
  // should have computed it to build the function.
  EmitCall(
      CGM.getTypes().arrangeBuiltinFunctionCall(getContext().VoidTy, Args2),
      EnumerationMutationFn, ReturnValueSlot(), Args2);

  // Otherwise, or if the mutation function returns, just continue.
  EmitBlock(WasNotMutatedBB);

  // Initialize the element variable.
  RunCleanupsScope elementVariableScope(*this);
  bool elementIsVariable;
  LValue elementLValue;
  QualType elementType;
  if (const DeclStmt *SD = dyn_cast<DeclStmt>(S.getElement())) {
    // Initialize the variable, in case it's a __block variable or something.
    EmitAutoVarInit(variable);

    const VarDecl *D = cast<VarDecl>(SD->getSingleDecl());
    DeclRefExpr tempDRE(getContext(), const_cast<VarDecl *>(D), false,
                        D->getType(), VK_LValue, SourceLocation());
    elementLValue = EmitLValue(&tempDRE);
    elementType = D->getType();
    elementIsVariable = true;

    if (D->isARCPseudoStrong())
      elementLValue.getQuals().setObjCLifetime(Qualifiers::OCL_ExplicitNone);
  } else {
    elementLValue = LValue(); // suppress warning
    elementType = cast<Expr>(S.getElement())->getType();
    elementIsVariable = false;
  }
  llvm::Type *convertedElementType = ConvertType(elementType);

  // Fetch the buffer out of the enumeration state.
  // TODO: this pointer should actually be invariant between
  // refreshes, which would help us do certain loop optimizations.
  Address StateItemsPtr =
      Builder.CreateStructGEP(StatePtr, 1, "stateitems.ptr");
  llvm::Value *EnumStateItems =
      Builder.CreateLoad(StateItemsPtr, "stateitems");

  // Fetch the value at the current index from the buffer.
  llvm::Value *CurrentItemPtr = Builder.CreateGEP(
      ObjCIdType, EnumStateItems, index, "currentitem.ptr");
  llvm::Value *CurrentItem =
      Builder.CreateAlignedLoad(ObjCIdType, CurrentItemPtr, getPointerAlign());

  if (SanOpts.has(SanitizerKind::ObjCCast)) {
    // Before using an item from the collection, check that the implicit cast
    // from id to the element type is valid. This is done with instrumentation
    // roughly corresponding to:
    //
    //   if (![item isKindOfClass:expectedCls]) { /* emit diagnostic */ }
    const ObjCObjectPointerType *ObjPtrTy =
        elementType->getAsObjCInterfacePointerType();
    const ObjCInterfaceType *InterfaceTy =
        ObjPtrTy ? ObjPtrTy->getInterfaceType() : nullptr;
    if (InterfaceTy) {
      SanitizerScope SanScope(this);
      auto &C = CGM.getContext();
      assert(InterfaceTy->getDecl() && "No decl for ObjC interface type");
      Selector IsKindOfClassSel = GetUnarySelector("isKindOfClass", C);
      CallArgList IsKindOfClassArgs;
      llvm::Value *Cls =
          CGM.getObjCRuntime().GetClass(*this, InterfaceTy->getDecl());
      IsKindOfClassArgs.add(RValue::get(Cls), C.getObjCClassType());
      llvm::Value *IsClass =
          CGM.getObjCRuntime()
              .GenerateMessageSend(*this, ReturnValueSlot(), C.BoolTy,
                                   IsKindOfClassSel, CurrentItem,
                                   IsKindOfClassArgs)
              .getScalarVal();
      llvm::Constant *StaticData[] = {
          EmitCheckSourceLocation(S.getBeginLoc()),
          EmitCheckTypeDescriptor(QualType(InterfaceTy, 0))};
      EmitCheck({{IsClass, SanitizerKind::ObjCCast}},
                SanitizerHandler::InvalidObjCCast,
                ArrayRef<llvm::Constant *>(StaticData), CurrentItem);
    }
  }

  // Cast that value to the right type.
  CurrentItem = Builder.CreateBitCast(CurrentItem, convertedElementType,
                                      "currentitem");

  // Make sure we have an l-value.  Yes, this gets evaluated every
  // time through the loop.
  if (!elementIsVariable) {
    elementLValue = EmitLValue(cast<Expr>(S.getElement()));
    EmitStoreThroughLValue(RValue::get(CurrentItem), elementLValue);
  } else {
    EmitStoreThroughLValue(RValue::get(CurrentItem), elementLValue,
                           /*isInit*/ true);
  }

  // If we do have an element variable, this assignment is the end of
  // its initialization.
  if (elementIsVariable)
    EmitAutoVarCleanups(variable);

  // Perform the loop body, setting up break and continue labels.
  BreakContinueStack.push_back(BreakContinue(LoopEnd, AfterBody));
  {
    RunCleanupsScope Scope(*this);
    EmitStmt(S.getBody());
  }
  BreakContinueStack.pop_back();

  // Destroy the element variable now.
  elementVariableScope.ForceCleanup();

  // Check whether there are more elements.
  EmitBlock(AfterBody.getBlock());

  llvm::BasicBlock *FetchMoreBB = createBasicBlock("forcoll.refetch");

  // First we check in the local buffer.
  llvm::Value *indexPlusOne =
      Builder.CreateAdd(index, llvm::ConstantInt::get(NSUIntegerTy, 1));

  // If we haven't overrun the buffer yet, we can continue.
  // Set the branch weights based on the simplifying assumption that this is
  // like a while-loop, i.e., ignoring that the false branch fetches more
  // elements and then returns to the loop.
  Builder.CreateCondBr(
      Builder.CreateICmpULT(indexPlusOne, count), LoopBodyBB, FetchMoreBB,
      createProfileWeights(getProfileCount(S.getBody()), EntryCount));

  index->addIncoming(indexPlusOne, AfterBody.getBlock());
  count->addIncoming(count, AfterBody.getBlock());

  // Otherwise, we have to fetch more elements.
  EmitBlock(FetchMoreBB);

  CountRV =
      CGM.getObjCRuntime().GenerateMessageSend(*this, ReturnValueSlot(),
                                               getContext().getNSUIntegerType(),
                                               FastEnumSel, Collection, Args);

  // If we got a zero count, we're done.
  llvm::Value *refetchCount = CountRV.getScalarVal();

  // (note that the message send might split FetchMoreBB)
  index->addIncoming(zero, Builder.GetInsertBlock());
  count->addIncoming(refetchCount, Builder.GetInsertBlock());

  Builder.CreateCondBr(Builder.CreateICmpEQ(refetchCount, zero),
                       EmptyBB, LoopBodyBB);

  // No more elements.
  EmitBlock(EmptyBB);

  if (!elementIsVariable) {
    // If the element was not a declaration, set it to be null.
    llvm::Value *null = llvm::Constant::getNullValue(convertedElementType);
    elementLValue = EmitLValue(cast<Expr>(S.getElement()));
    EmitStoreThroughLValue(RValue::get(null), elementLValue);
  }

  if (DI)
    DI->EmitLexicalBlockEnd(Builder, S.getSourceRange().getEnd());

  ForScope.ForceCleanup();

  EmitBlock(LoopEnd.getBlock());
}
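
// For reference, the state object threaded through the message sends above
// mirrors NSFastEnumerationState; the struct GEPs at indices 1 and 2 pick
// out itemsPtr and mutationsPtr respectively:
//   typedef struct {
//     unsigned long state;
//     id *itemsPtr;
//     unsigned long *mutationsPtr;
//     unsigned long extra[5];
//   } NSFastEnumerationState;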
void CodeGenFunction::EmitObjCAtTryStmt(const ObjCAtTryStmt &S) {
  CGM.getObjCRuntime().EmitTryStmt(*this, S);
}

void CodeGenFunction::EmitObjCAtThrowStmt(const ObjCAtThrowStmt &S) {
  CGM.getObjCRuntime().EmitThrowStmt(*this, S);
}

void CodeGenFunction::EmitObjCAtSynchronizedStmt(
    const ObjCAtSynchronizedStmt &S) {
  CGM.getObjCRuntime().EmitSynchronizedStmt(*this, S);
}
namespace {
  struct CallObjCRelease final : EHScopeStack::Cleanup {
    CallObjCRelease(llvm::Value *object) : object(object) {}
    llvm::Value *object;

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      // Releases at the end of the full-expression are imprecise.
      CGF.EmitARCRelease(object, ARCImpreciseLifetime);
    }
  };
}

/// Produce the code for a CK_ARCConsumeObject.  Does a primitive
/// release at the end of the full-expression.
llvm::Value *CodeGenFunction::EmitObjCConsumeObject(QualType type,
                                                    llvm::Value *object) {
  // If we're in a conditional branch, we need to make the cleanup
  // conditional.
  pushFullExprCleanup<CallObjCRelease>(getARCCleanupKind(), object);
  return object;
}
llvm::Value *CodeGenFunction::EmitObjCExtendObjectLifetime(QualType type,
                                                           llvm::Value *value) {
  return EmitARCRetainAutorelease(type, value);
}

/// Given a number of pointers, inform the optimizer that they're
/// being intrinsically used up until this point in the program.
void CodeGenFunction::EmitARCIntrinsicUse(ArrayRef<llvm::Value*> values) {
  llvm::Function *&fn = CGM.getObjCEntrypoints().clang_arc_use;
  if (!fn)
    fn = CGM.getIntrinsic(llvm::Intrinsic::objc_clang_arc_use);

  // This isn't really a "runtime" function, but as an intrinsic it
  // doesn't really matter as long as we align things up.
  EmitNounwindRuntimeCall(fn, values);
}

/// Emit a call to "clang.arc.noop.use", which consumes the result of a call
/// that has operand bundle "clang.arc.attachedcall".
void CodeGenFunction::EmitARCNoopIntrinsicUse(ArrayRef<llvm::Value *> values) {
  llvm::Function *&fn = CGM.getObjCEntrypoints().clang_arc_noop_use;
  if (!fn)
    fn = CGM.getIntrinsic(llvm::Intrinsic::objc_clang_arc_noop_use);
  EmitNounwindRuntimeCall(fn, values);
}
static void setARCRuntimeFunctionLinkage(CodeGenModule &CGM, llvm::Value *RTF) {
  if (auto *F = dyn_cast<llvm::Function>(RTF)) {
    // If the target runtime doesn't naturally support ARC, emit weak
    // references to the runtime support library.  We don't really
    // permit this to fail, but we need a particular relocation style.
    if (!CGM.getLangOpts().ObjCRuntime.hasNativeARC() &&
        !CGM.getTriple().isOSBinFormatCOFF()) {
      F->setLinkage(llvm::Function::ExternalWeakLinkage);
    }
  }
}

static void setARCRuntimeFunctionLinkage(CodeGenModule &CGM,
                                         llvm::FunctionCallee RTF) {
  setARCRuntimeFunctionLinkage(CGM, RTF.getCallee());
}

static llvm::Function *getARCIntrinsic(llvm::Intrinsic::ID IntID,
                                       CodeGenModule &CGM) {
  llvm::Function *fn = CGM.getIntrinsic(IntID);
  setARCRuntimeFunctionLinkage(CGM, fn);
  return fn;
}
/// Perform an operation having the signature
///   i8* (i8*)
/// where a null input causes a no-op and returns null.
static llvm::Value *emitARCValueOperation(
    CodeGenFunction &CGF, llvm::Value *value, llvm::Type *returnType,
    llvm::Function *&fn, llvm::Intrinsic::ID IntID,
    llvm::CallInst::TailCallKind tailKind = llvm::CallInst::TCK_None) {
  if (isa<llvm::ConstantPointerNull>(value))
    return value;

  if (!fn)
    fn = getARCIntrinsic(IntID, CGF.CGM);

  // Cast the argument to 'id'.
  llvm::Type *origType = returnType ? returnType : value->getType();
  value = CGF.Builder.CreateBitCast(value, CGF.Int8PtrTy);

  // Call the function.
  llvm::CallInst *call = CGF.EmitNounwindRuntimeCall(fn, value);
  call->setTailCallKind(tailKind);

  // Cast the result back to the original type.
  return CGF.Builder.CreateBitCast(call, origType);
}

/// Perform an operation having the following signature:
///   i8* (i8**)
static llvm::Value *emitARCLoadOperation(CodeGenFunction &CGF, Address addr,
                                         llvm::Function *&fn,
                                         llvm::Intrinsic::ID IntID) {
  if (!fn)
    fn = getARCIntrinsic(IntID, CGF.CGM);

  // Cast the argument to 'id*'.
  llvm::Type *origType = addr.getElementType();
  addr = CGF.Builder.CreateElementBitCast(addr, CGF.Int8PtrTy);

  // Call the function.
  llvm::Value *result = CGF.EmitNounwindRuntimeCall(fn, addr.getPointer());

  // Cast the result back to a dereference of the original type.
  if (origType != CGF.Int8PtrTy)
    result = CGF.Builder.CreateBitCast(result, origType);

  return result;
}

/// Perform an operation having the following signature:
///   i8* (i8**, i8*)
static llvm::Value *emitARCStoreOperation(CodeGenFunction &CGF, Address addr,
                                          llvm::Value *value,
                                          llvm::Function *&fn,
                                          llvm::Intrinsic::ID IntID,
                                          bool ignored) {
  assert(addr.getElementType() == value->getType());

  if (!fn)
    fn = getARCIntrinsic(IntID, CGF.CGM);

  llvm::Type *origType = value->getType();

  llvm::Value *args[] = {
    CGF.Builder.CreateBitCast(addr.getPointer(), CGF.Int8PtrPtrTy),
    CGF.Builder.CreateBitCast(value, CGF.Int8PtrTy)
  };
  llvm::CallInst *result = CGF.EmitNounwindRuntimeCall(fn, args);

  if (ignored) return nullptr;

  return CGF.Builder.CreateBitCast(result, origType);
}
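
// This shape covers value-returning store entry points such as
//   id objc_storeWeak(id *location, id value);
// which stores `value` into `location`, registering or unregistering a weak
// reference as needed, and returns the value stored.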
/// Perform an operation having the following signature:
///   void (i8**, i8**)
static void emitARCCopyOperation(CodeGenFunction &CGF, Address dst, Address src,
                                 llvm::Function *&fn,
                                 llvm::Intrinsic::ID IntID) {
  assert(dst.getType() == src.getType());

  if (!fn)
    fn = getARCIntrinsic(IntID, CGF.CGM);

  llvm::Value *args[] = {
    CGF.Builder.CreateBitCast(dst.getPointer(), CGF.Int8PtrPtrTy),
    CGF.Builder.CreateBitCast(src.getPointer(), CGF.Int8PtrPtrTy)
  };
  CGF.EmitNounwindRuntimeCall(fn, args);
}

/// Perform an operation having the signature
///   i8* (i8*)
/// where a null input causes a no-op and returns null.
static llvm::Value *emitObjCValueOperation(CodeGenFunction &CGF,
                                           llvm::Value *value,
                                           llvm::Type *returnType,
                                           llvm::FunctionCallee &fn,
                                           StringRef fnName) {
  if (isa<llvm::ConstantPointerNull>(value))
    return value;

  if (!fn) {
    llvm::FunctionType *fnType =
        llvm::FunctionType::get(CGF.Int8PtrTy, CGF.Int8PtrTy, false);
    fn = CGF.CGM.CreateRuntimeFunction(fnType, fnName);

    // We have Native ARC, so set nonlazybind attribute for performance.
    if (llvm::Function *f = dyn_cast<llvm::Function>(fn.getCallee()))
      if (fnName == "objc_retain")
        f->addFnAttr(llvm::Attribute::NonLazyBind);
  }

  // Cast the argument to 'id'.
  llvm::Type *origType = returnType ? returnType : value->getType();
  value = CGF.Builder.CreateBitCast(value, CGF.Int8PtrTy);

  // Call the function.
  llvm::CallBase *Inst = CGF.EmitCallOrInvoke(fn, value);

  // Mark calls to objc_autorelease as tail on the assumption that methods
  // overriding autorelease do not touch anything on the stack.
  if (fnName == "objc_autorelease")
    if (auto *Call = dyn_cast<llvm::CallInst>(Inst))
      Call->setTailCall();

  // Cast the result back to the original type.
  return CGF.Builder.CreateBitCast(Inst, origType);
}
/// Produce the code to do a retain.  Based on the type, calls one of:
///   call i8* \@objc_retain(i8* %value)
///   call i8* \@objc_retainBlock(i8* %value)
llvm::Value *CodeGenFunction::EmitARCRetain(QualType type, llvm::Value *value) {
  if (type->isBlockPointerType())
    return EmitARCRetainBlock(value, /*mandatory*/ false);
  else
    return EmitARCRetainNonBlock(value);
}

/// Retain the given object, with normal retain semantics.
///   call i8* \@objc_retain(i8* %value)
llvm::Value *CodeGenFunction::EmitARCRetainNonBlock(llvm::Value *value) {
  return emitARCValueOperation(*this, value, nullptr,
                               CGM.getObjCEntrypoints().objc_retain,
                               llvm::Intrinsic::objc_retain);
}

/// Retain the given block, with _Block_copy semantics.
///   call i8* \@objc_retainBlock(i8* %value)
///
/// \param mandatory - If false, emit the call with metadata
/// indicating that it's okay for the optimizer to eliminate this call
/// if it can prove that the block never escapes except down the stack.
llvm::Value *CodeGenFunction::EmitARCRetainBlock(llvm::Value *value,
                                                 bool mandatory) {
  llvm::Value *result
    = emitARCValueOperation(*this, value, nullptr,
                            CGM.getObjCEntrypoints().objc_retainBlock,
                            llvm::Intrinsic::objc_retainBlock);

  // If the copy isn't mandatory, add !clang.arc.copy_on_escape to
  // tell the optimizer that it doesn't need to do this copy if the
  // block doesn't escape, where being passed as an argument doesn't
  // count as escaping.
  if (!mandatory && isa<llvm::Instruction>(result)) {
    llvm::CallInst *call
      = cast<llvm::CallInst>(result->stripPointerCasts());
    assert(call->getCalledOperand() ==
           CGM.getObjCEntrypoints().objc_retainBlock);

    call->setMetadata("clang.arc.copy_on_escape",
                      llvm::MDNode::get(Builder.getContext(), std::nullopt));
  }

  return result;
}
static void emitAutoreleasedReturnValueMarker(CodeGenFunction &CGF) {
  // Fetch the void(void) inline asm which marks that we're going to
  // do something with the autoreleased return value.
  llvm::InlineAsm *&marker
    = CGF.CGM.getObjCEntrypoints().retainAutoreleasedReturnValueMarker;
  if (!marker) {
    StringRef assembly
      = CGF.CGM.getTargetCodeGenInfo()
           .getARCRetainAutoreleasedReturnValueMarker();

    // If we have an empty assembly string, there's nothing to do.
    if (assembly.empty()) {

    // Otherwise, at -O0, build an inline asm that we're going to call
    // in a moment.
    } else if (CGF.CGM.getCodeGenOpts().OptimizationLevel == 0) {
      llvm::FunctionType *type =
          llvm::FunctionType::get(CGF.VoidTy, /*variadic*/ false);

      marker = llvm::InlineAsm::get(type, assembly, "", /*sideeffects*/ true);

    // If we're at -O1 and above, we don't want to litter the code
    // with this marker yet, so leave a breadcrumb for the ARC
    // optimizer to pick up.
    } else {
      const char *retainRVMarkerKey = llvm::objcarc::getRVMarkerModuleFlagStr();
      if (!CGF.CGM.getModule().getModuleFlag(retainRVMarkerKey)) {
        auto *str = llvm::MDString::get(CGF.getLLVMContext(), assembly);
        CGF.CGM.getModule().addModuleFlag(llvm::Module::Error,
                                          retainRVMarkerKey, str);
      }
    }
  }

  // Call the marker asm if we made one, which we do only at -O0.
  if (marker)
    CGF.Builder.CreateCall(marker, std::nullopt,
                           CGF.getBundlesForFunclet(marker));
}
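
// The marker is a target-specific no-op recognized by the runtime when it
// immediately follows the call producing the autoreleased value; on AArch64,
// for example, it is "mov x29, x29". At -O0 it is emitted inline here, while
// at higher optimization levels the ARC optimizer re-inserts it from the
// module flag recorded above.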
static llvm::Value *emitOptimizedARCReturnCall(llvm::Value *value,
                                               bool IsRetainRV,
                                               CodeGenFunction &CGF) {
  emitAutoreleasedReturnValueMarker(CGF);

  // Add operand bundle "clang.arc.attachedcall" to the call instead of
  // emitting retainRV or claimRV calls in the IR. We currently do this only
  // when the optimization level isn't -O0 since global-isel, which is
  // currently run at -O0, doesn't know about the operand bundle.
  ObjCEntrypoints &EPs = CGF.CGM.getObjCEntrypoints();
  llvm::Function *&EP = IsRetainRV
                            ? EPs.objc_retainAutoreleasedReturnValue
                            : EPs.objc_unsafeClaimAutoreleasedReturnValue;
  llvm::Intrinsic::ID IID =
      IsRetainRV ? llvm::Intrinsic::objc_retainAutoreleasedReturnValue
                 : llvm::Intrinsic::objc_unsafeClaimAutoreleasedReturnValue;
  EP = getARCIntrinsic(IID, CGF.CGM);

  llvm::Triple::ArchType Arch = CGF.CGM.getTriple().getArch();

  // FIXME: Do this on all targets and at -O0 too. This can be enabled only if
  // the target backend knows how to handle the operand bundle.
  if (CGF.CGM.getCodeGenOpts().OptimizationLevel > 0 &&
      (Arch == llvm::Triple::aarch64 || Arch == llvm::Triple::x86_64)) {
    llvm::Value *bundleArgs[] = {EP};
    llvm::OperandBundleDef OB("clang.arc.attachedcall", bundleArgs);
    auto *oldCall = cast<llvm::CallBase>(value);
    llvm::CallBase *newCall = llvm::CallBase::addOperandBundle(
        oldCall, llvm::LLVMContext::OB_clang_arc_attachedcall, OB, oldCall);
    newCall->copyMetadata(*oldCall);
    oldCall->replaceAllUsesWith(newCall);
    oldCall->eraseFromParent();
    CGF.EmitARCNoopIntrinsicUse(newCall);
    return newCall;
  }

  bool isNoTail =
      CGF.CGM.getTargetCodeGenInfo().markARCOptimizedReturnCallsAsNoTail();
  llvm::CallInst::TailCallKind tailKind =
      isNoTail ? llvm::CallInst::TCK_NoTail : llvm::CallInst::TCK_None;
  return emitARCValueOperation(CGF, value, nullptr, EP, IID, tailKind);
}
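// For illustration (a sketch, with a hypothetical callee @foo): with
// optimization enabled on x86-64 or AArch64, no separate retainRV/claimRV
// call appears in the IR; the original call instead carries the bundle,
// roughly
//   %call = call i8* @foo() [ "clang.arc.attachedcall"(
//               i8* (i8*)* @llvm.objc.retainAutoreleasedReturnValue) ]
// and later passes expand the bundle into the actual runtime call.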
/// Retain the given object which is the result of a function call.
///   call i8* \@objc_retainAutoreleasedReturnValue(i8* %value)
///
/// Yes, this function name is one character away from a different
/// call with completely different semantics.
llvm::Value *
CodeGenFunction::EmitARCRetainAutoreleasedReturnValue(llvm::Value *value) {
  return emitOptimizedARCReturnCall(value, true, *this);
}

/// Claim a possibly-autoreleased return value at +0. This is only
/// valid to do in contexts which do not rely on the retain to keep
/// the object valid for all of its uses; for example, when
/// the value is ignored, or when it is being assigned to an
/// __unsafe_unretained variable.
///
///   call i8* \@objc_unsafeClaimAutoreleasedReturnValue(i8* %value)
llvm::Value *
CodeGenFunction::EmitARCUnsafeClaimAutoreleasedReturnValue(llvm::Value *value) {
  return emitOptimizedARCReturnCall(value, false, *this);
}

/// Release the given object.
///   call void \@objc_release(i8* %value)
void CodeGenFunction::EmitARCRelease(llvm::Value *value,
                                     ARCPreciseLifetime_t precise) {
  if (isa<llvm::ConstantPointerNull>(value)) return;

  llvm::Function *&fn = CGM.getObjCEntrypoints().objc_release;
  if (!fn)
    fn = getARCIntrinsic(llvm::Intrinsic::objc_release, CGM);

  // Cast the argument to 'id'.
  value = Builder.CreateBitCast(value, Int8PtrTy);

  // Call objc_release.
  llvm::CallInst *call = EmitNounwindRuntimeCall(fn, value);

  if (precise == ARCImpreciseLifetime) {
    call->setMetadata("clang.imprecise_release",
                      llvm::MDNode::get(Builder.getContext(), std::nullopt));
  }
}
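// For illustration: releasing a __strong value whose lifetime is imprecise
// emits, roughly,
//   call void @llvm.objc.release(i8* %value), !clang.imprecise_release !0
// which tells the optimizer it may move the release earlier than the end of
// the variable's formal lifetime.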
/// Destroy a __strong variable.
///
/// At -O0, emit a call to store 'null' into the address;
/// instrumenting tools prefer this because the address is exposed,
/// but it's relatively cumbersome to optimize.
///
/// At -O1 and above, just load and call objc_release.
///
///   call void \@objc_storeStrong(i8** %addr, i8* null)
void CodeGenFunction::EmitARCDestroyStrong(Address addr,
                                           ARCPreciseLifetime_t precise) {
  if (CGM.getCodeGenOpts().OptimizationLevel == 0) {
    llvm::Value *null = getNullForVariable(addr);
    EmitARCStoreStrongCall(addr, null, /*ignored*/ true);
    return;
  }

  llvm::Value *value = Builder.CreateLoad(addr);
  EmitARCRelease(value, precise);
}

/// Store into a strong object. Always calls this:
///   call void \@objc_storeStrong(i8** %addr, i8* %value)
llvm::Value *CodeGenFunction::EmitARCStoreStrongCall(Address addr,
                                                     llvm::Value *value,
                                                     bool ignored) {
  assert(addr.getElementType() == value->getType());

  llvm::Function *&fn = CGM.getObjCEntrypoints().objc_storeStrong;
  if (!fn)
    fn = getARCIntrinsic(llvm::Intrinsic::objc_storeStrong, CGM);

  llvm::Value *args[] = {
    Builder.CreateBitCast(addr.getPointer(), Int8PtrPtrTy),
    Builder.CreateBitCast(value, Int8PtrTy)
  };
  EmitNounwindRuntimeCall(fn, args);

  if (ignored) return nullptr;
  return value;
}

/// Store into a strong object. Sometimes calls this:
///   call void \@objc_storeStrong(i8** %addr, i8* %value)
/// Other times, breaks it down into components.
llvm::Value *CodeGenFunction::EmitARCStoreStrong(LValue dst,
                                                 llvm::Value *newValue,
                                                 bool ignored) {
  QualType type = dst.getType();
  bool isBlock = type->isBlockPointerType();

  // Use a store barrier at -O0 unless this is a block type or the
  // lvalue is inadequately aligned.
  if (shouldUseFusedARCCalls() &&
      !isBlock &&
      (dst.getAlignment().isZero() ||
       dst.getAlignment() >= CharUnits::fromQuantity(PointerAlignInBytes))) {
    return EmitARCStoreStrongCall(dst.getAddress(*this), newValue, ignored);
  }

  // Otherwise, split it out.

  // Retain the new value.
  newValue = EmitARCRetain(type, newValue);

  // Read the old value.
  llvm::Value *oldValue = EmitLoadOfScalar(dst, SourceLocation());

  // Store. We do this before the release so that any deallocs won't
  // see the old value.
  EmitStoreOfScalar(newValue, dst);

  // Finally, release the old value.
  EmitARCRelease(oldValue, dst.isARCPreciseLifetime());

  return newValue;
}
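// For illustration: given a well-aligned, non-block __strong assignment
// "x = y;", the fused path above emits a single
//   call void @llvm.objc.storeStrong(i8** %x, i8* %y)
// whereas the split path is approximately
//   %new = call i8* @llvm.objc.retain(i8* %y)
//   %old = load i8*, i8** %x
//   store i8* %new, i8** %x
//   call void @llvm.objc.release(i8* %old)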
/// Autorelease the given object.
///   call i8* \@objc_autorelease(i8* %value)
llvm::Value *CodeGenFunction::EmitARCAutorelease(llvm::Value *value) {
  return emitARCValueOperation(*this, value, nullptr,
                               CGM.getObjCEntrypoints().objc_autorelease,
                               llvm::Intrinsic::objc_autorelease);
}

/// Autorelease the given object.
///   call i8* \@objc_autoreleaseReturnValue(i8* %value)
llvm::Value *
CodeGenFunction::EmitARCAutoreleaseReturnValue(llvm::Value *value) {
  return emitARCValueOperation(*this, value, nullptr,
                               CGM.getObjCEntrypoints().objc_autoreleaseReturnValue,
                               llvm::Intrinsic::objc_autoreleaseReturnValue,
                               llvm::CallInst::TCK_Tail);
}

/// Do a fused retain/autorelease of the given object.
///   call i8* \@objc_retainAutoreleaseReturnValue(i8* %value)
llvm::Value *
CodeGenFunction::EmitARCRetainAutoreleaseReturnValue(llvm::Value *value) {
  return emitARCValueOperation(*this, value, nullptr,
                               CGM.getObjCEntrypoints().objc_retainAutoreleaseReturnValue,
                               llvm::Intrinsic::objc_retainAutoreleaseReturnValue,
                               llvm::CallInst::TCK_Tail);
}

/// Do a fused retain/autorelease of the given object.
///   call i8* \@objc_retainAutorelease(i8* %value)
/// or
///   %retain = call i8* \@objc_retainBlock(i8* %value)
///   call i8* \@objc_autorelease(i8* %retain)
llvm::Value *CodeGenFunction::EmitARCRetainAutorelease(QualType type,
                                                       llvm::Value *value) {
  if (!type->isBlockPointerType())
    return EmitARCRetainAutoreleaseNonBlock(value);

  if (isa<llvm::ConstantPointerNull>(value)) return value;

  llvm::Type *origType = value->getType();
  value = Builder.CreateBitCast(value, Int8PtrTy);
  value = EmitARCRetainBlock(value, /*mandatory*/ true);
  value = EmitARCAutorelease(value);
  return Builder.CreateBitCast(value, origType);
}

/// Do a fused retain/autorelease of the given object.
///   call i8* \@objc_retainAutorelease(i8* %value)
llvm::Value *
CodeGenFunction::EmitARCRetainAutoreleaseNonBlock(llvm::Value *value) {
  return emitARCValueOperation(*this, value, nullptr,
                               CGM.getObjCEntrypoints().objc_retainAutorelease,
                               llvm::Intrinsic::objc_retainAutorelease);
}

/// i8* \@objc_loadWeak(i8** %addr)
/// Essentially objc_autorelease(objc_loadWeakRetained(addr)).
llvm::Value *CodeGenFunction::EmitARCLoadWeak(Address addr) {
  return emitARCLoadOperation(*this, addr,
                              CGM.getObjCEntrypoints().objc_loadWeak,
                              llvm::Intrinsic::objc_loadWeak);
}

/// i8* \@objc_loadWeakRetained(i8** %addr)
llvm::Value *CodeGenFunction::EmitARCLoadWeakRetained(Address addr) {
  return emitARCLoadOperation(*this, addr,
                              CGM.getObjCEntrypoints().objc_loadWeakRetained,
                              llvm::Intrinsic::objc_loadWeakRetained);
}

/// i8* \@objc_storeWeak(i8** %addr, i8* %value)
/// Returns %value.
llvm::Value *CodeGenFunction::EmitARCStoreWeak(Address addr,
                                               llvm::Value *value,
                                               bool ignored) {
  return emitARCStoreOperation(*this, addr, value,
                               CGM.getObjCEntrypoints().objc_storeWeak,
                               llvm::Intrinsic::objc_storeWeak, ignored);
}

/// i8* \@objc_initWeak(i8** %addr, i8* %value)
/// Returns %value. %addr is known to not have a current weak entry.
/// Essentially equivalent to:
///   *addr = nil; objc_storeWeak(addr, value);
void CodeGenFunction::EmitARCInitWeak(Address addr, llvm::Value *value) {
  // If we're initializing to null, just write null to memory; no need
  // to get the runtime involved. But don't do this if optimization
  // is enabled, because accounting for this would make the optimizer
  // much more complicated.
  if (isa<llvm::ConstantPointerNull>(value) &&
      CGM.getCodeGenOpts().OptimizationLevel == 0) {
    Builder.CreateStore(value, addr);
    return;
  }

  emitARCStoreOperation(*this, addr, value,
                        CGM.getObjCEntrypoints().objc_initWeak,
                        llvm::Intrinsic::objc_initWeak, /*ignored*/ true);
}
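// For illustration: an initialization such as
//   __weak id w = x;
// lowers through this entry point to roughly
//   call i8* @llvm.objc.initWeak(i8** %w, i8* %x)
// while "__weak id w = nil;" at -O0 becomes a plain store of null, per the
// special case above.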
/// void \@objc_destroyWeak(i8** %addr)
/// Essentially objc_storeWeak(addr, nil).
void CodeGenFunction::EmitARCDestroyWeak(Address addr) {
  llvm::Function *&fn = CGM.getObjCEntrypoints().objc_destroyWeak;
  if (!fn)
    fn = getARCIntrinsic(llvm::Intrinsic::objc_destroyWeak, CGM);

  // Cast the argument to 'id*'.
  addr = Builder.CreateElementBitCast(addr, Int8PtrTy);

  EmitNounwindRuntimeCall(fn, addr.getPointer());
}

/// void \@objc_moveWeak(i8** %dest, i8** %src)
/// Disregards the current value in %dest. Leaves %src pointing to nothing.
/// Essentially (objc_copyWeak(dest, src), objc_destroyWeak(src)).
void CodeGenFunction::EmitARCMoveWeak(Address dst, Address src) {
  emitARCCopyOperation(*this, dst, src,
                       CGM.getObjCEntrypoints().objc_moveWeak,
                       llvm::Intrinsic::objc_moveWeak);
}

/// void \@objc_copyWeak(i8** %dest, i8** %src)
/// Disregards the current value in %dest. Essentially
///   objc_release(objc_initWeak(dest, objc_readWeakRetained(src)))
void CodeGenFunction::EmitARCCopyWeak(Address dst, Address src) {
  emitARCCopyOperation(*this, dst, src,
                       CGM.getObjCEntrypoints().objc_copyWeak,
                       llvm::Intrinsic::objc_copyWeak);
}

void CodeGenFunction::emitARCCopyAssignWeak(QualType Ty, Address DstAddr,
                                            Address SrcAddr) {
  llvm::Value *Object = EmitARCLoadWeakRetained(SrcAddr);
  Object = EmitObjCConsumeObject(Ty, Object);
  EmitARCStoreWeak(DstAddr, Object, false);
}

void CodeGenFunction::emitARCMoveAssignWeak(QualType Ty, Address DstAddr,
                                            Address SrcAddr) {
  llvm::Value *Object = EmitARCLoadWeakRetained(SrcAddr);
  Object = EmitObjCConsumeObject(Ty, Object);
  EmitARCStoreWeak(DstAddr, Object, false);
  EmitARCDestroyWeak(SrcAddr);
}

/// Produce the code to do an objc_autoreleasepool_push.
///   call i8* \@objc_autoreleasePoolPush(void)
llvm::Value *CodeGenFunction::EmitObjCAutoreleasePoolPush() {
  llvm::Function *&fn = CGM.getObjCEntrypoints().objc_autoreleasePoolPush;
  if (!fn)
    fn = getARCIntrinsic(llvm::Intrinsic::objc_autoreleasePoolPush, CGM);

  return EmitNounwindRuntimeCall(fn);
}

/// Produce the code to do a primitive release.
///   call void \@objc_autoreleasePoolPop(i8* %ptr)
void CodeGenFunction::EmitObjCAutoreleasePoolPop(llvm::Value *value) {
  assert(value->getType() == Int8PtrTy);

  if (getInvokeDest()) {
    // Call the runtime method, not the intrinsic, if we are handling
    // exceptions.
    llvm::FunctionCallee &fn =
        CGM.getObjCEntrypoints().objc_autoreleasePoolPopInvoke;
    if (!fn) {
      llvm::FunctionType *fnType =
          llvm::FunctionType::get(Builder.getVoidTy(), Int8PtrTy, false);
      fn = CGM.CreateRuntimeFunction(fnType, "objc_autoreleasePoolPop");
      setARCRuntimeFunctionLinkage(CGM, fn);
    }

    // objc_autoreleasePoolPop can throw.
    EmitRuntimeCallOrInvoke(fn, value);
  } else {
    llvm::FunctionCallee &fn = CGM.getObjCEntrypoints().objc_autoreleasePoolPop;
    if (!fn)
      fn = getARCIntrinsic(llvm::Intrinsic::objc_autoreleasePoolPop, CGM);

    EmitRuntimeCall(fn, value);
  }
}
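// For illustration: with a native ARC runtime, "@autoreleasepool { ... }"
// brackets its body with
//   %token = call i8* @llvm.objc.autoreleasePoolPush()
//   ...
//   call void @llvm.objc.autoreleasePoolPop(i8* %token)
// where the pop goes through the runtime function (via an invoke) rather
// than the intrinsic whenever the pop may need to unwind.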
/// Produce the code to do an MRR version objc_autoreleasepool_push.
/// Which is: [[NSAutoreleasePool alloc] init];
/// Where alloc is declared as: + (id) alloc; in the NSAutoreleasePool class.
/// init is declared as: - (id) init; in its NSObject superclass.
///
llvm::Value *CodeGenFunction::EmitObjCMRRAutoreleasePoolPush() {
  CGObjCRuntime &Runtime = CGM.getObjCRuntime();
  llvm::Value *Receiver = Runtime.EmitNSAutoreleasePoolClassRef(*this);
  // [NSAutoreleasePool alloc]
  IdentifierInfo *II = &CGM.getContext().Idents.get("alloc");
  Selector AllocSel = getContext().Selectors.getSelector(0, &II);
  CallArgList Args;
  RValue AllocRV =
    Runtime.GenerateMessageSend(*this, ReturnValueSlot(),
                                getContext().getObjCIdType(),
                                AllocSel, Receiver, Args);

  // [Receiver init]
  Receiver = AllocRV.getScalarVal();
  II = &CGM.getContext().Idents.get("init");
  Selector InitSel = getContext().Selectors.getSelector(0, &II);
  RValue InitRV =
    Runtime.GenerateMessageSend(*this, ReturnValueSlot(),
                                getContext().getObjCIdType(),
                                InitSel, Receiver, Args);
  return InitRV.getScalarVal();
}

/// Allocate the given objc object.
///   call i8* \@objc_alloc(i8* %value)
llvm::Value *CodeGenFunction::EmitObjCAlloc(llvm::Value *value,
                                            llvm::Type *resultType) {
  return emitObjCValueOperation(*this, value, resultType,
                                CGM.getObjCEntrypoints().objc_alloc,
                                "objc_alloc");
}

/// Allocate the given objc object.
///   call i8* \@objc_allocWithZone(i8* %value)
llvm::Value *CodeGenFunction::EmitObjCAllocWithZone(llvm::Value *value,
                                                    llvm::Type *resultType) {
  return emitObjCValueOperation(*this, value, resultType,
                                CGM.getObjCEntrypoints().objc_allocWithZone,
                                "objc_allocWithZone");
}

llvm::Value *CodeGenFunction::EmitObjCAllocInit(llvm::Value *value,
                                                llvm::Type *resultType) {
  return emitObjCValueOperation(*this, value, resultType,
                                CGM.getObjCEntrypoints().objc_alloc_init,
                                "objc_alloc_init");
}

/// Produce the code to do a primitive release.
///   [tmp drain];
void CodeGenFunction::EmitObjCMRRAutoreleasePoolPop(llvm::Value *Arg) {
  IdentifierInfo *II = &CGM.getContext().Idents.get("drain");
  Selector DrainSel = getContext().Selectors.getSelector(0, &II);
  CallArgList Args;
  CGM.getObjCRuntime().GenerateMessageSend(*this, ReturnValueSlot(),
                                           getContext().VoidTy, DrainSel, Arg,
                                           Args);
}

void CodeGenFunction::destroyARCStrongPrecise(CodeGenFunction &CGF,
                                              Address addr,
                                              QualType type) {
  CGF.EmitARCDestroyStrong(addr, ARCPreciseLifetime);
}

void CodeGenFunction::destroyARCStrongImprecise(CodeGenFunction &CGF,
                                                Address addr,
                                                QualType type) {
  CGF.EmitARCDestroyStrong(addr, ARCImpreciseLifetime);
}

void CodeGenFunction::destroyARCWeak(CodeGenFunction &CGF,
                                     Address addr,
                                     QualType type) {
  CGF.EmitARCDestroyWeak(addr);
}

void CodeGenFunction::emitARCIntrinsicUse(CodeGenFunction &CGF, Address addr,
                                          QualType type) {
  llvm::Value *value = CGF.Builder.CreateLoad(addr);
  CGF.EmitARCIntrinsicUse(value);
}

/// Autorelease the given object.
///   call i8* \@objc_autorelease(i8* %value)
llvm::Value *CodeGenFunction::EmitObjCAutorelease(llvm::Value *value,
                                                  llvm::Type *returnType) {
  return emitObjCValueOperation(
      *this, value, returnType,
      CGM.getObjCEntrypoints().objc_autoreleaseRuntimeFunction,
      "objc_autorelease");
}

/// Retain the given object, with normal retain semantics.
///   call i8* \@objc_retain(i8* %value)
llvm::Value *CodeGenFunction::EmitObjCRetainNonBlock(llvm::Value *value,
                                                     llvm::Type *returnType) {
  return emitObjCValueOperation(
      *this, value, returnType,
      CGM.getObjCEntrypoints().objc_retainRuntimeFunction, "objc_retain");
}

/// Release the given object.
///   call void \@objc_release(i8* %value)
void CodeGenFunction::EmitObjCRelease(llvm::Value *value,
                                      ARCPreciseLifetime_t precise) {
  if (isa<llvm::ConstantPointerNull>(value)) return;

  llvm::FunctionCallee &fn =
      CGM.getObjCEntrypoints().objc_releaseRuntimeFunction;
  if (!fn) {
    llvm::FunctionType *fnType =
        llvm::FunctionType::get(Builder.getVoidTy(), Int8PtrTy, false);
    fn = CGM.CreateRuntimeFunction(fnType, "objc_release");
    setARCRuntimeFunctionLinkage(CGM, fn);
    // We have native ARC, so set the nonlazybind attribute for performance.
    if (llvm::Function *f = dyn_cast<llvm::Function>(fn.getCallee()))
      f->addFnAttr(llvm::Attribute::NonLazyBind);
  }

  // Cast the argument to 'id'.
  value = Builder.CreateBitCast(value, Int8PtrTy);

  // Call objc_release.
  llvm::CallBase *call = EmitCallOrInvoke(fn, value);

  if (precise == ARCImpreciseLifetime) {
    call->setMetadata("clang.imprecise_release",
                      llvm::MDNode::get(Builder.getContext(), std::nullopt));
  }
}
namespace {
  struct CallObjCAutoreleasePoolObject final : EHScopeStack::Cleanup {
    llvm::Value *Token;

    CallObjCAutoreleasePoolObject(llvm::Value *token) : Token(token) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitObjCAutoreleasePoolPop(Token);
    }
  };
  struct CallObjCMRRAutoreleasePoolObject final : EHScopeStack::Cleanup {
    llvm::Value *Token;

    CallObjCMRRAutoreleasePoolObject(llvm::Value *token) : Token(token) {}

    void Emit(CodeGenFunction &CGF, Flags flags) override {
      CGF.EmitObjCMRRAutoreleasePoolPop(Token);
    }
  };
}

void CodeGenFunction::EmitObjCAutoreleasePoolCleanup(llvm::Value *Ptr) {
  if (CGM.getLangOpts().ObjCAutoRefCount)
    EHStack.pushCleanup<CallObjCAutoreleasePoolObject>(NormalCleanup, Ptr);
  else
    EHStack.pushCleanup<CallObjCMRRAutoreleasePoolObject>(NormalCleanup, Ptr);
}

static bool shouldRetainObjCLifetime(Qualifiers::ObjCLifetime lifetime) {
  switch (lifetime) {
  case Qualifiers::OCL_None:
  case Qualifiers::OCL_ExplicitNone:
  case Qualifiers::OCL_Strong:
  case Qualifiers::OCL_Autoreleasing:
    return true;

  case Qualifiers::OCL_Weak:
    return false;
  }

  llvm_unreachable("impossible lifetime!");
}

static TryEmitResult tryEmitARCRetainLoadOfScalar(CodeGenFunction &CGF,
                                                  LValue lvalue,
                                                  QualType type) {
  llvm::Value *result;
  bool shouldRetain = shouldRetainObjCLifetime(type.getObjCLifetime());
  if (shouldRetain) {
    result = CGF.EmitLoadOfLValue(lvalue, SourceLocation()).getScalarVal();
  } else {
    assert(type.getObjCLifetime() == Qualifiers::OCL_Weak);
    result = CGF.EmitARCLoadWeakRetained(lvalue.getAddress(CGF));
  }
  return TryEmitResult(result, !shouldRetain);
}

static TryEmitResult tryEmitARCRetainLoadOfScalar(CodeGenFunction &CGF,
                                                  const Expr *e) {
  e = e->IgnoreParens();
  QualType type = e->getType();

  // If we're loading retained from a __strong xvalue, we can avoid
  // an extra retain/release pair by zeroing out the source of this
  // "move" operation.
  if (e->isXValue() &&
      !type.isConstQualified() &&
      type.getObjCLifetime() == Qualifiers::OCL_Strong) {
    // Emit the lvalue.
    LValue lv = CGF.EmitLValue(e);

    // Load the object pointer.
    llvm::Value *result = CGF.EmitLoadOfLValue(lv,
                                               SourceLocation()).getScalarVal();

    // Set the source pointer to NULL.
    CGF.EmitStoreOfScalar(getNullForVariable(lv.getAddress(CGF)), lv);

    return TryEmitResult(result, true);
  }

  // As a very special optimization, in ARC++, if the l-value is the
  // result of a non-volatile assignment, do a simple retain of the
  // result of the call to objc_storeWeak instead of reloading.
  if (CGF.getLangOpts().CPlusPlus &&
      !type.isVolatileQualified() &&
      type.getObjCLifetime() == Qualifiers::OCL_Weak &&
      isa<BinaryOperator>(e) &&
      cast<BinaryOperator>(e)->getOpcode() == BO_Assign)
    return TryEmitResult(CGF.EmitScalarExpr(e), false);

  // Try to emit code for a scalar constant instead of emitting an LValue
  // and loading it, because we are not guaranteed to have an l-value. One
  // such case is a DeclRefExpr referencing a non-odr-used
  // constant-evaluated variable.
  if (const auto *decl_expr = dyn_cast<DeclRefExpr>(e)) {
    auto *DRE = const_cast<DeclRefExpr *>(decl_expr);
    if (CodeGenFunction::ConstantEmission constant = CGF.tryEmitAsConstant(DRE))
      return TryEmitResult(CGF.emitScalarConstant(constant, DRE),
                           !shouldRetainObjCLifetime(type.getObjCLifetime()));
  }

  return tryEmitARCRetainLoadOfScalar(CGF, CGF.EmitLValue(e), type);
}

typedef llvm::function_ref<llvm::Value *(CodeGenFunction &CGF,
                                         llvm::Value *value)>
  ValueTransform;

/// Insert code immediately after a call.
// FIXME: We should find a way to emit the runtime call immediately
// after the call is emitted to eliminate the need for this function.
static llvm::Value *emitARCOperationAfterCall(CodeGenFunction &CGF,
                                              llvm::Value *value,
                                              ValueTransform doAfterCall,
                                              ValueTransform doFallback) {
  CGBuilderTy::InsertPoint ip = CGF.Builder.saveIP();
  auto *callBase = dyn_cast<llvm::CallBase>(value);

  if (callBase && llvm::objcarc::hasAttachedCallOpBundle(callBase)) {
    // Fall back if the call base has operand bundle "clang.arc.attachedcall".
    value = doFallback(CGF, value);
  } else if (llvm::CallInst *call = dyn_cast<llvm::CallInst>(value)) {
    // Place the retain immediately following the call.
    CGF.Builder.SetInsertPoint(call->getParent(),
                               ++llvm::BasicBlock::iterator(call));
    value = doAfterCall(CGF, value);
  } else if (llvm::InvokeInst *invoke = dyn_cast<llvm::InvokeInst>(value)) {
    // Place the retain at the beginning of the normal destination block.
    llvm::BasicBlock *BB = invoke->getNormalDest();
    CGF.Builder.SetInsertPoint(BB, BB->begin());
    value = doAfterCall(CGF, value);

  // Bitcasts can arise because of related-result returns. Rewrite
  // the operand.
  } else if (llvm::BitCastInst *bitcast = dyn_cast<llvm::BitCastInst>(value)) {
    // Change the insert point to avoid emitting the fall-back call after the
    // bitcast.
    CGF.Builder.SetInsertPoint(bitcast->getParent(), bitcast->getIterator());
    llvm::Value *operand = bitcast->getOperand(0);
    operand = emitARCOperationAfterCall(CGF, operand, doAfterCall, doFallback);
    bitcast->setOperand(0, operand);
    value = bitcast;
  } else {
    auto *phi = dyn_cast<llvm::PHINode>(value);
    if (phi && phi->getNumIncomingValues() == 2 &&
        isa<llvm::ConstantPointerNull>(phi->getIncomingValue(1)) &&
        isa<llvm::CallBase>(phi->getIncomingValue(0))) {
      // Handle phi instructions that are generated when it's necessary to
      // check whether the receiver of a message is null.
      llvm::Value *inVal = phi->getIncomingValue(0);
      inVal = emitARCOperationAfterCall(CGF, inVal, doAfterCall, doFallback);
      phi->setIncomingValue(0, inVal);
      value = phi;
    } else {
      // Generic fall-back case.
      // Retain using the non-block variant: we never need to do a copy
      // of a block that's been returned to us.
      value = doFallback(CGF, value);
    }
  }

  CGF.Builder.restoreIP(ip);
  return value;
}
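// For illustration (a sketch, with a hypothetical callee @foo): given an
// invoke
//   %call = invoke i8* @foo() to label %normal unwind label %lpad
// the doAfterCall transform is applied at the top of the normal destination,
// e.g.
//   normal:
//     %1 = call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* %call)
// so the retain still immediately follows the call on the non-exceptional
// path.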
/// Given that the given expression is some sort of call (which does
/// not return retained), emit a retain following it.
static llvm::Value *emitARCRetainCallResult(CodeGenFunction &CGF,
                                            const Expr *e) {
  llvm::Value *value = CGF.EmitScalarExpr(e);
  return emitARCOperationAfterCall(CGF, value,
           [](CodeGenFunction &CGF, llvm::Value *value) {
             return CGF.EmitARCRetainAutoreleasedReturnValue(value);
           },
           [](CodeGenFunction &CGF, llvm::Value *value) {
             return CGF.EmitARCRetainNonBlock(value);
           });
}

/// Given that the given expression is some sort of call (which does
/// not return retained), perform an unsafeClaim following it.
static llvm::Value *emitARCUnsafeClaimCallResult(CodeGenFunction &CGF,
                                                 const Expr *e) {
  llvm::Value *value = CGF.EmitScalarExpr(e);
  return emitARCOperationAfterCall(CGF, value,
           [](CodeGenFunction &CGF, llvm::Value *value) {
             return CGF.EmitARCUnsafeClaimAutoreleasedReturnValue(value);
           },
           [](CodeGenFunction &CGF, llvm::Value *value) {
             return value;
           });
}

llvm::Value *CodeGenFunction::EmitARCReclaimReturnedObject(const Expr *E,
                                                           bool allowUnsafeClaim) {
  if (allowUnsafeClaim &&
      CGM.getLangOpts().ObjCRuntime.hasARCUnsafeClaimAutoreleasedReturnValue()) {
    return emitARCUnsafeClaimCallResult(*this, E);
  } else {
    llvm::Value *value = emitARCRetainCallResult(*this, E);
    return EmitObjCConsumeObject(E->getType(), value);
  }
}

/// Determine whether it might be important to emit a separate
/// objc_retain_block on the result of the given expression, or
/// whether it's okay to just emit it in a +1 context.
static bool shouldEmitSeparateBlockRetain(const Expr *e) {
  assert(e->getType()->isBlockPointerType());
  e = e->IgnoreParens();

  // For future goodness, emit block expressions directly in +1
  // contexts if we can.
  if (isa<BlockExpr>(e))
    return false;

  if (const CastExpr *cast = dyn_cast<CastExpr>(e)) {
    switch (cast->getCastKind()) {
    // Emitting these operations in +1 contexts is goodness.
    case CK_LValueToRValue:
    case CK_ARCReclaimReturnedObject:
    case CK_ARCConsumeObject:
    case CK_ARCProduceObject:
      return false;

    // These operations preserve a block type.
    case CK_NoOp:
    case CK_BitCast:
      return shouldEmitSeparateBlockRetain(cast->getSubExpr());

    // These operations are known to be bad (or haven't been considered).
    case CK_AnyPointerToBlockPointerCast:
    default:
      return true;
    }
  }

  return true;
}

namespace {
/// A CRTP base class for emitting expressions of retainable object
/// pointer type in ARC.
template <typename Impl, typename Result> class ARCExprEmitter {
protected:
  CodeGenFunction &CGF;
  Impl &asImpl() { return *static_cast<Impl*>(this); }

  ARCExprEmitter(CodeGenFunction &CGF) : CGF(CGF) {}

public:
  Result visit(const Expr *e);
  Result visitCastExpr(const CastExpr *e);
  Result visitPseudoObjectExpr(const PseudoObjectExpr *e);
  Result visitBlockExpr(const BlockExpr *e);
  Result visitBinaryOperator(const BinaryOperator *e);
  Result visitBinAssign(const BinaryOperator *e);
  Result visitBinAssignUnsafeUnretained(const BinaryOperator *e);
  Result visitBinAssignAutoreleasing(const BinaryOperator *e);
  Result visitBinAssignWeak(const BinaryOperator *e);
  Result visitBinAssignStrong(const BinaryOperator *e);

  // Minimal implementation:
  //   Result visitLValueToRValue(const Expr *e)
  //   Result visitConsumeObject(const Expr *e)
  //   Result visitExtendBlockObject(const Expr *e)
  //   Result visitReclaimReturnedObject(const Expr *e)
  //   Result visitCall(const Expr *e)
  //   Result visitExpr(const Expr *e)
  //
  //   Result emitBitCast(Result result, llvm::Type *resultType)
  //   llvm::Value *getValueOfResult(Result result)
};
}
/// Try to emit a PseudoObjectExpr under special ARC rules.
///
/// This massively duplicates emitPseudoObjectRValue.
template <typename Impl, typename Result>
Result
ARCExprEmitter<Impl,Result>::visitPseudoObjectExpr(const PseudoObjectExpr *E) {
  SmallVector<CodeGenFunction::OpaqueValueMappingData, 4> opaques;

  // Find the result expression.
  const Expr *resultExpr = E->getResultExpr();
  assert(resultExpr);
  Result result;

  for (PseudoObjectExpr::const_semantics_iterator
         i = E->semantics_begin(), e = E->semantics_end(); i != e; ++i) {
    const Expr *semantic = *i;

    // If this semantic expression is an opaque value, bind it
    // to the result of its source expression.
    if (const OpaqueValueExpr *ov = dyn_cast<OpaqueValueExpr>(semantic)) {
      typedef CodeGenFunction::OpaqueValueMappingData OVMA;
      OVMA opaqueData;

      // If this semantic is the result of the pseudo-object
      // expression, try to evaluate the source as +1.
      if (ov == resultExpr) {
        assert(!OVMA::shouldBindAsLValue(ov));
        result = asImpl().visit(ov->getSourceExpr());
        opaqueData = OVMA::bind(CGF, ov,
                            RValue::get(asImpl().getValueOfResult(result)));

      // Otherwise, just bind it.
      } else {
        opaqueData = OVMA::bind(CGF, ov, ov->getSourceExpr());
      }
      opaques.push_back(opaqueData);

    // Otherwise, if the expression is the result, evaluate it
    // and remember the result.
    } else if (semantic == resultExpr) {
      result = asImpl().visit(semantic);

    // Otherwise, evaluate the expression in an ignored context.
    } else {
      CGF.EmitIgnoredExpr(semantic);
    }
  }

  // Unbind all the opaques now.
  for (unsigned i = 0, e = opaques.size(); i != e; ++i)
    opaques[i].unbind(CGF);

  return result;
}

template <typename Impl, typename Result>
Result ARCExprEmitter<Impl, Result>::visitBlockExpr(const BlockExpr *e) {
  // The default implementation just forwards the expression to visitExpr.
  return asImpl().visitExpr(e);
}

template <typename Impl, typename Result>
Result ARCExprEmitter<Impl,Result>::visitCastExpr(const CastExpr *e) {
  switch (e->getCastKind()) {

  // No-op casts don't change the type, so we just ignore them.
  case CK_NoOp:
    return asImpl().visit(e->getSubExpr());

  // These casts can change the type.
  case CK_CPointerToObjCPointerCast:
  case CK_BlockPointerToObjCPointerCast:
  case CK_AnyPointerToBlockPointerCast:
  case CK_BitCast: {
    llvm::Type *resultType = CGF.ConvertType(e->getType());
    assert(e->getSubExpr()->getType()->hasPointerRepresentation());
    Result result = asImpl().visit(e->getSubExpr());
    return asImpl().emitBitCast(result, resultType);
  }

  // Handle some casts specially.
  case CK_LValueToRValue:
    return asImpl().visitLValueToRValue(e->getSubExpr());
  case CK_ARCConsumeObject:
    return asImpl().visitConsumeObject(e->getSubExpr());
  case CK_ARCExtendBlockObject:
    return asImpl().visitExtendBlockObject(e->getSubExpr());
  case CK_ARCReclaimReturnedObject:
    return asImpl().visitReclaimReturnedObject(e->getSubExpr());

  // Otherwise, use the default logic.
  default:
    return asImpl().visitExpr(e);
  }
}

template <typename Impl, typename Result>
Result
ARCExprEmitter<Impl,Result>::visitBinaryOperator(const BinaryOperator *e) {
  switch (e->getOpcode()) {
  case BO_Comma:
    CGF.EmitIgnoredExpr(e->getLHS());
    CGF.EnsureInsertPoint();
    return asImpl().visit(e->getRHS());

  case BO_Assign:
    return asImpl().visitBinAssign(e);

  default:
    return asImpl().visitExpr(e);
  }
}

template <typename Impl, typename Result>
Result ARCExprEmitter<Impl,Result>::visitBinAssign(const BinaryOperator *e) {
  switch (e->getLHS()->getType().getObjCLifetime()) {
  case Qualifiers::OCL_ExplicitNone:
    return asImpl().visitBinAssignUnsafeUnretained(e);

  case Qualifiers::OCL_Weak:
    return asImpl().visitBinAssignWeak(e);

  case Qualifiers::OCL_Autoreleasing:
    return asImpl().visitBinAssignAutoreleasing(e);

  case Qualifiers::OCL_Strong:
    return asImpl().visitBinAssignStrong(e);

  case Qualifiers::OCL_None:
    return asImpl().visitExpr(e);
  }
  llvm_unreachable("bad ObjC ownership qualifier");
}

/// The default rule for __unsafe_unretained emits the RHS recursively,
/// stores into the unsafe variable, and propagates the result outward.
template <typename Impl, typename Result>
Result ARCExprEmitter<Impl,Result>::
                    visitBinAssignUnsafeUnretained(const BinaryOperator *e) {
  // Recursively emit the RHS.
  // For __block safety, do this before emitting the LHS.
  Result result = asImpl().visit(e->getRHS());

  // Perform the store.
  LValue lvalue =
    CGF.EmitCheckedLValue(e->getLHS(), CodeGenFunction::TCK_Store);
  CGF.EmitStoreThroughLValue(RValue::get(asImpl().getValueOfResult(result)),
                             lvalue);

  return result;
}

template <typename Impl, typename Result>
Result
ARCExprEmitter<Impl,Result>::visitBinAssignAutoreleasing(const BinaryOperator *e) {
  return asImpl().visitExpr(e);
}

template <typename Impl, typename Result>
Result
ARCExprEmitter<Impl,Result>::visitBinAssignWeak(const BinaryOperator *e) {
  return asImpl().visitExpr(e);
}

template <typename Impl, typename Result>
Result
ARCExprEmitter<Impl,Result>::visitBinAssignStrong(const BinaryOperator *e) {
  return asImpl().visitExpr(e);
}

/// The general expression-emission logic.
template <typename Impl, typename Result>
Result ARCExprEmitter<Impl,Result>::visit(const Expr *e) {
  // We should *never* see a nested full-expression here, because if
  // we fail to emit at +1, our caller must not retain after we close
  // out the full-expression. This isn't as important in the unsafe
  // emitter.
  assert(!isa<ExprWithCleanups>(e));

  // Look through parens, __extension__, generic selection, etc.
  e = e->IgnoreParens();

  // Handle certain kinds of casts.
  if (const CastExpr *ce = dyn_cast<CastExpr>(e)) {
    return asImpl().visitCastExpr(ce);

  // Handle the comma operator.
  } else if (auto op = dyn_cast<BinaryOperator>(e)) {
    return asImpl().visitBinaryOperator(op);

  // TODO: handle conditional operators here

  // For calls and message sends, use the retained-call logic.
  // Delegate inits are a special case in that they're the only
  // returns-retained expression that *isn't* surrounded by
  // a consume.
  } else if (isa<CallExpr>(e) ||
             (isa<ObjCMessageExpr>(e) &&
              !cast<ObjCMessageExpr>(e)->isDelegateInitCall())) {
    return asImpl().visitCall(e);

  // Look through pseudo-object expressions.
  } else if (const PseudoObjectExpr *pseudo = dyn_cast<PseudoObjectExpr>(e)) {
    return asImpl().visitPseudoObjectExpr(pseudo);
  } else if (auto *be = dyn_cast<BlockExpr>(e))
    return asImpl().visitBlockExpr(be);

  return asImpl().visitExpr(e);
}
namespace {

/// An emitter for +1 results.
struct ARCRetainExprEmitter :
  public ARCExprEmitter<ARCRetainExprEmitter, TryEmitResult> {

  ARCRetainExprEmitter(CodeGenFunction &CGF) : ARCExprEmitter(CGF) {}

  llvm::Value *getValueOfResult(TryEmitResult result) {
    return result.getPointer();
  }

  TryEmitResult emitBitCast(TryEmitResult result, llvm::Type *resultType) {
    llvm::Value *value = result.getPointer();
    value = CGF.Builder.CreateBitCast(value, resultType);
    result.setPointer(value);
    return result;
  }

  TryEmitResult visitLValueToRValue(const Expr *e) {
    return tryEmitARCRetainLoadOfScalar(CGF, e);
  }

  /// For consumptions, just emit the subexpression and thus elide
  /// the retain/release pair.
  TryEmitResult visitConsumeObject(const Expr *e) {
    llvm::Value *result = CGF.EmitScalarExpr(e);
    return TryEmitResult(result, true);
  }

  TryEmitResult visitBlockExpr(const BlockExpr *e) {
    TryEmitResult result = visitExpr(e);
    // Avoid the block-retain if this is a block literal that doesn't need to
    // be copied to the heap.
    if (CGF.CGM.getCodeGenOpts().ObjCAvoidHeapifyLocalBlocks &&
        e->getBlockDecl()->canAvoidCopyToHeap())
      result.setInt(true);
    return result;
  }

  /// Block extends are net +0. Naively, we could just recurse on
  /// the subexpression, but actually we need to ensure that the
  /// value is copied as a block, so there's a little filter here.
  TryEmitResult visitExtendBlockObject(const Expr *e) {
    llvm::Value *result; // will be a +0 value

    // If we can't safely assume the sub-expression will produce a
    // block-copied value, emit the sub-expression at +0.
    if (shouldEmitSeparateBlockRetain(e)) {
      result = CGF.EmitScalarExpr(e);

    // Otherwise, try to emit the sub-expression at +1 recursively.
    } else {
      TryEmitResult subresult = asImpl().visit(e);

      // If that produced a retained value, just use that.
      if (subresult.getInt()) {
        return subresult;
      }

      // Otherwise it's +0.
      result = subresult.getPointer();
    }

    // Retain the object as a block.
    result = CGF.EmitARCRetainBlock(result, /*mandatory*/ true);
    return TryEmitResult(result, true);
  }

  /// For reclaims, emit the subexpression as a retained call and
  /// skip the consumption.
  TryEmitResult visitReclaimReturnedObject(const Expr *e) {
    llvm::Value *result = emitARCRetainCallResult(CGF, e);
    return TryEmitResult(result, true);
  }

  /// When we have an undecorated call, retroactively do a claim.
  TryEmitResult visitCall(const Expr *e) {
    llvm::Value *result = emitARCRetainCallResult(CGF, e);
    return TryEmitResult(result, true);
  }

  // TODO: maybe special-case visitBinAssignWeak?

  TryEmitResult visitExpr(const Expr *e) {
    // We didn't find an obvious production, so emit what we've got and
    // tell the caller that we didn't manage to retain.
    llvm::Value *result = CGF.EmitScalarExpr(e);
    return TryEmitResult(result, false);
  }
};
}

static TryEmitResult
tryEmitARCRetainScalarExpr(CodeGenFunction &CGF, const Expr *e) {
  return ARCRetainExprEmitter(CGF).visit(e);
}

static llvm::Value *emitARCRetainLoadOfScalar(CodeGenFunction &CGF,
                                              LValue lvalue,
                                              QualType type) {
  TryEmitResult result = tryEmitARCRetainLoadOfScalar(CGF, lvalue, type);
  llvm::Value *value = result.getPointer();
  if (!result.getInt())
    value = CGF.EmitARCRetain(type, value);
  return value;
}

/// EmitARCRetainScalarExpr - Semantically equivalent to
/// EmitARCRetainObject(e->getType(), EmitScalarExpr(e)), but making a
/// best-effort attempt to peephole expressions that naturally produce
/// retained objects.
llvm::Value *CodeGenFunction::EmitARCRetainScalarExpr(const Expr *e) {
  // The retain needs to happen within the full-expression.
  if (const ExprWithCleanups *cleanups = dyn_cast<ExprWithCleanups>(e)) {
    RunCleanupsScope scope(*this);
    return EmitARCRetainScalarExpr(cleanups->getSubExpr());
  }

  TryEmitResult result = tryEmitARCRetainScalarExpr(*this, e);
  llvm::Value *value = result.getPointer();
  if (!result.getInt())
    value = EmitARCRetain(e->getType(), value);
  return value;
}
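// For illustration: when the expression is a message send returning at +0,
// the emitter above recognizes the call, emits
// objc_retainAutoreleasedReturnValue immediately after it, and reports the
// value as already retained, so the fallback EmitARCRetain here is skipped
// and no redundant retain/release pair is generated.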
llvm::Value *
CodeGenFunction::EmitARCRetainAutoreleaseScalarExpr(const Expr *e) {
  // The retain needs to happen within the full-expression.
  if (const ExprWithCleanups *cleanups = dyn_cast<ExprWithCleanups>(e)) {
    RunCleanupsScope scope(*this);
    return EmitARCRetainAutoreleaseScalarExpr(cleanups->getSubExpr());
  }

  TryEmitResult result = tryEmitARCRetainScalarExpr(*this, e);
  llvm::Value *value = result.getPointer();
  if (result.getInt())
    value = EmitARCAutorelease(value);
  else
    value = EmitARCRetainAutorelease(e->getType(), value);
  return value;
}

llvm::Value *CodeGenFunction::EmitARCExtendBlockObject(const Expr *e) {
  llvm::Value *result;
  bool doRetain;

  if (shouldEmitSeparateBlockRetain(e)) {
    result = EmitScalarExpr(e);
    doRetain = true;
  } else {
    TryEmitResult subresult = tryEmitARCRetainScalarExpr(*this, e);
    result = subresult.getPointer();
    doRetain = !subresult.getInt();
  }

  if (doRetain)
    result = EmitARCRetainBlock(result, /*mandatory*/ true);
  return EmitObjCConsumeObject(e->getType(), result);
}

llvm::Value *CodeGenFunction::EmitObjCThrowOperand(const Expr *expr) {
  // In ARC, retain and autorelease the expression.
  if (getLangOpts().ObjCAutoRefCount) {
    // Do so before running any cleanups for the full-expression.
    // EmitARCRetainAutoreleaseScalarExpr does this for us.
    return EmitARCRetainAutoreleaseScalarExpr(expr);
  }

  // Otherwise, use the normal scalar-expression emission. The
  // exception machinery doesn't do anything special with the
  // exception like retaining it, so there's no safety associated with
  // only running cleanups after the throw has started, and when it
  // matters it tends to be substantially inferior code.
  return EmitScalarExpr(expr);
}

namespace {

/// An emitter for assigning into an __unsafe_unretained context.
struct ARCUnsafeUnretainedExprEmitter :
  public ARCExprEmitter<ARCUnsafeUnretainedExprEmitter, llvm::Value*> {

  ARCUnsafeUnretainedExprEmitter(CodeGenFunction &CGF) : ARCExprEmitter(CGF) {}

  llvm::Value *getValueOfResult(llvm::Value *value) {
    return value;
  }

  llvm::Value *emitBitCast(llvm::Value *value, llvm::Type *resultType) {
    return CGF.Builder.CreateBitCast(value, resultType);
  }

  llvm::Value *visitLValueToRValue(const Expr *e) {
    return CGF.EmitScalarExpr(e);
  }

  /// For consumptions, just emit the subexpression and perform the
  /// consumption like normal.
  llvm::Value *visitConsumeObject(const Expr *e) {
    llvm::Value *value = CGF.EmitScalarExpr(e);
    return CGF.EmitObjCConsumeObject(e->getType(), value);
  }

  /// No special logic for block extensions. (This probably can't
  /// actually happen in this emitter, though.)
  llvm::Value *visitExtendBlockObject(const Expr *e) {
    return CGF.EmitARCExtendBlockObject(e);
  }

  /// For reclaims, perform an unsafeClaim if that's enabled.
  llvm::Value *visitReclaimReturnedObject(const Expr *e) {
    return CGF.EmitARCReclaimReturnedObject(e, /*unsafe*/ true);
  }

  /// When we have an undecorated call, just emit it without adding
  /// the unsafeClaim.
  llvm::Value *visitCall(const Expr *e) {
    return CGF.EmitScalarExpr(e);
  }

  /// Just do normal scalar emission in the default case.
  llvm::Value *visitExpr(const Expr *e) {
    return CGF.EmitScalarExpr(e);
  }
};
}

static llvm::Value *emitARCUnsafeUnretainedScalarExpr(CodeGenFunction &CGF,
                                                      const Expr *e) {
  return ARCUnsafeUnretainedExprEmitter(CGF).visit(e);
}

/// EmitARCUnsafeUnretainedScalarExpr - Semantically equivalent to
/// immediately releasing the result of EmitARCRetainScalarExpr, but
/// avoiding any spurious retains, including by performing reclaims
/// with objc_unsafeClaimAutoreleasedReturnValue.
llvm::Value *CodeGenFunction::EmitARCUnsafeUnretainedScalarExpr(const Expr *e) {
  // Look through full-expressions.
  if (const ExprWithCleanups *cleanups = dyn_cast<ExprWithCleanups>(e)) {
    RunCleanupsScope scope(*this);
    return emitARCUnsafeUnretainedScalarExpr(*this, cleanups->getSubExpr());
  }

  return emitARCUnsafeUnretainedScalarExpr(*this, e);
}

std::pair<LValue,llvm::Value*>
CodeGenFunction::EmitARCStoreUnsafeUnretained(const BinaryOperator *e,
                                              bool ignored) {
  // Evaluate the RHS first. If we're ignoring the result, assume
  // that we can emit at an unsafe +0.
  llvm::Value *value;
  if (ignored) {
    value = EmitARCUnsafeUnretainedScalarExpr(e->getRHS());
  } else {
    value = EmitScalarExpr(e->getRHS());
  }

  // Emit the LHS and perform the store.
  LValue lvalue = EmitLValue(e->getLHS());
  EmitStoreOfScalar(value, lvalue);

  return std::pair<LValue,llvm::Value*>(std::move(lvalue), value);
}

std::pair<LValue,llvm::Value*>
CodeGenFunction::EmitARCStoreStrong(const BinaryOperator *e,
                                    bool ignored) {
  // Evaluate the RHS first.
  TryEmitResult result = tryEmitARCRetainScalarExpr(*this, e->getRHS());
  llvm::Value *value = result.getPointer();

  bool hasImmediateRetain = result.getInt();

  // If we didn't emit a retained object, and the l-value is of block
  // type, then we need to emit the block-retain immediately in case
  // it invalidates the l-value.
  if (!hasImmediateRetain && e->getType()->isBlockPointerType()) {
    value = EmitARCRetainBlock(value, /*mandatory*/ false);
    hasImmediateRetain = true;
  }

  LValue lvalue = EmitLValue(e->getLHS());

  // If the RHS was emitted retained, expand this.
  if (hasImmediateRetain) {
    llvm::Value *oldValue = EmitLoadOfScalar(lvalue, SourceLocation());
    EmitStoreOfScalar(value, lvalue);
    EmitARCRelease(oldValue, lvalue.isARCPreciseLifetime());
  } else {
    value = EmitARCStoreStrong(lvalue, value, ignored);
  }

  return std::pair<LValue,llvm::Value*>(lvalue, value);
}

std::pair<LValue,llvm::Value*>
CodeGenFunction::EmitARCStoreAutoreleasing(const BinaryOperator *e) {
  llvm::Value *value = EmitARCRetainAutoreleaseScalarExpr(e->getRHS());
  LValue lvalue = EmitLValue(e->getLHS());

  EmitStoreOfScalar(value, lvalue);

  return std::pair<LValue,llvm::Value*>(lvalue, value);
}

void CodeGenFunction::EmitObjCAutoreleasePoolStmt(
    const ObjCAutoreleasePoolStmt &ARPS) {
  const Stmt *subStmt = ARPS.getSubStmt();
  const CompoundStmt &S = cast<CompoundStmt>(*subStmt);

  CGDebugInfo *DI = getDebugInfo();
  if (DI)
    DI->EmitLexicalBlockStart(Builder, S.getLBracLoc());

  // Keep track of the current cleanup stack depth.
  RunCleanupsScope Scope(*this);
  if (CGM.getLangOpts().ObjCRuntime.hasNativeARC()) {
    llvm::Value *token = EmitObjCAutoreleasePoolPush();
    EHStack.pushCleanup<CallObjCAutoreleasePoolObject>(NormalCleanup, token);
  } else {
    llvm::Value *token = EmitObjCMRRAutoreleasePoolPush();
    EHStack.pushCleanup<CallObjCMRRAutoreleasePoolObject>(NormalCleanup, token);
  }

  for (const auto *I : S.body())
    EmitStmt(I);

  if (DI)
    DI->EmitLexicalBlockEnd(Builder, S.getRBracLoc());
}
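// For illustration: on a runtime without native ARC support, the same
// statement is bracketed with MRR message sends instead, conceptually
//   id tmp = [[NSAutoreleasePool alloc] init];
//   ...
//   [tmp drain];
// matching the push/pop helpers defined earlier in this file.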
/// EmitExtendGCLifetime - Given a pointer to an Objective-C object,
/// make sure it survives garbage collection until this point.
void CodeGenFunction::EmitExtendGCLifetime(llvm::Value *object) {
  // We just use an inline assembly.
  llvm::FunctionType *extenderType
    = llvm::FunctionType::get(VoidTy, VoidPtrTy, RequiredArgs::All);
  llvm::InlineAsm *extender = llvm::InlineAsm::get(extenderType,
                                                   /* assembly */ "",
                                                   /* constraints */ "r",
                                                   /* side effects */ true);

  object = Builder.CreateBitCast(object, VoidPtrTy);
  EmitNounwindRuntimeCall(extender, object);
}

/// GenerateObjCAtomicSetterCopyHelperFunction - Given a C++ object type with
/// a non-trivial copy assignment function, produce the following helper
/// function:
///   static void copyHelper(Ty *dest, const Ty *source) { *dest = *source; }
///
llvm::Constant *
CodeGenFunction::GenerateObjCAtomicSetterCopyHelperFunction(
    const ObjCPropertyImplDecl *PID) {
  const ObjCPropertyDecl *PD = PID->getPropertyDecl();
  if ((!(PD->getPropertyAttributes() & ObjCPropertyAttribute::kind_atomic)))
    return nullptr;

  QualType Ty = PID->getPropertyIvarDecl()->getType();
  ASTContext &C = getContext();

  if (Ty.isNonTrivialToPrimitiveCopy() == QualType::PCK_Struct) {
    // Call the move assignment operator instead of calling the copy
    // assignment operator and destructor.
    CharUnits Alignment = C.getTypeAlignInChars(Ty);
    llvm::Constant *Fn = getNonTrivialCStructMoveAssignmentOperator(
        CGM, Alignment, Alignment, Ty.isVolatileQualified(), Ty);
    return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
  }

  if (!getLangOpts().CPlusPlus ||
      !getLangOpts().ObjCRuntime.hasAtomicCopyHelper())
    return nullptr;
  if (!Ty->isRecordType())
    return nullptr;
  llvm::Constant *HelperFn = nullptr;
  if (hasTrivialSetExpr(PID))
    return nullptr;
  assert(PID->getSetterCXXAssignment() && "SetterCXXAssignment - null");
  if ((HelperFn = CGM.getAtomicSetterHelperFnMap(Ty)))
    return HelperFn;

  IdentifierInfo *II
    = &CGM.getContext().Idents.get("__assign_helper_atomic_property_");

  QualType ReturnTy = C.VoidTy;
  QualType DestTy = C.getPointerType(Ty);
  QualType SrcTy = Ty;
  SrcTy.addConst();
  SrcTy = C.getPointerType(SrcTy);

  SmallVector<QualType, 2> ArgTys;
  ArgTys.push_back(DestTy);
  ArgTys.push_back(SrcTy);
  QualType FunctionTy = C.getFunctionType(ReturnTy, ArgTys, {});

  FunctionDecl *FD = FunctionDecl::Create(
      C, C.getTranslationUnitDecl(), SourceLocation(), SourceLocation(), II,
      FunctionTy, nullptr, SC_Static, false, false, false);

  FunctionArgList args;
  ParmVarDecl *Params[2];
  ParmVarDecl *DstDecl = ParmVarDecl::Create(
      C, FD, SourceLocation(), SourceLocation(), nullptr, DestTy,
      C.getTrivialTypeSourceInfo(DestTy, SourceLocation()), SC_None,
      /*DefArg=*/nullptr);
  args.push_back(Params[0] = DstDecl);
  ParmVarDecl *SrcDecl = ParmVarDecl::Create(
      C, FD, SourceLocation(), SourceLocation(), nullptr, SrcTy,
      C.getTrivialTypeSourceInfo(SrcTy, SourceLocation()), SC_None,
      /*DefArg=*/nullptr);
  args.push_back(Params[1] = SrcDecl);
  FD->setParams(Params);

  const CGFunctionInfo &FI =
      CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, args);

  llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);

  llvm::Function *Fn =
    llvm::Function::Create(LTy, llvm::GlobalValue::InternalLinkage,
                           "__assign_helper_atomic_property_",
                           &CGM.getModule());

  CGM.SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);

  StartFunction(FD, ReturnTy, Fn, FI, args);

  DeclRefExpr DstExpr(C, DstDecl, false, DestTy, VK_PRValue, SourceLocation());
  UnaryOperator *DST = UnaryOperator::Create(
      C, &DstExpr, UO_Deref, DestTy->getPointeeType(), VK_LValue, OK_Ordinary,
      SourceLocation(), false, FPOptionsOverride());

  DeclRefExpr SrcExpr(C, SrcDecl, false, SrcTy, VK_PRValue, SourceLocation());
  UnaryOperator *SRC = UnaryOperator::Create(
      C, &SrcExpr, UO_Deref, SrcTy->getPointeeType(), VK_LValue, OK_Ordinary,
      SourceLocation(), false, FPOptionsOverride());

  Expr *Args[2] = {DST, SRC};
  CallExpr *CalleeExp = cast<CallExpr>(PID->getSetterCXXAssignment());
  CXXOperatorCallExpr *TheCall = CXXOperatorCallExpr::Create(
      C, OO_Equal, CalleeExp->getCallee(), Args, DestTy->getPointeeType(),
      VK_LValue, SourceLocation(), FPOptionsOverride());

  EmitStmt(TheCall);

  FinishFunction();
  HelperFn = llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
  CGM.setAtomicSetterHelperFnMap(Ty, HelperFn);
  return HelperFn;
}
llvm::Constant *CodeGenFunction::GenerateObjCAtomicGetterCopyHelperFunction(
    const ObjCPropertyImplDecl *PID) {
  const ObjCPropertyDecl *PD = PID->getPropertyDecl();
  if (!(PD->getPropertyAttributes() & ObjCPropertyAttribute::kind_atomic))
    return nullptr;

  QualType Ty = PD->getType();
  ASTContext &C = getContext();

  if (Ty.isNonTrivialToPrimitiveCopy() == QualType::PCK_Struct) {
    CharUnits Alignment = C.getTypeAlignInChars(Ty);
    llvm::Constant *Fn = getNonTrivialCStructCopyConstructor(
        CGM, Alignment, Alignment, Ty.isVolatileQualified(), Ty);
    return llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
  }
  if (!getLangOpts().CPlusPlus ||
      !getLangOpts().ObjCRuntime.hasAtomicCopyHelper())
    return nullptr;
  if (!Ty->isRecordType())
    return nullptr;
  llvm::Constant *HelperFn = nullptr;
  if (hasTrivialGetExpr(PID))
    return nullptr;
  assert(PID->getGetterCXXConstructor() && "getGetterCXXConstructor - null");
  if ((HelperFn = CGM.getAtomicGetterHelperFnMap(Ty)))
    return HelperFn;
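
  // Synthesize a helper with the signature
  //   static void __copy_helper_atomic_property_(Ty *dst, const Ty *src);
  // whose body copy-constructs "*dst" from "*src".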
  IdentifierInfo *II =
      &CGM.getContext().Idents.get("__copy_helper_atomic_property_");

  QualType ReturnTy = C.VoidTy;
  QualType DestTy = C.getPointerType(Ty);
  QualType SrcTy = Ty;
  SrcTy.addConst();
  SrcTy = C.getPointerType(SrcTy);

  SmallVector<QualType, 2> ArgTys;
  ArgTys.push_back(DestTy);
  ArgTys.push_back(SrcTy);
  QualType FunctionTy = C.getFunctionType(ReturnTy, ArgTys, {});

  FunctionDecl *FD = FunctionDecl::Create(
      C, C.getTranslationUnitDecl(), SourceLocation(), SourceLocation(), II,
      FunctionTy, nullptr, SC_Static, false, false, false);

  FunctionArgList args;
  ParmVarDecl *Params[2];
  ParmVarDecl *DstDecl = ParmVarDecl::Create(
      C, FD, SourceLocation(), SourceLocation(), nullptr, DestTy,
      C.getTrivialTypeSourceInfo(DestTy, SourceLocation()), SC_None,
      /*DefArg=*/nullptr);
  args.push_back(Params[0] = DstDecl);
  ParmVarDecl *SrcDecl = ParmVarDecl::Create(
      C, FD, SourceLocation(), SourceLocation(), nullptr, SrcTy,
      C.getTrivialTypeSourceInfo(SrcTy, SourceLocation()), SC_None,
      /*DefArg=*/nullptr);
  args.push_back(Params[1] = SrcDecl);
  FD->setParams(Params);
  const CGFunctionInfo &FI =
      CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, args);

  llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);

  llvm::Function *Fn = llvm::Function::Create(
      LTy, llvm::GlobalValue::InternalLinkage, "__copy_helper_atomic_property_",
      &CGM.getModule());

  CGM.SetInternalFunctionAttributes(GlobalDecl(), Fn, FI);

  StartFunction(FD, ReturnTy, Fn, FI, args);
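
  // Rebuild the getter's copy-construct expression, substituting "*src" as
  // the first constructor argument.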
  DeclRefExpr SrcExpr(getContext(), SrcDecl, false, SrcTy, VK_PRValue,
                      SourceLocation());

  UnaryOperator *SRC = UnaryOperator::Create(
      C, &SrcExpr, UO_Deref, SrcTy->getPointeeType(), VK_LValue, OK_Ordinary,
      SourceLocation(), false, FPOptionsOverride());

  CXXConstructExpr *CXXConstExpr =
      cast<CXXConstructExpr>(PID->getGetterCXXConstructor());

  SmallVector<Expr*, 4> ConstructorArgs;
  ConstructorArgs.push_back(SRC);
  ConstructorArgs.append(std::next(CXXConstExpr->arg_begin()),
                         CXXConstExpr->arg_end());

  CXXConstructExpr *TheCXXConstructExpr =
      CXXConstructExpr::Create(C, Ty, SourceLocation(),
                               CXXConstExpr->getConstructor(),
                               CXXConstExpr->isElidable(),
                               ConstructorArgs,
                               CXXConstExpr->hadMultipleCandidates(),
                               CXXConstExpr->isListInitialization(),
                               CXXConstExpr->isStdInitListInitialization(),
                               CXXConstExpr->requiresZeroInitialization(),
                               CXXConstExpr->getConstructionKind(),
                               SourceRange());
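
  // Evaluate the destination pointer and construct the copy directly into
  // the memory it points to.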
  DeclRefExpr DstExpr(getContext(), DstDecl, false, DestTy, VK_PRValue,
                      SourceLocation());

  RValue DV = EmitAnyExpr(&DstExpr);
  CharUnits Alignment =
      getContext().getTypeAlignInChars(TheCXXConstructExpr->getType());
  EmitAggExpr(TheCXXConstructExpr,
              AggValueSlot::forAddr(
                  Address(DV.getScalarVal(), ConvertTypeForMem(Ty), Alignment),
                  Qualifiers(), AggValueSlot::IsDestructed,
                  AggValueSlot::DoesNotNeedGCBarriers,
                  AggValueSlot::IsNotAliased, AggValueSlot::DoesNotOverlap));

  FinishFunction();

  HelperFn = llvm::ConstantExpr::getBitCast(Fn, VoidPtrTy);
  CGM.setAtomicGetterHelperFnMap(Ty, HelperFn);
  return HelperFn;
}
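
/// Emit the moral equivalent of "[[block copy] autorelease]" by sending the
/// "copy" and then "autorelease" messages to the given block value.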
llvm::Value *
CodeGenFunction::EmitBlockCopyAndAutorelease(llvm::Value *Block, QualType Ty) {
  // Get selectors for copy/autorelease.
  IdentifierInfo *CopyID = &getContext().Idents.get("copy");
  Selector CopySelector =
      getContext().Selectors.getNullarySelector(CopyID);
  IdentifierInfo *AutoreleaseID = &getContext().Idents.get("autorelease");
  Selector AutoreleaseSelector =
      getContext().Selectors.getNullarySelector(AutoreleaseID);

  // Emit calls to copy/autorelease.
  CGObjCRuntime &Runtime = CGM.getObjCRuntime();
  llvm::Value *Val = Block;
  RValue Result;
  Result = Runtime.GenerateMessageSend(*this, ReturnValueSlot(),
                                       Ty, CopySelector,
                                       Val, CallArgList(), nullptr, nullptr);
  Val = Result.getScalarVal();
  Result = Runtime.GenerateMessageSend(*this, ReturnValueSlot(),
                                       Ty, AutoreleaseSelector,
                                       Val, CallArgList(), nullptr, nullptr);
  Val = Result.getScalarVal();
  return Val;
}
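
/// Map the OS kind of a Darwin target triple to the corresponding MachO
/// platform ID; returns 0 for platforms this mapping does not know about.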
static unsigned getBaseMachOPlatformID(const llvm::Triple &TT) {
  switch (TT.getOS()) {
  case llvm::Triple::Darwin:
  case llvm::Triple::MacOSX:
    return llvm::MachO::PLATFORM_MACOS;
  case llvm::Triple::IOS:
    return llvm::MachO::PLATFORM_IOS;
  case llvm::Triple::TvOS:
    return llvm::MachO::PLATFORM_TVOS;
  case llvm::Triple::WatchOS:
    return llvm::MachO::PLATFORM_WATCHOS;
  case llvm::Triple::DriverKit:
    return llvm::MachO::PLATFORM_DRIVERKIT;
  default:
    return /*Unknown platform*/ 0;
  }
}
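
/// Emit a call to the runtime's
///   __isPlatformVersionAtLeast(platform, major, minor, subminor)
/// entry point and compare its result against zero, yielding an i1 that is
/// true when the running OS is at least the given version.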
static llvm::Value *emitIsPlatformVersionAtLeast(CodeGenFunction &CGF,
                                                 const VersionTuple &Version) {
  CodeGenModule &CGM = CGF.CGM;

  // Note: we intend to support multi-platform version checks, so reserve
  // the room for a dual platform checking invocation that will be
  // implemented in the future.
  llvm::SmallVector<llvm::Value *, 8> Args;

  auto EmitArgs = [&](const VersionTuple &Version, const llvm::Triple &TT) {
    std::optional<unsigned> Min = Version.getMinor(),
                            SMin = Version.getSubminor();
    Args.push_back(
        llvm::ConstantInt::get(CGM.Int32Ty, getBaseMachOPlatformID(TT)));
    Args.push_back(llvm::ConstantInt::get(CGM.Int32Ty, Version.getMajor()));
    Args.push_back(llvm::ConstantInt::get(CGM.Int32Ty, Min.value_or(0)));
    Args.push_back(llvm::ConstantInt::get(CGM.Int32Ty, SMin.value_or(0)));
  };

  assert(!Version.empty() && "unexpected empty version");
  EmitArgs(Version, CGM.getTarget().getTriple());

  if (!CGM.IsPlatformVersionAtLeastFn) {
    llvm::FunctionType *FTy = llvm::FunctionType::get(
        CGM.Int32Ty, {CGM.Int32Ty, CGM.Int32Ty, CGM.Int32Ty, CGM.Int32Ty},
        false);
    CGM.IsPlatformVersionAtLeastFn =
        CGM.CreateRuntimeFunction(FTy, "__isPlatformVersionAtLeast");
  }

  llvm::Value *Check =
      CGF.EmitNounwindRuntimeCall(CGM.IsPlatformVersionAtLeastFn, Args);
  return CGF.Builder.CreateICmpNE(Check,
                                  llvm::Constant::getNullValue(CGM.Int32Ty));
}
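
/// Lower a __builtin_available() check to a runtime version query that
/// yields an i1.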
llvm::Value *
CodeGenFunction::EmitBuiltinAvailable(const VersionTuple &Version) {
  // Darwin uses the new __isPlatformVersionAtLeast family of routines.
  if (CGM.getTarget().getTriple().isOSDarwin())
    return emitIsPlatformVersionAtLeast(*this, Version);

  if (!CGM.IsOSVersionAtLeastFn) {
    llvm::FunctionType *FTy =
        llvm::FunctionType::get(Int32Ty, {Int32Ty, Int32Ty, Int32Ty}, false);
    CGM.IsOSVersionAtLeastFn =
        CGM.CreateRuntimeFunction(FTy, "__isOSVersionAtLeast");
  }

  std::optional<unsigned> Min = Version.getMinor(),
                          SMin = Version.getSubminor();
  llvm::Value *Args[] = {
      llvm::ConstantInt::get(CGM.Int32Ty, Version.getMajor()),
      llvm::ConstantInt::get(CGM.Int32Ty, Min.value_or(0)),
      llvm::ConstantInt::get(CGM.Int32Ty, SMin.value_or(0))};

  llvm::Value *CallRes =
      EmitNounwindRuntimeCall(CGM.IsOSVersionAtLeastFn, Args);
  return Builder.CreateICmpNE(CallRes, llvm::Constant::getNullValue(Int32Ty));
}
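
/// Return true if the availability runtime on the given Darwin OS still
/// depends on Foundation, i.e. when the deployment target predates
/// macOS 10.15, iOS/tvOS 13, or watchOS 6.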
static bool isFoundationNeededForDarwinAvailabilityCheck(
    const llvm::Triple &TT, const VersionTuple &TargetVersion) {
  VersionTuple FoundationDroppedInVersion;
  switch (TT.getOS()) {
  case llvm::Triple::IOS:
  case llvm::Triple::TvOS:
    FoundationDroppedInVersion = VersionTuple(/*Major=*/13);
    break;
  case llvm::Triple::WatchOS:
    FoundationDroppedInVersion = VersionTuple(/*Major=*/6);
    break;
  case llvm::Triple::Darwin:
  case llvm::Triple::MacOSX:
    FoundationDroppedInVersion = VersionTuple(/*Major=*/10, /*Minor=*/15);
    break;
  case llvm::Triple::DriverKit:
    // DriverKit doesn't need Foundation.
    return false;
  default:
    llvm_unreachable("Unexpected OS");
  }
  return TargetVersion < FoundationDroppedInVersion;
}
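
/// If any @available check was lowered in this module, make sure
/// CoreFoundation actually gets linked: add "-framework CoreFoundation" to
/// the linker options and emit a dummy reference to CFBundleGetVersionNumber
/// so the linker cannot drop the framework.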
void CodeGenModule::emitAtAvailableLinkGuard() {
  if (!IsPlatformVersionAtLeastFn)
    return;
  // @available requires CoreFoundation only on Darwin.
  if (!Target.getTriple().isOSDarwin())
    return;
  // @available doesn't need Foundation on macOS 10.15+, iOS/tvOS 13+, or
  // watchOS 6+.
  if (!isFoundationNeededForDarwinAvailabilityCheck(
          Target.getTriple(), Target.getPlatformMinVersion()))
    return;
  // Add -framework CoreFoundation to the linker commands. We still want to
  // emit the core foundation reference down below because otherwise if
  // CoreFoundation is not used in the code, the linker won't link the
  // framework.
  auto &Context = getLLVMContext();
  llvm::Metadata *Args[2] = {llvm::MDString::get(Context, "-framework"),
                             llvm::MDString::get(Context, "CoreFoundation")};
  LinkerOptionsMetadata.push_back(llvm::MDNode::get(Context, Args));
  // Emit a reference to a symbol from CoreFoundation to ensure that
  // CoreFoundation is linked into the final binary.
  llvm::FunctionType *FTy =
      llvm::FunctionType::get(Int32Ty, {VoidPtrTy}, false);
  llvm::FunctionCallee CFFunc =
      CreateRuntimeFunction(FTy, "CFBundleGetVersionNumber");

  llvm::FunctionType *CheckFTy = llvm::FunctionType::get(VoidTy, {}, false);
  llvm::FunctionCallee CFLinkCheckFuncRef = CreateRuntimeFunction(
      CheckFTy, "__clang_at_available_requires_core_foundation_framework",
      llvm::AttributeList(), /*Local=*/true);
  llvm::Function *CFLinkCheckFunc =
      cast<llvm::Function>(CFLinkCheckFuncRef.getCallee()->stripPointerCasts());
  if (CFLinkCheckFunc->empty()) {
    CFLinkCheckFunc->setLinkage(llvm::GlobalValue::LinkOnceAnyLinkage);
    CFLinkCheckFunc->setVisibility(llvm::GlobalValue::HiddenVisibility);
    CodeGenFunction CGF(*this);
    CGF.Builder.SetInsertPoint(CGF.createBasicBlock("", CFLinkCheckFunc));
    CGF.EmitNounwindRuntimeCall(CFFunc,
                                llvm::Constant::getNullValue(VoidPtrTy));
    CGF.Builder.CreateUnreachable();
    addCompilerUsedGlobal(CFLinkCheckFunc);
  }
}

CGObjCRuntime::~CGObjCRuntime() {}