//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for objects
/// in Objective-C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes them
/// by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions are
/// used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARC.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/Analysis/ObjCARCUtil.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PassManager.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/ObjCARC.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
// Declarations
//===----------------------------------------------------------------------===//

namespace {
/// Late ARC optimizations
///
/// These change the IR in a way that makes it difficult to be analyzed by
/// ObjCARCOpt, so this pass is run late.
class ObjCARCContract {
  bool Changed;
  bool CFGChanged;
  AAResults *AA;
  DominatorTree *DT;
  ProvenanceAnalysis PA;
  ARCRuntimeEntryPoints EP;
  BundledRetainClaimRVs *BundledInsts = nullptr;

  /// The inline asm string to insert between calls and RetainRV calls to make
  /// the optimization work on targets which need it.
  const MDString *RVInstMarker;

  /// The set of inserted objc_storeStrong calls. If at the end of walking the
  /// function we have found no alloca instructions, these calls can be marked
  /// "tail".
  SmallPtrSet<CallInst *, 8> StoreStrongCalls;

  /// Returns true if we eliminated Inst.
  bool tryToPeepholeInstruction(
      Function &F, Instruction *Inst, inst_iterator &Iter,
      bool &TailOkForStoreStrong,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

  bool optimizeRetainCall(Function &F, Instruction *Retain);

  bool contractAutorelease(Function &F, Instruction *Autorelease,
                           ARCInstKind Class);

  void tryToContractReleaseIntoStoreStrong(
      Instruction *Release, inst_iterator &Iter,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

public:
  bool init(Module &M);
  bool run(Function &F, AAResults *AA, DominatorTree *DT);
  bool hasCFGChanged() const { return CFGChanged; }
};

class ObjCARCContractLegacyPass : public FunctionPass {
public:
  void getAnalysisUsage(AnalysisUsage &AU) const override;
  bool runOnFunction(Function &F) override;

  static char ID;
  ObjCARCContractLegacyPass() : FunctionPass(ID) {
    initializeObjCARCContractLegacyPassPass(*PassRegistry::getPassRegistry());
  }
};
}

//===----------------------------------------------------------------------===//
// Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
/// return value. We do this late so we do not disrupt the dataflow analysis in
/// ObjCARCOpt.
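///
/// Illustrative sketch only (hypothetical IR, not taken from this file): a
/// pattern such as
///
///   %obj = call i8* @foo()
///   %0 = call i8* @llvm.objc.retain(i8* %obj)
///
/// is rewritten in place to
///
///   %obj = call i8* @foo()
///   %0 = call i8* @llvm.objc.retainAutoreleasedReturnValue(i8* %obj)
///
/// assuming only no-op instructions separate the call from the retain.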
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  const auto *Call = dyn_cast<CallBase>(GetArgRCIdentityRoot(Retain));
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it into an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(
      dbgs() << "Transforming objc_retain => "
                "objc_retainAutoreleasedReturnValue since the operand is a "
                "return value.\nOld: "
             << *Retain << "\n");

  // We do not have to worry about tail calls/does not throw since
  // retain/retainRV have the same properties.
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}

/// Merge an autorelease with a retain into a fused call.
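///
/// Illustrative sketch only (hypothetical IR, not taken from this file): for a
/// dependent pair such as
///
///   %0 = call i8* @llvm.objc.retain(i8* %p)
///   %1 = call i8* @llvm.objc.autorelease(i8* %p)
///
/// the retain call is rewritten to call the fused runtime entry point, here
/// assumed to be
///
///   %0 = call i8* @llvm.objc.retainAutorelease(i8* %p)
///
/// (or the retainAutoreleaseReturnValue variant when the autorelease was an
/// autoreleaseRV), and the original autorelease call is erased.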
bool ObjCARCContract::contractAutorelease(Function &F, Instruction *Autorelease,
                                          ARCInstKind Class) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the autorelease
  // (such as an autorelease_pop) which may change the count.
  DependenceKind DK = Class == ARCInstKind::AutoreleaseRV
                          ? RetainAutoreleaseRVDep
                          : RetainAutoreleaseDep;
  auto *Retain = dyn_cast_or_null<CallInst>(
      findSingleDependency(DK, Arg, Autorelease->getParent(), Autorelease, PA));

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(dbgs() << " Fusing retain/autorelease!\n"
                       " Autorelease:"
                    << *Autorelease << "\n"
                       " Retain: "
                    << *Retain << "\n");

  Function *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << " New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}

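/// Scan forward from \p Load looking for a simple store back to Load's
/// location and for \p Release, which may appear in either order. Returns the
/// store only if it is safe to sink the release down to it, i.e. no intervening
/// instruction may use the loaded value or clobber the location in a way we
/// cannot account for; otherwise returns nullptr.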
static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AAResults *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not yet seen both the store and the release. If
    // I is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction class
    // of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively can not use
      // the RCIdentityRoot of Release. If we can prove that Inst can not use
      // it, ignore Inst and continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet.

    // If Inst is a retain, we don't care about it as it doesn't prevent moving
    // the load to the store.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // See if Inst can write to our load location; if it can not, just ignore
    // the instruction.
    if (!isModSet(AA->getModRefInfo(Inst, Loc)))
      continue;

    // If Inst can, then check if Inst is a simple store. If Inst is not a
    // store or a store that is not simple, then we have something we do not
    // understand writing to this memory, implying we can not move the load
    // over the write to any subsequent store that we may find.
    Store = dyn_cast<StoreInst>(Inst);
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is LocPtr. If so, we
    // found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}

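/// Walk up from \p Store looking for a retain on \p New, the RCIdentityRoot of
/// the value being stored. Returns the retain only if nothing between it and
/// the store, other than \p Release itself, might decrement a reference count;
/// otherwise returns nullptr.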
static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}

/// Attempt to merge an objc_release with a store, load, and objc_retain to form
/// an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* %new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr                 (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)      (2)
///   tail call void @objc_release(i8* %old_value)    (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr             (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///  1. We are forming the store strong at the store. Thus to perform this
///     optimization it must be safe to move the retain, load, and release to
///     (4).
///  2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///     safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(
    Instruction *Release, inst_iterator &Iter,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a simple store back to the
  // location that Load loads from.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentityRoot is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  LLVM_DEBUG(
      llvm::dbgs() << " Contracting retain, release into objc_storeStrong.\n"
                   << " Old:\n"
                   << " Store: " << *Store << "\n"
                   << " Release: " << *Release << "\n"
                   << " Retain: " << *Retain << "\n"
                   << " Load: " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong =
      objcarc::createCallInstWithColors(Decl, Args, "", Store, BlockColors);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  LLVM_DEBUG(llvm::dbgs() << " New Store Strong: " << *StoreStrong
                          << "\n");

  if (&*Iter == Retain) ++Iter;
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}

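/// Peephole-optimize \p Inst. A return value of true tells the caller that
/// there is nothing further to do for this instruction (it was handled here,
/// erased, or is not interesting); false tells the caller to go on and try to
/// undo objc-arc-expand for it.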
bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    bool &TailOkForStoreStrongs,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    [[fallthrough]];
  case ARCInstKind::RetainRV:
  case ARCInstKind::UnsafeClaimRV: {
    // Return true if this is a bundled retainRV/claimRV call, which is always
    // redundant with the attachedcall in the bundle, and is going to be erased
    // at the end of this pass. This avoids undoing objc-arc-expand and
    // replacing uses of the retainRV/claimRV call's argument with its result.
    if (BundledInsts->contains(Inst))
      return true;

    // If this isn't a bundled call, and the target doesn't need a special
    // inline-asm marker, we're done: return now, and undo objc-arc-expand.
    if (!RVInstMarker)
      return false;

    // The target needs a special inline-asm marker. Insert it.
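    // (For example, some 32-bit ARM targets are believed to use a marker such
    // as "mov r7, r7"; the exact string is target-dependent and comes from the
    // "clang.arc.retainAutoreleasedReturnValueMarker" module flag read by
    // getRVInstMarker in init.)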
    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (GetRCIdentityRoot(&*BBI) == GetArgRCIdentityRoot(Inst)) {
      LLVM_DEBUG(dbgs() << "Adding inline asm marker for the return value "
                           "optimization.\n");
      Changed = true;
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RVInstMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);

      objcarc::createCallInstWithColors(IA, std::nullopt, "", Inst,
                                        BlockColors);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null = ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI);

      LLVM_DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                        << " New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc_storeStrong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter, BlockColors);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @llvm.objc.clang.arc.use(...).
    Changed = true;
    Inst->eraseFromParent();
    return true;
  default:
    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (CI->getIntrinsicID() == Intrinsic::objc_clang_arc_noop_use) {
        // Remove calls to @llvm.objc.clang.arc.noop.use(...).
        Changed = true;
        CI->eraseFromParent();
      }
    return true;
  }
}

//===----------------------------------------------------------------------===//
// Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::init(Module &M) {
  EP.init(&M);

  // Initialize RVInstMarker.
  RVInstMarker = getRVInstMarker(M);

  return false;
}

bool ObjCARCContract::run(Function &F, AAResults *A, DominatorTree *D) {
  if (!EnableARCOpts)
    return false;

  Changed = CFGChanged = false;
  AA = A;
  DT = D;
  PA.setAA(A);
  BundledRetainClaimRVs BRV(/*ContractPass=*/true);
  BundledInsts = &BRV;

  std::pair<bool, bool> R = BundledInsts->insertAfterInvokes(F, DT);
  Changed |= R.first;
  CFGChanged |= R.second;

  DenseMap<BasicBlock *, ColorVector> BlockColors;
  if (F.hasPersonalityFn() &&
      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
    BlockColors = colorEHFunclets(F);

  LLVM_DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // Functions which call "returns twice" functions such as setjmp are also
  // unsafe for the "tail" keyword, because such calls could need to return
  // to an earlier stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    LLVM_DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (objcarc::hasAttachedCallOpBundle(CI)) {
        BundledInsts->insertRVCallWithColors(&*I, CI, BlockColors);
        --I;
        Changed = true;
      }

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, TailOkForStoreStrongs,
                                 BlockColors))
      continue;

    // Otherwise, try to undo objc-arc-expand.
    // Don't use GetArgRCIdentityRoot because we don't want to look through
    // bitcasts and such; to do the replacement, the argument must have type
    // i8*.
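    // Illustrative sketch (hypothetical IR, not from a specific test): given
    //
    //   %1 = call i8* @llvm.objc.retain(i8* %x)
    //
    // every use of %x that is reachable and dominated by the retain call is
    // rewritten to use %1 instead, since these library routines return their
    // argument.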
    // Function for replacing uses of Arg dominated by Inst.
    auto ReplaceArgUses = [Inst, this](Value *Arg) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        return;

      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (!DT->isReachableFromEntry(U) || !DT->dominates(Inst, U))
          continue;

        Changed = true;
        Instruction *Replacement = Inst;
        Type *UseTy = U.get()->getType();
        if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
          // For PHI nodes, insert the bitcast in the predecessor block.
          unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
          BasicBlock *IncomingBB = PHI->getIncomingBlock(ValNo);
          if (Replacement->getType() != UseTy) {
            // A catchswitch is both a pad and a terminator, meaning a basic
            // block with a catchswitch has no insertion point. Keep going up
            // the dominator tree until we find a non-catchswitch.
            BasicBlock *InsertBB = IncomingBB;
            while (isa<CatchSwitchInst>(InsertBB->getFirstNonPHI())) {
              InsertBB = DT->getNode(InsertBB)->getIDom()->getBlock();
            }

            assert(DT->dominates(Inst, &InsertBB->back()) &&
                   "Invalid insertion point for bitcast");
            Replacement =
                new BitCastInst(Replacement, UseTy, "", &InsertBB->back());
          }

          // While we're here, rewrite all edges for this PHI, rather
          // than just one use at a time, to minimize the number of
          // bitcasts we emit.
          for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
            if (PHI->getIncomingBlock(i) == IncomingBB) {
              // Keep the UI iterator valid.
              if (UI != UE &&
                  &PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                ++UI;
              PHI->setIncomingValue(i, Replacement);
            }
        } else {
          if (Replacement->getType() != UseTy)
            Replacement = new BitCastInst(Replacement, UseTy, "",
                                          cast<Instruction>(U.getUser()));
          U.set(Replacement);
        }
      }
    };

    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    Value *OrigArg = Arg;

    // TODO: Change this to a do-while.
    for (;;) {
      ReplaceArgUses(Arg);

      // If Arg is a no-op casted pointer, strip one level of casts and iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else {
        // If Arg is a PHI node, get PHIs that are equivalent to it and replace
        // their uses.
        if (PHINode *PN = dyn_cast<PHINode>(Arg)) {
          SmallVector<Value *, 1> PHIList;
          getEquivalentPHIs(*PN, PHIList);
          for (Value *PHI : PHIList)
            ReplaceArgUses(PHI);
        }
        break;
      }
    }

    // Replace bitcast users of Arg that are dominated by Inst.
    SmallVector<BitCastInst *, 2> BitCastUsers;

    // Add all bitcast users of the function argument first.
    for (User *U : OrigArg->users())
      if (auto *BC = dyn_cast<BitCastInst>(U))
        BitCastUsers.push_back(BC);

    // Replace the bitcasts with the call return. Iterate until list is empty.
    while (!BitCastUsers.empty()) {
      auto *BC = BitCastUsers.pop_back_val();
      for (User *U : BC->users())
        if (auto *B = dyn_cast<BitCastInst>(U))
          BitCastUsers.push_back(B);

      ReplaceArgUses(BC);
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}

//===----------------------------------------------------------------------===//
// Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContractLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContractLegacyPass, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContractLegacyPass, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContractLegacyPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
}

Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContractLegacyPass();
}

bool ObjCARCContractLegacyPass::runOnFunction(Function &F) {
  ObjCARCContract OCARCC;
  OCARCC.init(*F.getParent());
  auto *AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  auto *DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  return OCARCC.run(F, AA, DT);
}

PreservedAnalyses ObjCARCContractPass::run(Function &F,
                                           FunctionAnalysisManager &AM) {
  ObjCARCContract OCAC;
  OCAC.init(*F.getParent());

  bool Changed = OCAC.run(F, &AM.getResult<AAManager>(F),
                          &AM.getResult<DominatorTreeAnalysis>(F));
  bool CFGChanged = OCAC.hasCFGChanged();
  if (Changed) {
    PreservedAnalyses PA;
    if (!CFGChanged)
      PA.preserveSet<CFGAnalyses>();
    return PA;
  }
  return PreservedAnalyses::all();
}