ObjCARCContract.cpp

//===- ObjCARCContract.cpp - ObjC ARC Optimization ------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file defines late ObjC ARC optimizations. ARC stands for Automatic
/// Reference Counting and is a system for managing reference counts for objects
/// in Objective C.
///
/// This specific file mainly deals with ``contracting'' multiple lower level
/// operations into singular higher level operations through pattern matching.
///
/// WARNING: This file knows about certain library functions. It recognizes them
/// by name, and hardwires knowledge of their semantics.
///
/// WARNING: This file knows about how certain Objective-C library functions are
/// used. Naive LLVM IR transformations which would otherwise be
/// behavior-preserving may break these assumptions.
///
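/// As an illustrative sketch (simplified IR; the objc_* runtime spellings
/// follow the examples in the comments below, while the pass itself resolves
/// the callees through ARCRuntimeEntryPoints), a retain of a value that is
/// later autoreleased:
///
///   tail call i8* @objc_retain(i8* %x)
///   ...
///   tail call i8* @objc_autorelease(i8* %x)
///
/// may be contracted into a single fused runtime call:
///
///   tail call i8* @objc_retainAutorelease(i8* %x)
///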
//===----------------------------------------------------------------------===//

// TODO: ObjCARCContract could insert PHI nodes when uses aren't
// dominated by single calls.

#include "ARCRuntimeEntryPoints.h"
#include "DependencyAnalysis.h"
#include "ObjCARC.h"
#include "ProvenanceAnalysis.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/Analysis/ObjCARCUtil.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/PassManager.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/ObjCARC.h"

using namespace llvm;
using namespace llvm::objcarc;

#define DEBUG_TYPE "objc-arc-contract"

STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumStoreStrongs, "Number of objc_storeStrong calls formed");

//===----------------------------------------------------------------------===//
// Declarations
//===----------------------------------------------------------------------===//

namespace {

/// Late ARC optimizations
///
/// These change the IR in a way that makes it difficult to be analyzed by
/// ObjCARCOpt, so it's run late.
class ObjCARCContract {
  bool Changed;
  bool CFGChanged;
  AAResults *AA;
  DominatorTree *DT;
  ProvenanceAnalysis PA;
  ARCRuntimeEntryPoints EP;
  BundledRetainClaimRVs *BundledInsts = nullptr;

  /// The inline asm string to insert between calls and RetainRV calls to make
  /// the optimization work on targets which need it.
  const MDString *RVInstMarker;

  /// The set of inserted objc_storeStrong calls. If at the end of walking the
  /// function we have found no alloca instructions, these calls can be marked
  /// "tail".
  SmallPtrSet<CallInst *, 8> StoreStrongCalls;

  /// Returns true if we eliminated Inst.
  bool tryToPeepholeInstruction(
      Function &F, Instruction *Inst, inst_iterator &Iter,
      bool &TailOkForStoreStrong,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

  bool optimizeRetainCall(Function &F, Instruction *Retain);

  bool contractAutorelease(Function &F, Instruction *Autorelease,
                           ARCInstKind Class);

  void tryToContractReleaseIntoStoreStrong(
      Instruction *Release, inst_iterator &Iter,
      const DenseMap<BasicBlock *, ColorVector> &BlockColors);

public:
  bool init(Module &M);
  bool run(Function &F, AAResults *AA, DominatorTree *DT);
  bool hasCFGChanged() const { return CFGChanged; }
};

class ObjCARCContractLegacyPass : public FunctionPass {
  ObjCARCContract OCARCC;

public:
  void getAnalysisUsage(AnalysisUsage &AU) const override;
  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;

  static char ID;
  ObjCARCContractLegacyPass() : FunctionPass(ID) {
    initializeObjCARCContractLegacyPassPass(*PassRegistry::getPassRegistry());
  }
};
}

//===----------------------------------------------------------------------===//
// Implementation
//===----------------------------------------------------------------------===//

/// Turn objc_retain into objc_retainAutoreleasedReturnValue if the operand is a
/// return value. We do this late so we do not disrupt the dataflow analysis in
/// ObjCARCOpt.
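///
/// An illustrative sketch (simplified IR; %call and @foo are placeholder
/// names, not something this pass requires):
///
///   %call = tail call i8* @foo()
///   %1 = tail call i8* @objc_retain(i8* %call)
///
/// becomes, once we verify the retain directly follows the call it retains:
///
///   %1 = tail call i8* @objc_retainAutoreleasedReturnValue(i8* %call)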
bool ObjCARCContract::optimizeRetainCall(Function &F, Instruction *Retain) {
  const auto *Call = dyn_cast<CallBase>(GetArgRCIdentityRoot(Retain));
  if (!Call)
    return false;
  if (Call->getParent() != Retain->getParent())
    return false;

  // Check that the call is next to the retain.
  BasicBlock::const_iterator I = ++Call->getIterator();
  while (IsNoopInstruction(&*I))
    ++I;
  if (&*I != Retain)
    return false;

  // Turn it to an objc_retainAutoreleasedReturnValue.
  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(
      dbgs() << "Transforming objc_retain => "
                "objc_retainAutoreleasedReturnValue since the operand is a "
                "return value.\nOld: "
             << *Retain << "\n");

  // We do not have to worry about tail calls/does not throw since
  // retain/retainRV have the same properties.
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::RetainRV);
  cast<CallInst>(Retain)->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "New: " << *Retain << "\n");
  return true;
}

/// Merge an autorelease with a retain into a fused call.
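///
/// An illustrative sketch (simplified IR; %p is a placeholder name). Provided
/// the dependency check below proves that nothing in between can change the
/// reference count,
///
///   %1 = tail call i8* @objc_retain(i8* %p)
///   ...
///   %2 = tail call i8* @objc_autorelease(i8* %p)
///
/// becomes
///
///   %1 = tail call i8* @objc_retainAutorelease(i8* %p)
///
/// (or @objc_retainAutoreleaseReturnValue when the second call was an
/// autoreleaseRV).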
bool ObjCARCContract::contractAutorelease(Function &F, Instruction *Autorelease,
                                          ARCInstKind Class) {
  const Value *Arg = GetArgRCIdentityRoot(Autorelease);

  // Check that there are no instructions between the retain and the autorelease
  // (such as an autorelease_pop) which may change the count.
  DependenceKind DK = Class == ARCInstKind::AutoreleaseRV
                          ? RetainAutoreleaseRVDep
                          : RetainAutoreleaseDep;
  auto *Retain = dyn_cast_or_null<CallInst>(
      findSingleDependency(DK, Arg, Autorelease->getParent(), Autorelease, PA));

  if (!Retain || GetBasicARCInstKind(Retain) != ARCInstKind::Retain ||
      GetArgRCIdentityRoot(Retain) != Arg)
    return false;

  Changed = true;
  ++NumPeeps;

  LLVM_DEBUG(dbgs() << "    Fusing retain/autorelease!\n"
                       "        Autorelease:"
                    << *Autorelease
                    << "\n"
                       "        Retain: "
                    << *Retain << "\n");

  Function *Decl = EP.get(Class == ARCInstKind::AutoreleaseRV
                              ? ARCRuntimeEntryPointKind::RetainAutoreleaseRV
                              : ARCRuntimeEntryPointKind::RetainAutorelease);
  Retain->setCalledFunction(Decl);

  LLVM_DEBUG(dbgs() << "        New RetainAutorelease: " << *Retain << "\n");

  EraseInstruction(Autorelease);
  return true;
}
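
/// Starting from \p Load, scan forward through the block looking for a simple
/// store to the same location and for \p Release (in either order). Returns
/// the store if it is safe to contract the sequence into an objc_storeStrong,
/// or nullptr if any intervening instruction might use or clobber the loaded
/// value in a way that makes the contraction unsafe.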
static StoreInst *findSafeStoreForStoreStrongContraction(LoadInst *Load,
                                                         Instruction *Release,
                                                         ProvenanceAnalysis &PA,
                                                         AAResults *AA) {
  StoreInst *Store = nullptr;
  bool SawRelease = false;

  // Get the location associated with Load.
  MemoryLocation Loc = MemoryLocation::get(Load);
  auto *LocPtr = Loc.Ptr->stripPointerCasts();

  // Walk down to find the store and the release, which may be in either order.
  for (auto I = std::next(BasicBlock::iterator(Load)),
            E = Load->getParent()->end();
       I != E; ++I) {
    // If we found the store we were looking for and saw the release,
    // break. There is no more work to be done.
    if (Store && SawRelease)
      break;

    // Now we know that we have not seen either the store or the release. If I
    // is the release, mark that we saw the release and continue.
    Instruction *Inst = &*I;
    if (Inst == Release) {
      SawRelease = true;
      continue;
    }

    // Otherwise, we check if Inst is a "good" store. Grab the instruction class
    // of Inst.
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    // If we have seen the store, but not the release...
    if (Store) {
      // We need to make sure that it is safe to move the release from its
      // current position to the store. This implies proving that any
      // instruction in between Store and the Release conservatively can not use
      // the RCIdentityRoot of Release. If we can prove we can ignore Inst,
      // continue...
      if (!CanUse(Inst, Load, PA, Class)) {
        continue;
      }

      // Otherwise, be conservative and return nullptr.
      return nullptr;
    }

    // Ok, now we know we have not seen a store yet.

    // If Inst is a retain, we don't care about it as it doesn't prevent moving
    // the load to the store.
    //
    // TODO: This is one area where the optimization could be made more
    // aggressive.
    if (IsRetain(Class))
      continue;

    // See if Inst can write to our load location, if it can not, just ignore
    // the instruction.
    if (!isModSet(AA->getModRefInfo(Inst, Loc)))
      continue;

    Store = dyn_cast<StoreInst>(Inst);

    // If Inst can, then check if Inst is a simple store. If Inst is not a
    // store or a store that is not simple, then we have something we do not
    // understand writing to this memory implying we can not move the load
    // over the write to any subsequent store that we may find.
    if (!Store || !Store->isSimple())
      return nullptr;

    // Then make sure that the pointer we are storing to is Ptr. If so, we
    // found our Store!
    if (Store->getPointerOperand()->stripPointerCasts() == LocPtr)
      continue;

    // Otherwise, we have an unknown store to some other ptr that clobbers
    // Loc.Ptr. Bail!
    return nullptr;
  }

  // If we did not find the store or did not see the release, fail.
  if (!Store || !SawRelease)
    return nullptr;

  // We succeeded!
  return Store;
}
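
/// Walk up from \p Store looking for a retain of \p New. Returns the retain if
/// nothing between it and the store (other than \p Release itself) could
/// decrement a reference count; otherwise returns nullptr.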
static Instruction *
findRetainForStoreStrongContraction(Value *New, StoreInst *Store,
                                    Instruction *Release,
                                    ProvenanceAnalysis &PA) {
  // Walk up from the Store to find the retain.
  BasicBlock::iterator I = Store->getIterator();
  BasicBlock::iterator Begin = Store->getParent()->begin();
  while (I != Begin && GetBasicARCInstKind(&*I) != ARCInstKind::Retain) {
    Instruction *Inst = &*I;

    // It is only safe to move the retain to the store if we can prove
    // conservatively that nothing besides the release can decrement reference
    // counts in between the retain and the store.
    if (CanDecrementRefCount(Inst, New, PA) && Inst != Release)
      return nullptr;
    --I;
  }
  Instruction *Retain = &*I;
  if (GetBasicARCInstKind(Retain) != ARCInstKind::Retain)
    return nullptr;
  if (GetArgRCIdentityRoot(Retain) != New)
    return nullptr;
  return Retain;
}
/// Attempt to merge an objc_release with a store, load, and objc_retain to form
/// an objc_storeStrong. An objc_storeStrong:
///
///   objc_storeStrong(i8** %old_ptr, i8* new_value)
///
/// is equivalent to the following IR sequence:
///
///   ; Load old value.
///   %old_value = load i8** %old_ptr               (1)
///
///   ; Increment the new value and then release the old value. This must occur
///   ; in order in case old_value releases new_value in its destructor causing
///   ; us to potentially have a dangling ptr.
///   tail call i8* @objc_retain(i8* %new_value)    (2)
///   tail call void @objc_release(i8* %old_value)  (3)
///
///   ; Store the new_value into old_ptr
///   store i8* %new_value, i8** %old_ptr           (4)
///
/// The safety of this optimization is based around the following
/// considerations:
///
///  1. We are forming the store strong at the store. Thus to perform this
///     optimization it must be safe to move the retain, load, and release to
///     (4).
///  2. We need to make sure that any re-orderings of (1), (2), (3), (4) are
///     safe.
void ObjCARCContract::tryToContractReleaseIntoStoreStrong(
    Instruction *Release, inst_iterator &Iter,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // See if we are releasing something that we just loaded.
  auto *Load = dyn_cast<LoadInst>(GetArgRCIdentityRoot(Release));
  if (!Load || !Load->isSimple())
    return;

  // For now, require everything to be in one basic block.
  BasicBlock *BB = Release->getParent();
  if (Load->getParent() != BB)
    return;

  // First scan down the BB from Load, looking for a simple store to the
  // location that Load loads from.
  StoreInst *Store =
      findSafeStoreForStoreStrongContraction(Load, Release, PA, AA);
  // If we fail, bail.
  if (!Store)
    return;

  // Then find what new_value's RCIdentity Root is.
  Value *New = GetRCIdentityRoot(Store->getValueOperand());

  // Then walk up the BB and look for a retain on New without any intervening
  // instructions which conservatively might decrement ref counts.
  Instruction *Retain =
      findRetainForStoreStrongContraction(New, Store, Release, PA);

  // If we fail, bail.
  if (!Retain)
    return;

  Changed = true;
  ++NumStoreStrongs;

  LLVM_DEBUG(
      llvm::dbgs() << "    Contracting retain, release into objc_storeStrong.\n"
                   << "        Old:\n"
                   << "            Store:   " << *Store << "\n"
                   << "            Release: " << *Release << "\n"
                   << "            Retain:  " << *Retain << "\n"
                   << "            Load:    " << *Load << "\n");

  LLVMContext &C = Release->getContext();
  Type *I8X = PointerType::getUnqual(Type::getInt8Ty(C));
  Type *I8XX = PointerType::getUnqual(I8X);

  Value *Args[] = { Load->getPointerOperand(), New };
  if (Args[0]->getType() != I8XX)
    Args[0] = new BitCastInst(Args[0], I8XX, "", Store);
  if (Args[1]->getType() != I8X)
    Args[1] = new BitCastInst(Args[1], I8X, "", Store);
  Function *Decl = EP.get(ARCRuntimeEntryPointKind::StoreStrong);
  CallInst *StoreStrong =
      objcarc::createCallInstWithColors(Decl, Args, "", Store, BlockColors);
  StoreStrong->setDoesNotThrow();
  StoreStrong->setDebugLoc(Store->getDebugLoc());

  // We can't set the tail flag yet, because we haven't yet determined
  // whether there are any escaping allocas. Remember this call, so that
  // we can set the tail flag once we know it's safe.
  StoreStrongCalls.insert(StoreStrong);

  LLVM_DEBUG(llvm::dbgs() << "        New Store Strong: " << *StoreStrong
                          << "\n");

  if (&*Iter == Retain) ++Iter;
  if (&*Iter == Store) ++Iter;
  Store->eraseFromParent();
  Release->eraseFromParent();
  EraseInstruction(Retain);
  if (Load->use_empty())
    Load->eraseFromParent();
}
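
/// Attempt to peephole \p Inst into a higher level ARC construct (a fused
/// retain/autorelease, a retainRV inline-asm marker, an initWeak store, an
/// objc_storeStrong, etc.). Returns true when there is nothing further to do
/// for \p Inst in terms of undoing objc-arc-expand, so the caller can move on
/// to the next instruction; returns false when uses of Inst's argument may
/// still be rewritten in terms of its return value.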
bool ObjCARCContract::tryToPeepholeInstruction(
    Function &F, Instruction *Inst, inst_iterator &Iter,
    bool &TailOkForStoreStrongs,
    const DenseMap<BasicBlock *, ColorVector> &BlockColors) {
  // Only these library routines return their argument. In particular,
  // objc_retainBlock does not necessarily return its argument.
  ARCInstKind Class = GetBasicARCInstKind(Inst);
  switch (Class) {
  case ARCInstKind::FusedRetainAutorelease:
  case ARCInstKind::FusedRetainAutoreleaseRV:
    return false;
  case ARCInstKind::Autorelease:
  case ARCInstKind::AutoreleaseRV:
    return contractAutorelease(F, Inst, Class);
  case ARCInstKind::Retain:
    // Attempt to convert retains to retainrvs if they are next to function
    // calls.
    if (!optimizeRetainCall(F, Inst))
      return false;
    // If we succeed in our optimization, fall through.
    LLVM_FALLTHROUGH;
  case ARCInstKind::RetainRV:
  case ARCInstKind::UnsafeClaimRV: {
    // Return true if this is a bundled retainRV/claimRV call, which is always
    // redundant with the attachedcall in the bundle, and is going to be erased
    // at the end of this pass. This avoids undoing objc-arc-expand and
    // replacing uses of the retainRV/claimRV call's argument with its result.
    if (BundledInsts->contains(Inst))
      return true;

    // If this isn't a bundled call, and the target doesn't need a special
    // inline-asm marker, we're done: return now, and undo objc-arc-expand.
    if (!RVInstMarker)
      return false;

    // The target needs a special inline-asm marker. Insert it.

    BasicBlock::iterator BBI = Inst->getIterator();
    BasicBlock *InstParent = Inst->getParent();

    // Step up to see if the call immediately precedes the RV call.
    // If it's an invoke, we have to cross a block boundary. And we have
    // to carefully dodge no-op instructions.
    do {
      if (BBI == InstParent->begin()) {
        BasicBlock *Pred = InstParent->getSinglePredecessor();
        if (!Pred)
          goto decline_rv_optimization;
        BBI = Pred->getTerminator()->getIterator();
        break;
      }
      --BBI;
    } while (IsNoopInstruction(&*BBI));

    if (GetRCIdentityRoot(&*BBI) == GetArgRCIdentityRoot(Inst)) {
      LLVM_DEBUG(dbgs() << "Adding inline asm marker for the return value "
                           "optimization.\n");
      Changed = true;
      InlineAsm *IA =
          InlineAsm::get(FunctionType::get(Type::getVoidTy(Inst->getContext()),
                                           /*isVarArg=*/false),
                         RVInstMarker->getString(),
                         /*Constraints=*/"", /*hasSideEffects=*/true);

      objcarc::createCallInstWithColors(IA, None, "", Inst, BlockColors);
    }
  decline_rv_optimization:
    return false;
  }
  case ARCInstKind::InitWeak: {
    // objc_initWeak(p, null) => *p = null
    CallInst *CI = cast<CallInst>(Inst);
    if (IsNullOrUndef(CI->getArgOperand(1))) {
      Value *Null = ConstantPointerNull::get(cast<PointerType>(CI->getType()));
      Changed = true;
      new StoreInst(Null, CI->getArgOperand(0), CI);

      LLVM_DEBUG(dbgs() << "OBJCARCContract: Old = " << *CI << "\n"
                        << "                 New = " << *Null << "\n");

      CI->replaceAllUsesWith(Null);
      CI->eraseFromParent();
    }
    return true;
  }
  case ARCInstKind::Release:
    // Try to form an objc store strong from our release. If we fail, there is
    // nothing further to do below, so continue.
    tryToContractReleaseIntoStoreStrong(Inst, Iter, BlockColors);
    return true;
  case ARCInstKind::User:
    // Be conservative if the function has any alloca instructions.
    // Technically we only care about escaping alloca instructions,
    // but this is sufficient to handle some interesting cases.
    if (isa<AllocaInst>(Inst))
      TailOkForStoreStrongs = false;
    return true;
  case ARCInstKind::IntrinsicUser:
    // Remove calls to @llvm.objc.clang.arc.use(...).
    Changed = true;
    Inst->eraseFromParent();
    return true;
  default:
    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (CI->getIntrinsicID() == Intrinsic::objc_clang_arc_noop_use) {
        // Remove calls to @llvm.objc.clang.arc.noop.use(...).
        Changed = true;
        CI->eraseFromParent();
      }
    return true;
  }
}
//===----------------------------------------------------------------------===//
// Top Level Driver
//===----------------------------------------------------------------------===//

bool ObjCARCContract::init(Module &M) {
  EP.init(&M);

  // Initialize RVInstMarker.
  RVInstMarker = getRVInstMarker(M);

  return false;
}

bool ObjCARCContract::run(Function &F, AAResults *A, DominatorTree *D) {
  if (!EnableARCOpts)
    return false;

  Changed = CFGChanged = false;
  AA = A;
  DT = D;
  PA.setAA(A);
  BundledRetainClaimRVs BRV(/*ContractPass=*/true);
  BundledInsts = &BRV;

  std::pair<bool, bool> R = BundledInsts->insertAfterInvokes(F, DT);
  Changed |= R.first;
  CFGChanged |= R.second;

  DenseMap<BasicBlock *, ColorVector> BlockColors;
  if (F.hasPersonalityFn() &&
      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
    BlockColors = colorEHFunclets(F);

  LLVM_DEBUG(llvm::dbgs() << "**** ObjCARC Contract ****\n");

  // Track whether it's ok to mark objc_storeStrong calls with the "tail"
  // keyword. Be conservative if the function has variadic arguments.
  // It seems that functions which "return twice" are also unsafe for the
  // "tail" argument, because they are setjmp, which could need to
  // return to an earlier stack state.
  bool TailOkForStoreStrongs =
      !F.isVarArg() && !F.callsFunctionThatReturnsTwice();

  // For ObjC library calls which return their argument, replace uses of the
  // argument with uses of the call return value, if it dominates the use. This
  // reduces register pressure.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;

    LLVM_DEBUG(dbgs() << "Visiting: " << *Inst << "\n");

    if (auto *CI = dyn_cast<CallInst>(Inst))
      if (objcarc::hasAttachedCallOpBundle(CI)) {
        BundledInsts->insertRVCallWithColors(&*I, CI, BlockColors);
        --I;
        Changed = true;
      }

    // First try to peephole Inst. If there is nothing further we can do in
    // terms of undoing objc-arc-expand, process the next inst.
    if (tryToPeepholeInstruction(F, Inst, I, TailOkForStoreStrongs,
                                 BlockColors))
      continue;

    // Otherwise, try to undo objc-arc-expand.

    // Don't use GetArgRCIdentityRoot because we don't want to look through bitcasts
    // and such; to do the replacement, the argument must have type i8*.

    // Function for replacing uses of Arg dominated by Inst.
    auto ReplaceArgUses = [Inst, this](Value *Arg) {
      // If we're compiling bugpointed code, don't get in trouble.
      if (!isa<Instruction>(Arg) && !isa<Argument>(Arg))
        return;

      // Look through the uses of the pointer.
      for (Value::use_iterator UI = Arg->use_begin(), UE = Arg->use_end();
           UI != UE; ) {
        // Increment UI now, because we may unlink its element.
        Use &U = *UI++;
        unsigned OperandNo = U.getOperandNo();

        // If the call's return value dominates a use of the call's argument
        // value, rewrite the use to use the return value. We check for
        // reachability here because an unreachable call is considered to
        // trivially dominate itself, which would lead us to rewriting its
        // argument in terms of its return value, which would lead to
        // infinite loops in GetArgRCIdentityRoot.
        if (!DT->isReachableFromEntry(U) || !DT->dominates(Inst, U))
          continue;

        Changed = true;
        Instruction *Replacement = Inst;
        Type *UseTy = U.get()->getType();
        if (PHINode *PHI = dyn_cast<PHINode>(U.getUser())) {
          // For PHI nodes, insert the bitcast in the predecessor block.
          unsigned ValNo = PHINode::getIncomingValueNumForOperand(OperandNo);
          BasicBlock *IncomingBB = PHI->getIncomingBlock(ValNo);
          if (Replacement->getType() != UseTy) {
            // A catchswitch is both a pad and a terminator, meaning a basic
            // block with a catchswitch has no insertion point. Keep going up
            // the dominator tree until we find a non-catchswitch.
            BasicBlock *InsertBB = IncomingBB;
            while (isa<CatchSwitchInst>(InsertBB->getFirstNonPHI())) {
              InsertBB = DT->getNode(InsertBB)->getIDom()->getBlock();
            }

            assert(DT->dominates(Inst, &InsertBB->back()) &&
                   "Invalid insertion point for bitcast");
            Replacement =
                new BitCastInst(Replacement, UseTy, "", &InsertBB->back());
          }

          // While we're here, rewrite all edges for this PHI, rather
          // than just one use at a time, to minimize the number of
          // bitcasts we emit.
          for (unsigned i = 0, e = PHI->getNumIncomingValues(); i != e; ++i)
            if (PHI->getIncomingBlock(i) == IncomingBB) {
              // Keep the UI iterator valid.
              if (UI != UE &&
                  &PHI->getOperandUse(
                      PHINode::getOperandNumForIncomingValue(i)) == &*UI)
                ++UI;
              PHI->setIncomingValue(i, Replacement);
            }
        } else {
          if (Replacement->getType() != UseTy)
            Replacement = new BitCastInst(Replacement, UseTy, "",
                                          cast<Instruction>(U.getUser()));
          U.set(Replacement);
        }
      }
    };

    Value *Arg = cast<CallInst>(Inst)->getArgOperand(0);
    Value *OrigArg = Arg;

    // TODO: Change this to a do-while.
    for (;;) {
      ReplaceArgUses(Arg);

      // If Arg is a no-op casted pointer, strip one level of casts and iterate.
      if (const BitCastInst *BI = dyn_cast<BitCastInst>(Arg))
        Arg = BI->getOperand(0);
      else if (isa<GEPOperator>(Arg) &&
               cast<GEPOperator>(Arg)->hasAllZeroIndices())
        Arg = cast<GEPOperator>(Arg)->getPointerOperand();
      else if (isa<GlobalAlias>(Arg) &&
               !cast<GlobalAlias>(Arg)->isInterposable())
        Arg = cast<GlobalAlias>(Arg)->getAliasee();
      else {
        // If Arg is a PHI node, get PHIs that are equivalent to it and replace
        // their uses.
        if (PHINode *PN = dyn_cast<PHINode>(Arg)) {
          SmallVector<Value *, 1> PHIList;
          getEquivalentPHIs(*PN, PHIList);
          for (Value *PHI : PHIList)
            ReplaceArgUses(PHI);
        }
        break;
      }
    }

    // Replace bitcast users of Arg that are dominated by Inst.
    SmallVector<BitCastInst *, 2> BitCastUsers;

    // Add all bitcast users of the function argument first.
    for (User *U : OrigArg->users())
      if (auto *BC = dyn_cast<BitCastInst>(U))
        BitCastUsers.push_back(BC);

    // Replace the bitcasts with the call return. Iterate until list is empty.
    while (!BitCastUsers.empty()) {
      auto *BC = BitCastUsers.pop_back_val();
      for (User *U : BC->users())
        if (auto *B = dyn_cast<BitCastInst>(U))
          BitCastUsers.push_back(B);

      ReplaceArgUses(BC);
    }
  }

  // If this function has no escaping allocas or suspicious vararg usage,
  // objc_storeStrong calls can be marked with the "tail" keyword.
  if (TailOkForStoreStrongs)
    for (CallInst *CI : StoreStrongCalls)
      CI->setTailCall();
  StoreStrongCalls.clear();

  return Changed;
}
//===----------------------------------------------------------------------===//
// Misc Pass Manager
//===----------------------------------------------------------------------===//

char ObjCARCContractLegacyPass::ID = 0;
INITIALIZE_PASS_BEGIN(ObjCARCContractLegacyPass, "objc-arc-contract",
                      "ObjC ARC contraction", false, false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(ObjCARCContractLegacyPass, "objc-arc-contract",
                    "ObjC ARC contraction", false, false)

void ObjCARCContractLegacyPass::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<AAResultsWrapperPass>();
  AU.addRequired<DominatorTreeWrapperPass>();
}

Pass *llvm::createObjCARCContractPass() {
  return new ObjCARCContractLegacyPass();
}

bool ObjCARCContractLegacyPass::doInitialization(Module &M) {
  return OCARCC.init(M);
}

bool ObjCARCContractLegacyPass::runOnFunction(Function &F) {
  auto *AA = &getAnalysis<AAResultsWrapperPass>().getAAResults();
  auto *DT = &getAnalysis<DominatorTreeWrapperPass>().getDomTree();
  return OCARCC.run(F, AA, DT);
}

PreservedAnalyses ObjCARCContractPass::run(Function &F,
                                           FunctionAnalysisManager &AM) {
  ObjCARCContract OCAC;
  OCAC.init(*F.getParent());
  bool Changed = OCAC.run(F, &AM.getResult<AAManager>(F),
                          &AM.getResult<DominatorTreeAnalysis>(F));
  bool CFGChanged = OCAC.hasCFGChanged();
  if (Changed) {
    PreservedAnalyses PA;
    if (!CFGChanged)
      PA.preserveSet<CFGAnalyses>();
    return PA;
  }
  return PreservedAnalyses::all();
}