//===-- Value.cpp - Implement the Value class -----------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the Value, ValueHandle, and User classes.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Value.h"
#include "LLVMContextImpl.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DerivedUser.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/TypedPointerType.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>

using namespace llvm;

static cl::opt<unsigned> UseDerefAtPointSemantics(
    "use-dereferenceable-at-point-semantics", cl::Hidden, cl::init(false),
    cl::desc("Deref attributes and metadata infer facts at definition only"));

//===----------------------------------------------------------------------===//
//                                Value Class
//===----------------------------------------------------------------------===//

static inline Type *checkType(Type *Ty) {
  assert(Ty && "Value defined with a null type: Error!");
  assert(!isa<TypedPointerType>(Ty->getScalarType()) &&
         "Cannot have values with typed pointer types");
  return Ty;
}

Value::Value(Type *ty, unsigned scid)
    : VTy(checkType(ty)), UseList(nullptr), SubclassID(scid), HasValueHandle(0),
      SubclassOptionalData(0), SubclassData(0), NumUserOperands(0),
      IsUsedByMD(false), HasName(false), HasMetadata(false) {
  static_assert(ConstantFirstVal == 0, "!(SubclassID < ConstantFirstVal)");
  // FIXME: Why isn't this in the subclass gunk??
  // Note, we cannot call isa<CallInst> before the CallInst has been
  // constructed.
  unsigned OpCode = 0;
  if (SubclassID >= InstructionVal)
    OpCode = SubclassID - InstructionVal;
  if (OpCode == Instruction::Call || OpCode == Instruction::Invoke ||
      OpCode == Instruction::CallBr)
    assert((VTy->isFirstClassType() || VTy->isVoidTy() || VTy->isStructTy()) &&
           "invalid CallBase type!");
  else if (SubclassID != BasicBlockVal &&
           (/*SubclassID < ConstantFirstVal ||*/ SubclassID > ConstantLastVal))
    assert((VTy->isFirstClassType() || VTy->isVoidTy()) &&
           "Cannot create non-first-class values except for constants!");
  static_assert(sizeof(Value) == 2 * sizeof(void *) + 2 * sizeof(unsigned),
                "Value too big");
}

Value::~Value() {
  // Notify all ValueHandles (if present) that this value is going away.
  if (HasValueHandle)
    ValueHandleBase::ValueIsDeleted(this);
  if (isUsedByMetadata())
    ValueAsMetadata::handleDeletion(this);

  // Remove associated metadata from context.
  if (HasMetadata)
    clearMetadata();

#ifndef NDEBUG // Only in +Asserts builds...
  // Check to make sure that there are no uses of this value that are still
  // around when the value is destroyed. If there are, then we have a dangling
  // reference and something is wrong. This code is here to print out where
  // the value is still being referenced.
  //
  // Note that use_empty() cannot be called here, as it eventually downcasts
  // 'this' to GlobalValue (derived class of Value), but GlobalValue has already
  // been destructed, so accessing it is UB.
  //
  if (!materialized_use_empty()) {
    dbgs() << "While deleting: " << *VTy << " %" << getName() << "\n";
    for (auto *U : users())
      dbgs() << "Use still stuck around after Def is destroyed:" << *U << "\n";
  }
#endif
  assert(materialized_use_empty() && "Uses remain when a value is destroyed!");

  // If this value is named, destroy the name. This should not be in a symtab
  // at this point.
  destroyValueName();
}

void Value::deleteValue() {
  switch (getValueID()) {
#define HANDLE_VALUE(Name)                                                     \
  case Value::Name##Val:                                                       \
    delete static_cast<Name *>(this);                                          \
    break;
#define HANDLE_MEMORY_VALUE(Name)                                              \
  case Value::Name##Val:                                                       \
    static_cast<DerivedUser *>(this)->DeleteValue(                             \
        static_cast<DerivedUser *>(this));                                     \
    break;
#define HANDLE_CONSTANT(Name)                                                  \
  case Value::Name##Val:                                                       \
    llvm_unreachable("constants should be destroyed with destroyConstant");    \
    break;
#define HANDLE_INSTRUCTION(Name) /* nothing */
#include "llvm/IR/Value.def"

#define HANDLE_INST(N, OPC, CLASS)                                             \
  case Value::InstructionVal + Instruction::OPC:                               \
    delete static_cast<CLASS *>(this);                                         \
    break;
#define HANDLE_USER_INST(N, OPC, CLASS)
#include "llvm/IR/Instruction.def"

  default:
    llvm_unreachable("attempting to delete unknown value kind");
  }
}

void Value::destroyValueName() {
  ValueName *Name = getValueName();
  if (Name) {
    MallocAllocator Allocator;
    Name->Destroy(Allocator);
  }
  setValueName(nullptr);
}

bool Value::hasNUses(unsigned N) const {
  return hasNItems(use_begin(), use_end(), N);
}

bool Value::hasNUsesOrMore(unsigned N) const {
  return hasNItemsOrMore(use_begin(), use_end(), N);
}

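// Note that "one user" is not the same as "one use": a single instruction can
// use the same value more than once, e.g. 'add i32 %x, %x' has two uses of %x
// but only one user.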
bool Value::hasOneUser() const {
  if (use_empty())
    return false;
  if (hasOneUse())
    return true;
  return std::equal(++user_begin(), user_end(), user_begin());
}

static bool isUnDroppableUser(const User *U) { return !U->isDroppable(); }

Use *Value::getSingleUndroppableUse() {
  Use *Result = nullptr;
  for (Use &U : uses()) {
    if (!U.getUser()->isDroppable()) {
      if (Result)
        return nullptr;
      Result = &U;
    }
  }
  return Result;
}

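// Unlike getSingleUndroppableUse(), which requires exactly one undroppable
// use, this allows several undroppable uses as long as they all belong to the
// same user (e.g. a value used twice by one instruction).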
User *Value::getUniqueUndroppableUser() {
  User *Result = nullptr;
  for (auto *U : users()) {
    if (!U->isDroppable()) {
      if (Result && Result != U)
        return nullptr;
      Result = U;
    }
  }
  return Result;
}

bool Value::hasNUndroppableUses(unsigned int N) const {
  return hasNItems(user_begin(), user_end(), N, isUnDroppableUser);
}

bool Value::hasNUndroppableUsesOrMore(unsigned int N) const {
  return hasNItemsOrMore(user_begin(), user_end(), N, isUnDroppableUser);
}

void Value::dropDroppableUses(
    llvm::function_ref<bool(const Use *)> ShouldDrop) {
  SmallVector<Use *, 8> ToBeEdited;
  for (Use &U : uses())
    if (U.getUser()->isDroppable() && ShouldDrop(&U))
      ToBeEdited.push_back(&U);
  for (Use *U : ToBeEdited)
    dropDroppableUse(*U);
}

void Value::dropDroppableUsesIn(User &Usr) {
  assert(Usr.isDroppable() && "Expected a droppable user!");
  for (Use &UsrOp : Usr.operands()) {
    if (UsrOp.get() == this)
      dropDroppableUse(UsrOp);
  }
}

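// Droppable uses currently only come from llvm.assume: dropping the condition
// operand rewrites it to 'true', while dropping an operand-bundle argument
// replaces it with undef and retags the bundle as "ignore".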
void Value::dropDroppableUse(Use &U) {
  U.removeFromList();
  if (auto *Assume = dyn_cast<AssumeInst>(U.getUser())) {
    unsigned OpNo = U.getOperandNo();
    if (OpNo == 0)
      U.set(ConstantInt::getTrue(Assume->getContext()));
    else {
      U.set(UndefValue::get(U.get()->getType()));
      CallInst::BundleOpInfo &BOI = Assume->getBundleOpInfoForOperand(OpNo);
      BOI.Tag = Assume->getContext().pImpl->getOrInsertBundleTag("ignore");
    }
    return;
  }

  llvm_unreachable("unknown droppable use");
}

bool Value::isUsedInBasicBlock(const BasicBlock *BB) const {
  // This can be computed either by scanning the instructions in BB, or by
  // scanning the use list of this Value. Both lists can be very long, but
  // usually one is quite short.
  //
  // Scan both lists simultaneously until one is exhausted. This limits the
  // search to the shorter list.
  BasicBlock::const_iterator BI = BB->begin(), BE = BB->end();
  const_user_iterator UI = user_begin(), UE = user_end();
  for (; BI != BE && UI != UE; ++BI, ++UI) {
    // Scan basic block: Check if this Value is used by the instruction at BI.
    if (is_contained(BI->operands(), this))
      return true;
    // Scan use list: Check if the use at UI is in BB.
    const auto *User = dyn_cast<Instruction>(*UI);
    if (User && User->getParent() == BB)
      return true;
  }
  return false;
}

unsigned Value::getNumUses() const {
  return (unsigned)std::distance(use_begin(), use_end());
}

static bool getSymTab(Value *V, ValueSymbolTable *&ST) {
  ST = nullptr;
  if (Instruction *I = dyn_cast<Instruction>(V)) {
    if (BasicBlock *P = I->getParent())
      if (Function *PP = P->getParent())
        ST = PP->getValueSymbolTable();
  } else if (BasicBlock *BB = dyn_cast<BasicBlock>(V)) {
    if (Function *P = BB->getParent())
      ST = P->getValueSymbolTable();
  } else if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
    if (Module *P = GV->getParent())
      ST = &P->getValueSymbolTable();
  } else if (Argument *A = dyn_cast<Argument>(V)) {
    if (Function *P = A->getParent())
      ST = P->getValueSymbolTable();
  } else {
    assert(isa<Constant>(V) && "Unknown value type!");
    return true; // no name is settable for this.
  }
  return false;
}

ValueName *Value::getValueName() const {
  if (!HasName) return nullptr;

  LLVMContext &Ctx = getContext();
  auto I = Ctx.pImpl->ValueNames.find(this);
  assert(I != Ctx.pImpl->ValueNames.end() &&
         "No name entry found!");
  return I->second;
}

void Value::setValueName(ValueName *VN) {
  LLVMContext &Ctx = getContext();

  assert(HasName == Ctx.pImpl->ValueNames.count(this) &&
         "HasName bit out of sync!");

  if (!VN) {
    if (HasName)
      Ctx.pImpl->ValueNames.erase(this);
    HasName = false;
    return;
  }

  HasName = true;
  Ctx.pImpl->ValueNames[this] = VN;
}

StringRef Value::getName() const {
  // Make sure the empty string is still a C string. For historical reasons,
  // some clients want to call .data() on the result and expect it to be null
  // terminated.
  if (!hasName())
    return StringRef("", 0);
  return getValueName()->getKey();
}

void Value::setNameImpl(const Twine &NewName) {
  // Fast-path: LLVMContext can be set to strip out non-GlobalValue names.
  if (getContext().shouldDiscardValueNames() && !isa<GlobalValue>(this))
    return;

  // Fast path for common IRBuilder case of setName("") when there is no name.
  if (NewName.isTriviallyEmpty() && !hasName())
    return;

  SmallString<256> NameData;
  StringRef NameRef = NewName.toStringRef(NameData);
  assert(NameRef.find_first_of(0) == StringRef::npos &&
         "Null bytes are not allowed in names");

  // Name isn't changing?
  if (getName() == NameRef)
    return;

  assert(!getType()->isVoidTy() && "Cannot assign a name to void values!");

  // Get the symbol table to update for this object.
  ValueSymbolTable *ST;
  if (getSymTab(this, ST))
    return; // Cannot set a name on this value (e.g. constant).

  if (!ST) { // No symbol table to update? Just do the change.
    if (NameRef.empty()) {
      // Free the name for this value.
      destroyValueName();
      return;
    }

    // NOTE: Could optimize for the case where the name is shrinking, to avoid
    // deallocating and then reallocating.
    destroyValueName();

    // Create the new name.
    MallocAllocator Allocator;
    setValueName(ValueName::create(NameRef, Allocator));
    getValueName()->setValue(this);
    return;
  }

  // NOTE: Could optimize for the case where the name is shrinking, to avoid
  // deallocating and then reallocating.
  if (hasName()) {
    // Remove old name.
    ST->removeValueName(getValueName());
    destroyValueName();

    if (NameRef.empty())
      return;
  }

  // Name is changing to something new.
  setValueName(ST->createValueName(NameRef, this));
}

void Value::setName(const Twine &NewName) {
  setNameImpl(NewName);
  if (Function *F = dyn_cast<Function>(this))
    F->recalculateIntrinsicID();
}

void Value::takeName(Value *V) {
  assert(V != this && "Illegal call to this->takeName(this)!");
  ValueSymbolTable *ST = nullptr;
  // If this value has a name, drop it.
  if (hasName()) {
    // Get the symtab this is in.
    if (getSymTab(this, ST)) {
      // We can't set a name on this value, but we need to clear V's name if
      // it has one.
      if (V->hasName()) V->setName("");
      return; // Cannot set a name on this value (e.g. constant).
    }

    // Remove old name.
    if (ST)
      ST->removeValueName(getValueName());
    destroyValueName();
  }

  // Now we know that this has no name.
  // If V has no name either, we're done.
  if (!V->hasName()) return;

  // Get this's symtab if we didn't before.
  if (!ST) {
    if (getSymTab(this, ST)) {
      // Clear V's name.
      V->setName("");
      return; // Cannot set a name on this value (e.g. constant).
    }
  }

  // Get V's ST, this should always succeed, because V has a name.
  ValueSymbolTable *VST;
  bool Failure = getSymTab(V, VST);
  assert(!Failure && "V has a name, so it should have a ST!"); (void)Failure;

  // If these values are both in the same symtab, we can do this very fast.
  // This works even if both values have no symtab yet.
  if (ST == VST) {
    // Take the name!
    setValueName(V->getValueName());
    V->setValueName(nullptr);
    getValueName()->setValue(this);
    return;
  }

  // Otherwise, things are slightly more complex. Remove V's name from VST and
  // then reinsert it into ST.
  if (VST)
    VST->removeValueName(V->getValueName());
  setValueName(V->getValueName());
  V->setValueName(nullptr);
  getValueName()->setValue(this);

  if (ST)
    ST->reinsertValue(this);
}

#ifndef NDEBUG
std::string Value::getNameOrAsOperand() const {
  if (!getName().empty())
    return std::string(getName());

  std::string BBName;
  raw_string_ostream OS(BBName);
  printAsOperand(OS, false);
  return OS.str();
}
#endif

void Value::assertModuleIsMaterializedImpl() const {
#ifndef NDEBUG
  const GlobalValue *GV = dyn_cast<GlobalValue>(this);
  if (!GV)
    return;
  const Module *M = GV->getParent();
  if (!M)
    return;
  assert(M->isMaterialized());
#endif
}

#ifndef NDEBUG
static bool contains(SmallPtrSetImpl<ConstantExpr *> &Cache, ConstantExpr *Expr,
                     Constant *C) {
  if (!Cache.insert(Expr).second)
    return false;

  for (auto &O : Expr->operands()) {
    if (O == C)
      return true;
    auto *CE = dyn_cast<ConstantExpr>(O);
    if (!CE)
      continue;
    if (contains(Cache, CE, C))
      return true;
  }
  return false;
}

static bool contains(Value *Expr, Value *V) {
  if (Expr == V)
    return true;

  auto *C = dyn_cast<Constant>(V);
  if (!C)
    return false;

  auto *CE = dyn_cast<ConstantExpr>(Expr);
  if (!CE)
    return false;

  SmallPtrSet<ConstantExpr *, 4> Cache;
  return contains(Cache, CE, C);
}
#endif // NDEBUG

void Value::doRAUW(Value *New, ReplaceMetadataUses ReplaceMetaUses) {
  assert(New && "Value::replaceAllUsesWith(<null>) is invalid!");
  assert(!contains(New, this) &&
         "this->replaceAllUsesWith(expr(this)) is NOT valid!");
  assert(New->getType() == getType() &&
         "replaceAllUses of value with new value of different type!");

  // Notify all ValueHandles (if present) that this value is going away.
  if (HasValueHandle)
    ValueHandleBase::ValueIsRAUWd(this, New);
  if (ReplaceMetaUses == ReplaceMetadataUses::Yes && isUsedByMetadata())
    ValueAsMetadata::handleRAUW(this, New);

  while (!materialized_use_empty()) {
    Use &U = *UseList;
    // Must handle Constants specially, we cannot call replaceUsesOfWith on a
    // constant because they are uniqued.
    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {
        C->handleOperandChange(this, New);
        continue;
      }
    }

    U.set(New);
  }

  if (BasicBlock *BB = dyn_cast<BasicBlock>(this))
    BB->replaceSuccessorsPhiUsesWith(cast<BasicBlock>(New));
}

void Value::replaceAllUsesWith(Value *New) {
  doRAUW(New, ReplaceMetadataUses::Yes);
}

void Value::replaceNonMetadataUsesWith(Value *New) {
  doRAUW(New, ReplaceMetadataUses::No);
}

void Value::replaceUsesWithIf(Value *New,
                              llvm::function_ref<bool(Use &U)> ShouldReplace) {
  assert(New && "Value::replaceUsesWithIf(<null>) is invalid!");
  assert(New->getType() == getType() &&
         "replaceUses of value with new value of different type!");

  SmallVector<TrackingVH<Constant>, 8> Consts;
  SmallPtrSet<Constant *, 8> Visited;

  for (Use &U : llvm::make_early_inc_range(uses())) {
    if (!ShouldReplace(U))
      continue;
    // Must handle Constants specially, we cannot call replaceUsesOfWith on a
    // constant because they are uniqued.
    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {
        if (Visited.insert(C).second)
          Consts.push_back(TrackingVH<Constant>(C));
        continue;
      }
    }
    U.set(New);
  }

  while (!Consts.empty()) {
    // FIXME: handleOperandChange() updates all the uses in a given Constant,
    // not just the one passed to ShouldReplace.
    Consts.pop_back_val()->handleOperandChange(this, New);
  }
}

/// Replace llvm.dbg.* uses of MetadataAsValue(ValueAsMetadata(V)) outside BB
/// with New.
static void replaceDbgUsesOutsideBlock(Value *V, Value *New, BasicBlock *BB) {
  SmallVector<DbgVariableIntrinsic *> DbgUsers;
  findDbgUsers(DbgUsers, V);
  for (auto *DVI : DbgUsers) {
    if (DVI->getParent() != BB)
      DVI->replaceVariableLocationOp(V, New);
  }
}

// Like replaceAllUsesWith except it does not handle constants or basic blocks.
// This routine leaves uses within BB.
void Value::replaceUsesOutsideBlock(Value *New, BasicBlock *BB) {
  assert(New && "Value::replaceUsesOutsideBlock(<null>, BB) is invalid!");
  assert(!contains(New, this) &&
         "this->replaceUsesOutsideBlock(expr(this), BB) is NOT valid!");
  assert(New->getType() == getType() &&
         "replaceUses of value with new value of different type!");
  assert(BB && "Basic block that may contain a use of 'New' must be defined\n");

  replaceDbgUsesOutsideBlock(this, New, BB);
  replaceUsesWithIf(New, [BB](Use &U) {
    auto *I = dyn_cast<Instruction>(U.getUser());
    // Don't replace if it's an instruction in basic block BB.
    return !I || I->getParent() != BB;
  });
}

namespace {
// Various metrics for how much to strip off of pointers.
enum PointerStripKind {
  PSK_ZeroIndices,
  PSK_ZeroIndicesAndAliases,
  PSK_ZeroIndicesSameRepresentation,
  PSK_ForAliasAnalysis,
  PSK_InBoundsConstantIndices,
  PSK_InBounds
};

template <PointerStripKind StripKind> static void NoopCallback(const Value *) {}

template <PointerStripKind StripKind>
static const Value *stripPointerCastsAndOffsets(
    const Value *V,
    function_ref<void(const Value *)> Func = NoopCallback<StripKind>) {
  if (!V->getType()->isPointerTy())
    return V;

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<const Value *, 4> Visited;

  Visited.insert(V);
  do {
    Func(V);
    if (auto *GEP = dyn_cast<GEPOperator>(V)) {
      switch (StripKind) {
      case PSK_ZeroIndices:
      case PSK_ZeroIndicesAndAliases:
      case PSK_ZeroIndicesSameRepresentation:
      case PSK_ForAliasAnalysis:
        if (!GEP->hasAllZeroIndices())
          return V;
        break;
      case PSK_InBoundsConstantIndices:
        if (!GEP->hasAllConstantIndices())
          return V;
        [[fallthrough]];
      case PSK_InBounds:
        if (!GEP->isInBounds())
          return V;
        break;
      }
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
      if (!V->getType()->isPointerTy())
        return V;
    } else if (StripKind != PSK_ZeroIndicesSameRepresentation &&
               Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
      // TODO: If we know an address space cast will not change the
      // representation we could look through it here as well.
      V = cast<Operator>(V)->getOperand(0);
    } else if (StripKind == PSK_ZeroIndicesAndAliases && isa<GlobalAlias>(V)) {
      V = cast<GlobalAlias>(V)->getAliasee();
    } else if (StripKind == PSK_ForAliasAnalysis && isa<PHINode>(V) &&
               cast<PHINode>(V)->getNumIncomingValues() == 1) {
      V = cast<PHINode>(V)->getIncomingValue(0);
    } else {
      if (const auto *Call = dyn_cast<CallBase>(V)) {
        if (const Value *RV = Call->getReturnedArgOperand()) {
          V = RV;
          continue;
        }
        // The result of launder.invariant.group must alias its argument,
        // but it can't be marked with the returned attribute; that's why it
        // needs a special case.
        if (StripKind == PSK_ForAliasAnalysis &&
            (Call->getIntrinsicID() == Intrinsic::launder_invariant_group ||
             Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) {
          V = Call->getArgOperand(0);
          continue;
        }
      }
      return V;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  return V;
}
} // end anonymous namespace

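// For example, stripPointerCasts() on
//   %p2 = getelementptr inbounds i8, ptr %p, i64 0
// walks back to %p, while stripInBoundsConstantOffsets() also looks through
// inbounds GEPs with non-zero constant indices.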
const Value *Value::stripPointerCasts() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndices>(this);
}

const Value *Value::stripPointerCastsAndAliases() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this);
}

const Value *Value::stripPointerCastsSameRepresentation() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndicesSameRepresentation>(this);
}

const Value *Value::stripInBoundsConstantOffsets() const {
  return stripPointerCastsAndOffsets<PSK_InBoundsConstantIndices>(this);
}

const Value *Value::stripPointerCastsForAliasAnalysis() const {
  return stripPointerCastsAndOffsets<PSK_ForAliasAnalysis>(this);
}

const Value *Value::stripAndAccumulateConstantOffsets(
    const DataLayout &DL, APInt &Offset, bool AllowNonInbounds,
    bool AllowInvariantGroup,
    function_ref<bool(Value &, APInt &)> ExternalAnalysis) const {
  if (!getType()->isPtrOrPtrVectorTy())
    return this;

  unsigned BitWidth = Offset.getBitWidth();
  assert(BitWidth == DL.getIndexTypeSizeInBits(getType()) &&
         "The offset bit width does not match the DL specification.");

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<const Value *, 4> Visited;
  Visited.insert(this);
  const Value *V = this;
  do {
    if (auto *GEP = dyn_cast<GEPOperator>(V)) {
      // If in-bounds was requested, we do not strip non-in-bounds GEPs.
      if (!AllowNonInbounds && !GEP->isInBounds())
        return V;

      // If one of the values we have visited is an addrspacecast, then
      // the pointer type of this GEP may be different from the type
      // of the Ptr parameter which was passed to this function. This
      // means when we construct GEPOffset, we need to use the size
      // of GEP's pointer type rather than the size of the original
      // pointer type.
      APInt GEPOffset(DL.getIndexTypeSizeInBits(V->getType()), 0);
      if (!GEP->accumulateConstantOffset(DL, GEPOffset, ExternalAnalysis))
        return V;

      // Stop traversal if the pointer offset wouldn't fit in the bit-width
      // provided by the Offset argument. This can happen due to AddrSpaceCast
      // stripping.
      if (GEPOffset.getMinSignedBits() > BitWidth)
        return V;

      // External Analysis can return a result higher/lower than the value
      // represents. We need to detect overflow/underflow.
      APInt GEPOffsetST = GEPOffset.sextOrTrunc(BitWidth);
      if (!ExternalAnalysis) {
        Offset += GEPOffsetST;
      } else {
        bool Overflow = false;
        APInt OldOffset = Offset;
        Offset = Offset.sadd_ov(GEPOffsetST, Overflow);
        if (Overflow) {
          Offset = OldOffset;
          return V;
        }
      }
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast ||
               Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (auto *GA = dyn_cast<GlobalAlias>(V)) {
      if (!GA->isInterposable())
        V = GA->getAliasee();
    } else if (const auto *Call = dyn_cast<CallBase>(V)) {
      if (const Value *RV = Call->getReturnedArgOperand())
        V = RV;
      if (AllowInvariantGroup && Call->isLaunderOrStripInvariantGroup())
        V = Call->getArgOperand(0);
    }
    assert(V->getType()->isPtrOrPtrVectorTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  return V;
}

const Value *
Value::stripInBoundsOffsets(function_ref<void(const Value *)> Func) const {
  return stripPointerCastsAndOffsets<PSK_InBounds>(this, Func);
}

bool Value::canBeFreed() const {
  assert(getType()->isPointerTy());

  // Cases that can simply never be deallocated:
  // *) Constants aren't allocated per se, thus not deallocated either.
  if (isa<Constant>(this))
    return false;

  // Handle byval/byref/sret/inalloca/preallocated arguments. The storage
  // lifetime is guaranteed to be longer than the callee's lifetime.
  if (auto *A = dyn_cast<Argument>(this)) {
    if (A->hasPointeeInMemoryValueAttr())
      return false;
    // A pointer to an object in a function which neither frees, nor can
    // arrange for another thread to free on its behalf, cannot be freed in
    // the scope of the function. Note that this logic is restricted to memory
    // allocations in existence before the call; a nofree function *is* allowed
    // to free memory it allocated.
    const Function *F = A->getParent();
    if (F->doesNotFreeMemory() && F->hasNoSync())
      return false;
  }

  const Function *F = nullptr;
  if (auto *I = dyn_cast<Instruction>(this))
    F = I->getFunction();
  if (auto *A = dyn_cast<Argument>(this))
    F = A->getParent();

  if (!F)
    return true;

  // With garbage collection, deallocation typically occurs solely at or after
  // safepoints. If we're compiling for a collector which uses the
  // gc.statepoint infrastructure, safepoints aren't explicitly present
  // in the IR until after lowering from abstract to physical machine model.
  // The collector could choose to mix explicit deallocation and gc'd objects,
  // which is why we need the explicit opt in on a per collector basis.
  if (!F->hasGC())
    return true;

  const auto &GCName = F->getGC();
  if (GCName == "statepoint-example") {
    auto *PT = cast<PointerType>(this->getType());
    if (PT->getAddressSpace() != 1)
      // For the sake of this example GC, we arbitrarily pick addrspace(1) as
      // our GC managed heap. This must match the same check in
      // RewriteStatepointsForGC (and probably needs to be factored better).
      return true;

    // It is cheaper to scan for a declaration than to scan for a use in this
    // function. Note that gc.statepoint is a type overloaded function so the
    // usual trick of requesting declaration of the intrinsic from the module
    // doesn't work.
    for (auto &Fn : *F->getParent())
      if (Fn.getIntrinsicID() == Intrinsic::experimental_gc_statepoint)
        return true;
    return false;
  }
  return true;
}

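// Computes how many bytes are known dereferenceable for this pointer, based
// on dereferenceable/dereferenceable_or_null attributes and metadata, byval
// and friends, alloca sizes, and sized global variables. CanBeNull is set
// when the guarantee only covers the non-null case; CanBeFreed is set when
// the underlying object might be freed before the point of use (only relevant
// under -use-dereferenceable-at-point-semantics).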
uint64_t Value::getPointerDereferenceableBytes(const DataLayout &DL,
                                               bool &CanBeNull,
                                               bool &CanBeFreed) const {
  assert(getType()->isPointerTy() && "must be pointer");

  uint64_t DerefBytes = 0;
  CanBeNull = false;
  CanBeFreed = UseDerefAtPointSemantics && canBeFreed();
  if (const Argument *A = dyn_cast<Argument>(this)) {
    DerefBytes = A->getDereferenceableBytes();
    if (DerefBytes == 0) {
      // Handle byval/byref/inalloca/preallocated arguments.
      if (Type *ArgMemTy = A->getPointeeInMemoryValueType()) {
        if (ArgMemTy->isSized()) {
          // FIXME: Why isn't this the type alloc size?
          DerefBytes = DL.getTypeStoreSize(ArgMemTy).getKnownMinValue();
        }
      }
    }

    if (DerefBytes == 0) {
      DerefBytes = A->getDereferenceableOrNullBytes();
      CanBeNull = true;
    }
  } else if (const auto *Call = dyn_cast<CallBase>(this)) {
    DerefBytes = Call->getRetDereferenceableBytes();
    if (DerefBytes == 0) {
      DerefBytes = Call->getRetDereferenceableOrNullBytes();
      CanBeNull = true;
    }
  } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
    if (MDNode *MD = LI->getMetadata(LLVMContext::MD_dereferenceable)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      DerefBytes = CI->getLimitedValue();
    }
    if (DerefBytes == 0) {
      if (MDNode *MD =
              LI->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
        DerefBytes = CI->getLimitedValue();
      }
      CanBeNull = true;
    }
  } else if (auto *IP = dyn_cast<IntToPtrInst>(this)) {
    if (MDNode *MD = IP->getMetadata(LLVMContext::MD_dereferenceable)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      DerefBytes = CI->getLimitedValue();
    }
    if (DerefBytes == 0) {
      if (MDNode *MD =
              IP->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
        DerefBytes = CI->getLimitedValue();
      }
      CanBeNull = true;
    }
  } else if (auto *AI = dyn_cast<AllocaInst>(this)) {
    if (!AI->isArrayAllocation()) {
      DerefBytes =
          DL.getTypeStoreSize(AI->getAllocatedType()).getKnownMinValue();
      CanBeNull = false;
      CanBeFreed = false;
    }
  } else if (auto *GV = dyn_cast<GlobalVariable>(this)) {
    if (GV->getValueType()->isSized() && !GV->hasExternalWeakLinkage()) {
      // TODO: Don't outright reject hasExternalWeakLinkage but set the
      // CanBeNull flag.
      DerefBytes = DL.getTypeStoreSize(GV->getValueType()).getFixedValue();
      CanBeNull = false;
      CanBeFreed = false;
    }
  }
  return DerefBytes;
}

Align Value::getPointerAlignment(const DataLayout &DL) const {
  assert(getType()->isPointerTy() && "must be pointer");
  if (auto *GO = dyn_cast<GlobalObject>(this)) {
    if (isa<Function>(GO)) {
      Align FunctionPtrAlign = DL.getFunctionPtrAlign().valueOrOne();
      switch (DL.getFunctionPtrAlignType()) {
      case DataLayout::FunctionPtrAlignType::Independent:
        return FunctionPtrAlign;
      case DataLayout::FunctionPtrAlignType::MultipleOfFunctionAlign:
        return std::max(FunctionPtrAlign, GO->getAlign().valueOrOne());
      }
      llvm_unreachable("Unhandled FunctionPtrAlignType");
    }
    const MaybeAlign Alignment(GO->getAlign());
    if (!Alignment) {
      if (auto *GVar = dyn_cast<GlobalVariable>(GO)) {
        Type *ObjectType = GVar->getValueType();
        if (ObjectType->isSized()) {
          // If the object is defined in the current Module, we'll be giving
          // it the preferred alignment. Otherwise, we have to assume that it
          // may only have the minimum ABI alignment.
          if (GVar->isStrongDefinitionForLinker())
            return DL.getPreferredAlign(GVar);
          else
            return DL.getABITypeAlign(ObjectType);
        }
      }
    }
    return Alignment.valueOrOne();
  } else if (const Argument *A = dyn_cast<Argument>(this)) {
    const MaybeAlign Alignment = A->getParamAlign();
    if (!Alignment && A->hasStructRetAttr()) {
      // An sret parameter has at least the ABI alignment of the return type.
      Type *EltTy = A->getParamStructRetType();
      if (EltTy->isSized())
        return DL.getABITypeAlign(EltTy);
    }
    return Alignment.valueOrOne();
  } else if (const AllocaInst *AI = dyn_cast<AllocaInst>(this)) {
    return AI->getAlign();
  } else if (const auto *Call = dyn_cast<CallBase>(this)) {
    MaybeAlign Alignment = Call->getRetAlign();
    if (!Alignment && Call->getCalledFunction())
      Alignment = Call->getCalledFunction()->getAttributes().getRetAlignment();
    return Alignment.valueOrOne();
  } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
    if (MDNode *MD = LI->getMetadata(LLVMContext::MD_align)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      return Align(CI->getLimitedValue());
    }
  } else if (auto *CstPtr = dyn_cast<Constant>(this)) {
    // Strip pointer casts to avoid creating unnecessary ptrtoint expression
    // if the only "reduction" is combining a bitcast + ptrtoint.
    CstPtr = CstPtr->stripPointerCasts();
    if (auto *CstInt = dyn_cast_or_null<ConstantInt>(ConstantExpr::getPtrToInt(
            const_cast<Constant *>(CstPtr), DL.getIntPtrType(getType()),
            /*OnlyIfReduced=*/true))) {
      size_t TrailingZeros = CstInt->getValue().countTrailingZeros();
      // While the actual alignment may be large, elsewhere we have
      // an arbitrary upper alignment limit, so let's clamp to it.
      return Align(TrailingZeros < Value::MaxAlignmentExponent
                       ? uint64_t(1) << TrailingZeros
                       : Value::MaximumAlignment);
    }
  }
  return Align(1);
}

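// For example, if %phi = phi ptr [ %a, %pred1 ], [ %b, %pred2 ] lives in
// CurBB, then DoPHITranslation(CurBB, %pred1) on %phi yields %a; any other
// value translates to itself.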
const Value *Value::DoPHITranslation(const BasicBlock *CurBB,
                                     const BasicBlock *PredBB) const {
  auto *PN = dyn_cast<PHINode>(this);
  if (PN && PN->getParent() == CurBB)
    return PN->getIncomingValueForBlock(PredBB);
  return this;
}

LLVMContext &Value::getContext() const { return VTy->getContext(); }

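// Reverses this value's use list in place: the Next pointers are re-linked
// front to back, and each Use's Prev pointer is updated to keep the doubly
// linked structure consistent.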
void Value::reverseUseList() {
  if (!UseList || !UseList->Next)
    // No need to reverse 0 or 1 uses.
    return;

  Use *Head = UseList;
  Use *Current = UseList->Next;
  Head->Next = nullptr;
  while (Current) {
    Use *Next = Current->Next;
    Current->Next = Head;
    Head->Prev = &Current->Next;
    Head = Current;
    Current = Next;
  }
  UseList = Head;
  Head->Prev = &UseList;
}

bool Value::isSwiftError() const {
  auto *Arg = dyn_cast<Argument>(this);
  if (Arg)
    return Arg->hasSwiftErrorAttr();
  auto *Alloca = dyn_cast<AllocaInst>(this);
  if (!Alloca)
    return false;
  return Alloca->isSwiftError();
}

//===----------------------------------------------------------------------===//
//                             ValueHandleBase Class
//===----------------------------------------------------------------------===//

void ValueHandleBase::AddToExistingUseList(ValueHandleBase **List) {
  assert(List && "Handle list is null?");

  // Splice ourselves into the list.
  Next = *List;
  *List = this;
  setPrevPtr(List);
  if (Next) {
    Next->setPrevPtr(&Next);
    assert(getValPtr() == Next->getValPtr() && "Added to wrong list?");
  }
}

void ValueHandleBase::AddToExistingUseListAfter(ValueHandleBase *List) {
  assert(List && "Must insert after existing node");

  Next = List->Next;
  setPrevPtr(&List->Next);
  List->Next = this;
  if (Next)
    Next->setPrevPtr(&Next);
}

void ValueHandleBase::AddToUseList() {
  assert(getValPtr() && "Null pointer doesn't have a use list!");

  LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;

  if (getValPtr()->HasValueHandle) {
    // If this value already has a ValueHandle, then it must be in the
    // ValueHandles map already.
    ValueHandleBase *&Entry = pImpl->ValueHandles[getValPtr()];
    assert(Entry && "Value doesn't have any handles?");
    AddToExistingUseList(&Entry);
    return;
  }

  // Ok, it doesn't have any handles yet, so we must insert it into the
  // DenseMap. However, doing this insertion could cause the DenseMap to
  // reallocate itself, which would invalidate all of the PrevP pointers that
  // point into the old table. Handle this by checking for reallocation and
  // updating the stale pointers only if needed.
  DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles;
  const void *OldBucketPtr = Handles.getPointerIntoBucketsArray();

  ValueHandleBase *&Entry = Handles[getValPtr()];
  assert(!Entry && "Value really did already have handles?");
  AddToExistingUseList(&Entry);
  getValPtr()->HasValueHandle = true;

  // If reallocation didn't happen or if this was the first insertion, don't
  // walk the table.
  if (Handles.isPointerIntoBucketsArray(OldBucketPtr) ||
      Handles.size() == 1) {
    return;
  }

  // Okay, reallocation did happen. Fix the Prev Pointers.
  for (DenseMap<Value*, ValueHandleBase*>::iterator I = Handles.begin(),
       E = Handles.end(); I != E; ++I) {
    assert(I->second && I->first == I->second->getValPtr() &&
           "List invariant broken!");
    I->second->setPrevPtr(&I->second);
  }
}

void ValueHandleBase::RemoveFromUseList() {
  assert(getValPtr() && getValPtr()->HasValueHandle &&
         "Pointer doesn't have a use list!");

  // Unlink this from its use list.
  ValueHandleBase **PrevPtr = getPrevPtr();
  assert(*PrevPtr == this && "List invariant broken");

  *PrevPtr = Next;
  if (Next) {
    assert(Next->getPrevPtr() == &Next && "List invariant broken");
    Next->setPrevPtr(PrevPtr);
    return;
  }

  // If the Next pointer was null, then it is possible that this was the last
  // ValueHandle watching VP. If so, delete its entry from the ValueHandles
  // map.
  LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;
  DenseMap<Value*, ValueHandleBase*> &Handles = pImpl->ValueHandles;
  if (Handles.isPointerIntoBucketsArray(PrevPtr)) {
    Handles.erase(getValPtr());
    getValPtr()->HasValueHandle = false;
  }
}

void ValueHandleBase::ValueIsDeleted(Value *V) {
  assert(V->HasValueHandle && "Should only be called if ValueHandles present");

  // Get the linked list base, which is guaranteed to exist since the
  // HasValueHandle flag is set.
  LLVMContextImpl *pImpl = V->getContext().pImpl;
  ValueHandleBase *Entry = pImpl->ValueHandles[V];
  assert(Entry && "Value bit set but no entries exist");

  // We use a local ValueHandleBase as an iterator so that ValueHandles can add
  // and remove themselves from the list without breaking our iteration. This
  // is not really an AssertingVH; we just have to give ValueHandleBase a kind.
  // Note that we deliberately do not support the case when dropping a value
  // handle results in a new value handle being permanently added to the list
  // (as might occur in theory for CallbackVH's): the new value handle will not
  // be processed and the checking code will mete out righteous punishment if
  // the handle is still present once we have finished processing all the other
  // value handles (it is fine to momentarily add then remove a value handle).
  for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
    Iterator.RemoveFromUseList();
    Iterator.AddToExistingUseListAfter(Entry);
    assert(Entry->Next == &Iterator && "Loop invariant broken.");

    switch (Entry->getKind()) {
    case Assert:
      break;
    case Weak:
    case WeakTracking:
      // WeakTracking and Weak just go to null, which unlinks them
      // from the list.
      Entry->operator=(nullptr);
      break;
    case Callback:
      // Forward to the subclass's implementation.
      static_cast<CallbackVH*>(Entry)->deleted();
      break;
    }
  }

  // All callbacks, weak references, and assertingVHs should be dropped by now.
  if (V->HasValueHandle) {
#ifndef NDEBUG // Only in +Asserts mode...
    dbgs() << "While deleting: " << *V->getType() << " %" << V->getName()
           << "\n";
    if (pImpl->ValueHandles[V]->getKind() == Assert)
      llvm_unreachable("An asserting value handle still pointed to this"
                       " value!");
#endif
    llvm_unreachable("All references to V were not removed?");
  }
}

void ValueHandleBase::ValueIsRAUWd(Value *Old, Value *New) {
  assert(Old->HasValueHandle && "Should only be called if ValueHandles present");
  assert(Old != New && "Changing value into itself!");
  assert(Old->getType() == New->getType() &&
         "replaceAllUses of value with new value of different type!");

  // Get the linked list base, which is guaranteed to exist since the
  // HasValueHandle flag is set.
  LLVMContextImpl *pImpl = Old->getContext().pImpl;
  ValueHandleBase *Entry = pImpl->ValueHandles[Old];

  assert(Entry && "Value bit set but no entries exist");

  // We use a local ValueHandleBase as an iterator so that
  // ValueHandles can add and remove themselves from the list without
  // breaking our iteration. This is not really an AssertingVH; we
  // just have to give ValueHandleBase some kind.
  for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
    Iterator.RemoveFromUseList();
    Iterator.AddToExistingUseListAfter(Entry);
    assert(Entry->Next == &Iterator && "Loop invariant broken.");

    switch (Entry->getKind()) {
    case Assert:
    case Weak:
      // Asserting and Weak handles do not follow RAUW implicitly.
      break;
    case WeakTracking:
      // WeakTracking goes to the new value, which will unlink it from Old's
      // list.
      Entry->operator=(New);
      break;
    case Callback:
      // Forward to the subclass's implementation.
      static_cast<CallbackVH*>(Entry)->allUsesReplacedWith(New);
      break;
    }
  }

#ifndef NDEBUG
  // If any new weak value handles were added while processing the
  // list, then complain about it now.
  if (Old->HasValueHandle)
    for (Entry = pImpl->ValueHandles[Old]; Entry; Entry = Entry->Next)
      switch (Entry->getKind()) {
      case WeakTracking:
        dbgs() << "After RAUW from " << *Old->getType() << " %"
               << Old->getName() << " to " << *New->getType() << " %"
               << New->getName() << "\n";
        llvm_unreachable(
            "A weak tracking value handle still pointed to the old value!\n");
      default:
        break;
      }
#endif
}

// Pin the vtable to this file.
void CallbackVH::anchor() {}