
//===-- Value.cpp - Implement the Value class -----------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the Value, ValueHandle, and User classes.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/Value.h"
#include "LLVMContextImpl.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/SmallString.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/DerivedUser.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/ValueHandle.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>

using namespace llvm;

static cl::opt<unsigned> UseDerefAtPointSemantics(
    "use-dereferenceable-at-point-semantics", cl::Hidden, cl::init(false),
    cl::desc("Deref attributes and metadata infer facts at definition only"));

//===----------------------------------------------------------------------===//
//                                Value Class
//===----------------------------------------------------------------------===//

static inline Type *checkType(Type *Ty) {
  assert(Ty && "Value defined with a null type: Error!");
  return Ty;
}

Value::Value(Type *ty, unsigned scid)
    : VTy(checkType(ty)), UseList(nullptr), SubclassID(scid), HasValueHandle(0),
      SubclassOptionalData(0), SubclassData(0), NumUserOperands(0),
      IsUsedByMD(false), HasName(false), HasMetadata(false) {
  static_assert(ConstantFirstVal == 0, "!(SubclassID < ConstantFirstVal)");
  // FIXME: Why isn't this in the subclass gunk??
  // Note, we cannot call isa<CallInst> before the CallInst has been
  // constructed.
  unsigned OpCode = 0;
  if (SubclassID >= InstructionVal)
    OpCode = SubclassID - InstructionVal;
  if (OpCode == Instruction::Call || OpCode == Instruction::Invoke ||
      OpCode == Instruction::CallBr)
    assert((VTy->isFirstClassType() || VTy->isVoidTy() || VTy->isStructTy()) &&
           "invalid CallBase type!");
  else if (SubclassID != BasicBlockVal &&
           (/*SubclassID < ConstantFirstVal ||*/ SubclassID > ConstantLastVal))
    assert((VTy->isFirstClassType() || VTy->isVoidTy()) &&
           "Cannot create non-first-class values except for constants!");
  static_assert(sizeof(Value) == 2 * sizeof(void *) + 2 * sizeof(unsigned),
                "Value too big");
}

Value::~Value() {
  // Notify all ValueHandles (if present) that this value is going away.
  if (HasValueHandle)
    ValueHandleBase::ValueIsDeleted(this);
  if (isUsedByMetadata())
    ValueAsMetadata::handleDeletion(this);

  // Remove associated metadata from context.
  if (HasMetadata)
    clearMetadata();

#ifndef NDEBUG // Only in -g mode...
  // Check to make sure that there are no uses of this value that are still
  // around when the value is destroyed. If there are, then we have a dangling
  // reference and something is wrong. This code is here to print out where
  // the value is still being referenced.
  //
  // Note that use_empty() cannot be called here, as it eventually downcasts
  // 'this' to GlobalValue (derived class of Value), but GlobalValue has already
  // been destructed, so accessing it is UB.
  //
  if (!materialized_use_empty()) {
    dbgs() << "While deleting: " << *VTy << " %" << getName() << "\n";
    for (auto *U : users())
      dbgs() << "Use still stuck around after Def is destroyed:" << *U << "\n";
  }
#endif
  assert(materialized_use_empty() && "Uses remain when a value is destroyed!");

  // If this value is named, destroy the name. This should not be in a symtab
  // at this point.
  destroyValueName();
}

void Value::deleteValue() {
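  // ~Value is not virtual, so dispatch on the value ID instead: the .def
  // files below expand to one case per concrete subclass, deleting the object
  // with its precise static type. Constants must instead go through
  // destroyConstant (see HANDLE_CONSTANT below).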
  switch (getValueID()) {
#define HANDLE_VALUE(Name) \
  case Value::Name##Val: \
    delete static_cast<Name *>(this); \
    break;

#define HANDLE_MEMORY_VALUE(Name) \
  case Value::Name##Val: \
    static_cast<DerivedUser *>(this)->DeleteValue( \
        static_cast<DerivedUser *>(this)); \
    break;

#define HANDLE_CONSTANT(Name) \
  case Value::Name##Val: \
    llvm_unreachable("constants should be destroyed with destroyConstant"); \
    break;

#define HANDLE_INSTRUCTION(Name) /* nothing */
#include "llvm/IR/Value.def"

#define HANDLE_INST(N, OPC, CLASS) \
  case Value::InstructionVal + Instruction::OPC: \
    delete static_cast<CLASS *>(this); \
    break;

#define HANDLE_USER_INST(N, OPC, CLASS)
#include "llvm/IR/Instruction.def"

  default:
    llvm_unreachable("attempting to delete unknown value kind");
  }
}

void Value::destroyValueName() {
  ValueName *Name = getValueName();
  if (Name) {
    MallocAllocator Allocator;
    Name->Destroy(Allocator);
  }
  setValueName(nullptr);
}

bool Value::hasNUses(unsigned N) const {
  return hasNItems(use_begin(), use_end(), N);
}

bool Value::hasNUsesOrMore(unsigned N) const {
  return hasNItemsOrMore(use_begin(), use_end(), N);
}

bool Value::hasOneUser() const {
  if (use_empty())
    return false;
  if (hasOneUse())
    return true;
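  // More than one use remains; there is still exactly one user iff every
  // user on the use list is the same as the first one.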
  return std::equal(++user_begin(), user_end(), user_begin());
}

static bool isUnDroppableUser(const User *U) { return !U->isDroppable(); }

Use *Value::getSingleUndroppableUse() {
  Use *Result = nullptr;
  for (Use &U : uses()) {
    if (!U.getUser()->isDroppable()) {
      if (Result)
        return nullptr;
      Result = &U;
    }
  }
  return Result;
}

User *Value::getUniqueUndroppableUser() {
  User *Result = nullptr;
  for (auto *U : users()) {
    if (!U->isDroppable()) {
      if (Result && Result != U)
        return nullptr;
      Result = U;
    }
  }
  return Result;
}

bool Value::hasNUndroppableUses(unsigned int N) const {
  return hasNItems(user_begin(), user_end(), N, isUnDroppableUser);
}

bool Value::hasNUndroppableUsesOrMore(unsigned int N) const {
  return hasNItemsOrMore(user_begin(), user_end(), N, isUnDroppableUser);
}

void Value::dropDroppableUses(
    llvm::function_ref<bool(const Use *)> ShouldDrop) {
  SmallVector<Use *, 8> ToBeEdited;
  for (Use &U : uses())
    if (U.getUser()->isDroppable() && ShouldDrop(&U))
      ToBeEdited.push_back(&U);
  for (Use *U : ToBeEdited)
    dropDroppableUse(*U);
}

void Value::dropDroppableUsesIn(User &Usr) {
  assert(Usr.isDroppable() && "Expected a droppable user!");
  for (Use &UsrOp : Usr.operands()) {
    if (UsrOp.get() == this)
      dropDroppableUse(UsrOp);
  }
}

void Value::dropDroppableUse(Use &U) {
  U.removeFromList();
  if (auto *Assume = dyn_cast<AssumeInst>(U.getUser())) {
    unsigned OpNo = U.getOperandNo();
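    // Operand 0 of an llvm.assume is its condition: dropping it turns the
    // assume into a trivially-true one. Any other operand belongs to an
    // operand bundle, so neutralize the value with undef and retag the bundle
    // as "ignore" so later passes skip it.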
    if (OpNo == 0)
      U.set(ConstantInt::getTrue(Assume->getContext()));
    else {
      U.set(UndefValue::get(U.get()->getType()));
      CallInst::BundleOpInfo &BOI = Assume->getBundleOpInfoForOperand(OpNo);
      BOI.Tag = Assume->getContext().pImpl->getOrInsertBundleTag("ignore");
    }
    return;
  }

  llvm_unreachable("unknown droppable use");
}

bool Value::isUsedInBasicBlock(const BasicBlock *BB) const {
  // This can be computed either by scanning the instructions in BB, or by
  // scanning the use list of this Value. Both lists can be very long, but
  // usually one is quite short.
  //
  // Scan both lists simultaneously until one is exhausted. This limits the
  // search to the shorter list.
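  // E.g., if BB holds only a handful of instructions but this value has
  // thousands of users, the loop below terminates after at most a handful of
  // iterations.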
  BasicBlock::const_iterator BI = BB->begin(), BE = BB->end();
  const_user_iterator UI = user_begin(), UE = user_end();
  for (; BI != BE && UI != UE; ++BI, ++UI) {
    // Scan basic block: Check if this Value is used by the instruction at BI.
    if (is_contained(BI->operands(), this))
      return true;
    // Scan use list: Check if the use at UI is in BB.
    const auto *User = dyn_cast<Instruction>(*UI);
    if (User && User->getParent() == BB)
      return true;
  }
  return false;
}

unsigned Value::getNumUses() const {
  return (unsigned)std::distance(use_begin(), use_end());
}

static bool getSymTab(Value *V, ValueSymbolTable *&ST) {
  ST = nullptr;
  if (Instruction *I = dyn_cast<Instruction>(V)) {
    if (BasicBlock *P = I->getParent())
      if (Function *PP = P->getParent())
        ST = PP->getValueSymbolTable();
  } else if (BasicBlock *BB = dyn_cast<BasicBlock>(V)) {
    if (Function *P = BB->getParent())
      ST = P->getValueSymbolTable();
  } else if (GlobalValue *GV = dyn_cast<GlobalValue>(V)) {
    if (Module *P = GV->getParent())
      ST = &P->getValueSymbolTable();
  } else if (Argument *A = dyn_cast<Argument>(V)) {
    if (Function *P = A->getParent())
      ST = P->getValueSymbolTable();
  } else {
    assert(isa<Constant>(V) && "Unknown value type!");
    return true; // no name is settable for this value.
  }
  return false;
}

ValueName *Value::getValueName() const {
  if (!HasName)
    return nullptr;

  LLVMContext &Ctx = getContext();
  auto I = Ctx.pImpl->ValueNames.find(this);
  assert(I != Ctx.pImpl->ValueNames.end() && "No name entry found!");
  return I->second;
}

void Value::setValueName(ValueName *VN) {
  LLVMContext &Ctx = getContext();

  assert(HasName == Ctx.pImpl->ValueNames.count(this) &&
         "HasName bit out of sync!");

  if (!VN) {
    if (HasName)
      Ctx.pImpl->ValueNames.erase(this);
    HasName = false;
    return;
  }

  HasName = true;
  Ctx.pImpl->ValueNames[this] = VN;
}

StringRef Value::getName() const {
  // Make sure the empty string is still a C string. For historical reasons,
  // some clients want to call .data() on the result and expect it to be null
  // terminated.
  if (!hasName())
    return StringRef("", 0);
  return getValueName()->getKey();
}

void Value::setNameImpl(const Twine &NewName) {
  // Fast path: LLVMContext can be set to strip out non-GlobalValue names.
  if (getContext().shouldDiscardValueNames() && !isa<GlobalValue>(this))
    return;

  // Fast path for the common IRBuilder case of setName("") when there is no
  // name.
  if (NewName.isTriviallyEmpty() && !hasName())
    return;

  SmallString<256> NameData;
  StringRef NameRef = NewName.toStringRef(NameData);
  assert(NameRef.find_first_of(0) == StringRef::npos &&
         "Null bytes are not allowed in names");

  // Name isn't changing?
  if (getName() == NameRef)
    return;

  assert(!getType()->isVoidTy() && "Cannot assign a name to void values!");

  // Get the symbol table to update for this object.
  ValueSymbolTable *ST;
  if (getSymTab(this, ST))
    return; // Cannot set a name on this value (e.g. constant).

  if (!ST) { // No symbol table to update? Just do the change.
    if (NameRef.empty()) {
      // Free the name for this value.
      destroyValueName();
      return;
    }

    // NOTE: Could optimize for the case where the name is shrinking, to avoid
    // deallocating and then reallocating.
    destroyValueName();

    // Create the new name.
    MallocAllocator Allocator;
    setValueName(ValueName::Create(NameRef, Allocator));
    getValueName()->setValue(this);
    return;
  }

  // NOTE: Could optimize for the case where the name is shrinking, to avoid
  // deallocating and then reallocating.
  if (hasName()) {
    // Remove old name.
    ST->removeValueName(getValueName());
    destroyValueName();

    if (NameRef.empty())
      return;
  }

  // Name is changing to something new.
  setValueName(ST->createValueName(NameRef, this));
}

void Value::setName(const Twine &NewName) {
  setNameImpl(NewName);
  if (Function *F = dyn_cast<Function>(this))
    F->recalculateIntrinsicID();
}

void Value::takeName(Value *V) {
  ValueSymbolTable *ST = nullptr;
  // If this value has a name, drop it.
  if (hasName()) {
    // Get the symtab this is in.
    if (getSymTab(this, ST)) {
      // We can't set a name on this value, but we need to clear V's name if
      // it has one.
      if (V->hasName())
        V->setName("");
      return; // Cannot set a name on this value (e.g. constant).
    }

    // Remove old name.
    if (ST)
      ST->removeValueName(getValueName());
    destroyValueName();
  }

  // Now we know that this has no name.

  // If V has no name either, we're done.
  if (!V->hasName())
    return;

  // Get this's symtab if we didn't before.
  if (!ST) {
    if (getSymTab(this, ST)) {
      // Clear V's name.
      V->setName("");
      return; // Cannot set a name on this value (e.g. constant).
    }
  }

  // Get V's ST; this should always succeed because V has a name.
  ValueSymbolTable *VST;
  bool Failure = getSymTab(V, VST);
  assert(!Failure && "V has a name, so it should have a ST!");
  (void)Failure;

  // If these values are both in the same symtab, we can do this very fast.
  // This works even if both values have no symtab yet.
  if (ST == VST) {
    // Take the name!
    setValueName(V->getValueName());
    V->setValueName(nullptr);
    getValueName()->setValue(this);
    return;
  }

  // Otherwise, things are slightly more complex. Remove V's name from VST and
  // then reinsert it into ST.
  if (VST)
    VST->removeValueName(V->getValueName());
  setValueName(V->getValueName());
  V->setValueName(nullptr);
  getValueName()->setValue(this);

  if (ST)
    ST->reinsertValue(this);
}

#ifndef NDEBUG
std::string Value::getNameOrAsOperand() const {
  if (!getName().empty())
    return std::string(getName());

  std::string BBName;
  raw_string_ostream OS(BBName);
  printAsOperand(OS, false);
  return OS.str();
}
#endif

void Value::assertModuleIsMaterializedImpl() const {
#ifndef NDEBUG
  const GlobalValue *GV = dyn_cast<GlobalValue>(this);
  if (!GV)
    return;
  const Module *M = GV->getParent();
  if (!M)
    return;
  assert(M->isMaterialized());
#endif
}

#ifndef NDEBUG
static bool contains(SmallPtrSetImpl<ConstantExpr *> &Cache, ConstantExpr *Expr,
                     Constant *C) {
  if (!Cache.insert(Expr).second)
    return false;

  for (auto &O : Expr->operands()) {
    if (O == C)
      return true;
    auto *CE = dyn_cast<ConstantExpr>(O);
    if (!CE)
      continue;
    if (contains(Cache, CE, C))
      return true;
  }
  return false;
}

static bool contains(Value *Expr, Value *V) {
  if (Expr == V)
    return true;

  auto *C = dyn_cast<Constant>(V);
  if (!C)
    return false;

  auto *CE = dyn_cast<ConstantExpr>(Expr);
  if (!CE)
    return false;

  SmallPtrSet<ConstantExpr *, 4> Cache;
  return contains(Cache, CE, C);
}
#endif // NDEBUG

void Value::doRAUW(Value *New, ReplaceMetadataUses ReplaceMetaUses) {
  assert(New && "Value::replaceAllUsesWith(<null>) is invalid!");
  assert(!contains(New, this) &&
         "this->replaceAllUsesWith(expr(this)) is NOT valid!");
  assert(New->getType() == getType() &&
         "replaceAllUses of value with new value of different type!");

  // Notify all ValueHandles (if present) that this value is going away.
  if (HasValueHandle)
    ValueHandleBase::ValueIsRAUWd(this, New);
  if (ReplaceMetaUses == ReplaceMetadataUses::Yes && isUsedByMetadata())
    ValueAsMetadata::handleRAUW(this, New);

  while (!materialized_use_empty()) {
    Use &U = *UseList;
    // Must handle Constants specially, we cannot call replaceUsesOfWith on a
    // constant because they are uniqued.
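    // Constants are uniqued in the LLVMContext, so rewriting one of their
    // operands in place would corrupt the uniquing tables;
    // handleOperandChange instead produces the re-uniqued constant and
    // redirects its users.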
    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {
        C->handleOperandChange(this, New);
        continue;
      }
    }

    U.set(New);
  }

  if (BasicBlock *BB = dyn_cast<BasicBlock>(this))
    BB->replaceSuccessorsPhiUsesWith(cast<BasicBlock>(New));
}

void Value::replaceAllUsesWith(Value *New) {
  doRAUW(New, ReplaceMetadataUses::Yes);
}

void Value::replaceNonMetadataUsesWith(Value *New) {
  doRAUW(New, ReplaceMetadataUses::No);
}

void Value::replaceUsesWithIf(Value *New,
                              llvm::function_ref<bool(Use &U)> ShouldReplace) {
  assert(New && "Value::replaceUsesWithIf(<null>) is invalid!");
  assert(New->getType() == getType() &&
         "replaceUses of value with new value of different type!");

  SmallVector<TrackingVH<Constant>, 8> Consts;
  SmallPtrSet<Constant *, 8> Visited;

  for (Use &U : llvm::make_early_inc_range(uses())) {
    if (!ShouldReplace(U))
      continue;
    // Must handle Constants specially, we cannot call replaceUsesOfWith on a
    // constant because they are uniqued.
    if (auto *C = dyn_cast<Constant>(U.getUser())) {
      if (!isa<GlobalValue>(C)) {
        if (Visited.insert(C).second)
          Consts.push_back(TrackingVH<Constant>(C));
        continue;
      }
    }
    U.set(New);
  }

  while (!Consts.empty()) {
    // FIXME: handleOperandChange() updates all the uses in a given Constant,
    // not just the one passed to ShouldReplace.
    Consts.pop_back_val()->handleOperandChange(this, New);
  }
}

/// Replace llvm.dbg.* uses of MetadataAsValue(ValueAsMetadata(V)) outside BB
/// with New.
static void replaceDbgUsesOutsideBlock(Value *V, Value *New, BasicBlock *BB) {
  SmallVector<DbgVariableIntrinsic *> DbgUsers;
  findDbgUsers(DbgUsers, V);
  for (auto *DVI : DbgUsers) {
    if (DVI->getParent() != BB)
      DVI->replaceVariableLocationOp(V, New);
  }
}

// Like replaceAllUsesWith except it does not handle constants or basic blocks.
// This routine leaves uses within BB.
void Value::replaceUsesOutsideBlock(Value *New, BasicBlock *BB) {
  assert(New && "Value::replaceUsesOutsideBlock(<null>, BB) is invalid!");
  assert(!contains(New, this) &&
         "this->replaceUsesOutsideBlock(expr(this), BB) is NOT valid!");
  assert(New->getType() == getType() &&
         "replaceUses of value with new value of different type!");
  assert(BB && "Basic block that may contain a use of 'New' must be defined\n");

  replaceDbgUsesOutsideBlock(this, New, BB);
  replaceUsesWithIf(New, [BB](Use &U) {
    auto *I = dyn_cast<Instruction>(U.getUser());
    // Don't replace if the user is an instruction inside basic block BB.
    return !I || I->getParent() != BB;
  });
}

namespace {
// Various metrics for how much to strip off of pointers.
enum PointerStripKind {
  PSK_ZeroIndices,
  PSK_ZeroIndicesAndAliases,
  PSK_ZeroIndicesSameRepresentation,
  PSK_ForAliasAnalysis,
  PSK_InBoundsConstantIndices,
  PSK_InBounds
};

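// Each strip kind backs one of the public entry points further down:
//   PSK_ZeroIndices                   -> stripPointerCasts()
//   PSK_ZeroIndicesAndAliases         -> stripPointerCastsAndAliases()
//   PSK_ZeroIndicesSameRepresentation -> stripPointerCastsSameRepresentation()
//   PSK_ForAliasAnalysis              -> stripPointerCastsForAliasAnalysis()
//   PSK_InBoundsConstantIndices       -> stripInBoundsConstantOffsets()
//   PSK_InBounds                      -> stripInBoundsOffsets()
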
template <PointerStripKind StripKind> static void NoopCallback(const Value *) {}

template <PointerStripKind StripKind>
static const Value *stripPointerCastsAndOffsets(
    const Value *V,
    function_ref<void(const Value *)> Func = NoopCallback<StripKind>) {
  if (!V->getType()->isPointerTy())
    return V;

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<const Value *, 4> Visited;

  Visited.insert(V);
  do {
    Func(V);
    if (auto *GEP = dyn_cast<GEPOperator>(V)) {
      switch (StripKind) {
      case PSK_ZeroIndices:
      case PSK_ZeroIndicesAndAliases:
      case PSK_ZeroIndicesSameRepresentation:
      case PSK_ForAliasAnalysis:
        if (!GEP->hasAllZeroIndices())
          return V;
        break;
      case PSK_InBoundsConstantIndices:
        if (!GEP->hasAllConstantIndices())
          return V;
        LLVM_FALLTHROUGH;
      case PSK_InBounds:
        if (!GEP->isInBounds())
          return V;
        break;
      }
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast) {
      V = cast<Operator>(V)->getOperand(0);
      if (!V->getType()->isPointerTy())
        return V;
    } else if (StripKind != PSK_ZeroIndicesSameRepresentation &&
               Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
      // TODO: If we know an address space cast will not change the
      // representation we could look through it here as well.
      V = cast<Operator>(V)->getOperand(0);
    } else if (StripKind == PSK_ZeroIndicesAndAliases && isa<GlobalAlias>(V)) {
      V = cast<GlobalAlias>(V)->getAliasee();
    } else if (StripKind == PSK_ForAliasAnalysis && isa<PHINode>(V) &&
               cast<PHINode>(V)->getNumIncomingValues() == 1) {
      V = cast<PHINode>(V)->getIncomingValue(0);
    } else {
      if (const auto *Call = dyn_cast<CallBase>(V)) {
        if (const Value *RV = Call->getReturnedArgOperand()) {
          V = RV;
          continue;
        }
        // The result of launder.invariant.group must alias its argument,
        // but it can't be marked with the returned attribute, which is why it
        // needs a special case.
        if (StripKind == PSK_ForAliasAnalysis &&
            (Call->getIntrinsicID() == Intrinsic::launder_invariant_group ||
             Call->getIntrinsicID() == Intrinsic::strip_invariant_group)) {
          V = Call->getArgOperand(0);
          continue;
        }
      }
      return V;
    }
    assert(V->getType()->isPointerTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  return V;
}
} // end anonymous namespace

const Value *Value::stripPointerCasts() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndices>(this);
}

const Value *Value::stripPointerCastsAndAliases() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndicesAndAliases>(this);
}

const Value *Value::stripPointerCastsSameRepresentation() const {
  return stripPointerCastsAndOffsets<PSK_ZeroIndicesSameRepresentation>(this);
}

const Value *Value::stripInBoundsConstantOffsets() const {
  return stripPointerCastsAndOffsets<PSK_InBoundsConstantIndices>(this);
}

const Value *Value::stripPointerCastsForAliasAnalysis() const {
  return stripPointerCastsAndOffsets<PSK_ForAliasAnalysis>(this);
}

const Value *Value::stripAndAccumulateConstantOffsets(
    const DataLayout &DL, APInt &Offset, bool AllowNonInbounds,
    bool AllowInvariantGroup,
    function_ref<bool(Value &, APInt &)> ExternalAnalysis) const {
  if (!getType()->isPtrOrPtrVectorTy())
    return this;

  unsigned BitWidth = Offset.getBitWidth();
  assert(BitWidth == DL.getIndexTypeSizeInBits(getType()) &&
         "The offset bit width does not match the DL specification.");

  // Even though we don't look through PHI nodes, we could be called on an
  // instruction in an unreachable block, which may be on a cycle.
  SmallPtrSet<const Value *, 4> Visited;
  Visited.insert(this);
  const Value *V = this;
  do {
    if (auto *GEP = dyn_cast<GEPOperator>(V)) {
      // If in-bounds was requested, we do not strip non-in-bounds GEPs.
      if (!AllowNonInbounds && !GEP->isInBounds())
        return V;

      // If one of the values we have visited is an addrspacecast, then
      // the pointer type of this GEP may be different from the type
      // of the Ptr parameter which was passed to this function. This
      // means when we construct GEPOffset, we need to use the size
      // of GEP's pointer type rather than the size of the original
      // pointer type.
      APInt GEPOffset(DL.getIndexTypeSizeInBits(V->getType()), 0);
      if (!GEP->accumulateConstantOffset(DL, GEPOffset, ExternalAnalysis))
        return V;

      // Stop traversal if the pointer offset wouldn't fit in the bit-width
      // provided by the Offset argument. This can happen due to AddrSpaceCast
      // stripping.
      if (GEPOffset.getMinSignedBits() > BitWidth)
        return V;

      // External Analysis can return a result higher/lower than the value
      // represents. We need to detect overflow/underflow.
      APInt GEPOffsetST = GEPOffset.sextOrTrunc(BitWidth);
      if (!ExternalAnalysis) {
        Offset += GEPOffsetST;
      } else {
        bool Overflow = false;
        APInt OldOffset = Offset;
        Offset = Offset.sadd_ov(GEPOffsetST, Overflow);
        if (Overflow) {
          Offset = OldOffset;
          return V;
        }
      }
      V = GEP->getPointerOperand();
    } else if (Operator::getOpcode(V) == Instruction::BitCast ||
               Operator::getOpcode(V) == Instruction::AddrSpaceCast) {
      V = cast<Operator>(V)->getOperand(0);
    } else if (auto *GA = dyn_cast<GlobalAlias>(V)) {
      if (!GA->isInterposable())
        V = GA->getAliasee();
    } else if (const auto *Call = dyn_cast<CallBase>(V)) {
      if (const Value *RV = Call->getReturnedArgOperand())
        V = RV;
      if (AllowInvariantGroup && Call->isLaunderOrStripInvariantGroup())
        V = Call->getArgOperand(0);
    }
    assert(V->getType()->isPtrOrPtrVectorTy() && "Unexpected operand type!");
  } while (Visited.insert(V).second);

  return V;
}

const Value *
Value::stripInBoundsOffsets(function_ref<void(const Value *)> Func) const {
  return stripPointerCastsAndOffsets<PSK_InBounds>(this, Func);
}

bool Value::canBeFreed() const {
  assert(getType()->isPointerTy());

  // Cases that can simply never be deallocated:
  // *) Constants aren't allocated per se, thus not deallocated either.
  if (isa<Constant>(this))
    return false;

  // Handle byval/byref/sret/inalloca/preallocated arguments. The storage
  // lifetime is guaranteed to be longer than the callee's lifetime.
  if (auto *A = dyn_cast<Argument>(this)) {
    if (A->hasPointeeInMemoryValueAttr())
      return false;
    // A pointer to an object in a function which neither frees, nor can
    // arrange for another thread to free on its behalf, cannot be freed in
    // the scope of the function. Note that this logic is restricted to memory
    // allocations in existence before the call; a nofree function *is*
    // allowed to free memory it allocated.
    const Function *F = A->getParent();
    if (F->doesNotFreeMemory() && F->hasNoSync())
      return false;
  }

  const Function *F = nullptr;
  if (auto *I = dyn_cast<Instruction>(this))
    F = I->getFunction();
  if (auto *A = dyn_cast<Argument>(this))
    F = A->getParent();

  if (!F)
    return true;

  // With garbage collection, deallocation typically occurs solely at or after
  // safepoints. If we're compiling for a collector which uses the
  // gc.statepoint infrastructure, safepoints aren't explicitly present
  // in the IR until after lowering from the abstract to the physical machine
  // model. The collector could choose to mix explicit deallocation and gc'd
  // objects, which is why we need the explicit opt-in on a per-collector
  // basis.
  if (!F->hasGC())
    return true;

  const auto &GCName = F->getGC();
  if (GCName == "statepoint-example") {
    auto *PT = cast<PointerType>(this->getType());
    if (PT->getAddressSpace() != 1)
      // For the sake of this example GC, we arbitrarily pick addrspace(1) as
      // our GC managed heap. This must match the same check in
      // RewriteStatepointsForGC (and probably needs to be better factored).
      return true;

    // It is cheaper to scan for a declaration than to scan for a use in this
    // function. Note that gc.statepoint is a type-overloaded function, so the
    // usual trick of requesting declaration of the intrinsic from the module
    // doesn't work.
    for (auto &Fn : *F->getParent())
      if (Fn.getIntrinsicID() == Intrinsic::experimental_gc_statepoint)
        return true;
    return false;
  }
  return true;
}

uint64_t Value::getPointerDereferenceableBytes(const DataLayout &DL,
                                               bool &CanBeNull,
                                               bool &CanBeFreed) const {
  assert(getType()->isPointerTy() && "must be pointer");

  uint64_t DerefBytes = 0;
  CanBeNull = false;
  CanBeFreed = UseDerefAtPointSemantics && canBeFreed();
  if (const Argument *A = dyn_cast<Argument>(this)) {
    DerefBytes = A->getDereferenceableBytes();
    if (DerefBytes == 0) {
      // Handle byval/byref/inalloca/preallocated arguments.
      if (Type *ArgMemTy = A->getPointeeInMemoryValueType()) {
        if (ArgMemTy->isSized()) {
          // FIXME: Why isn't this the type alloc size?
          DerefBytes = DL.getTypeStoreSize(ArgMemTy).getKnownMinSize();
        }
      }
    }

    if (DerefBytes == 0) {
      DerefBytes = A->getDereferenceableOrNullBytes();
      CanBeNull = true;
    }
  } else if (const auto *Call = dyn_cast<CallBase>(this)) {
    DerefBytes = Call->getRetDereferenceableBytes();
    if (DerefBytes == 0) {
      DerefBytes = Call->getRetDereferenceableOrNullBytes();
      CanBeNull = true;
    }
  } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
    if (MDNode *MD = LI->getMetadata(LLVMContext::MD_dereferenceable)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      DerefBytes = CI->getLimitedValue();
    }
    if (DerefBytes == 0) {
      if (MDNode *MD =
              LI->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
        DerefBytes = CI->getLimitedValue();
      }
      CanBeNull = true;
    }
  } else if (auto *IP = dyn_cast<IntToPtrInst>(this)) {
    if (MDNode *MD = IP->getMetadata(LLVMContext::MD_dereferenceable)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      DerefBytes = CI->getLimitedValue();
    }
    if (DerefBytes == 0) {
      if (MDNode *MD =
              IP->getMetadata(LLVMContext::MD_dereferenceable_or_null)) {
        ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
        DerefBytes = CI->getLimitedValue();
      }
      CanBeNull = true;
    }
  } else if (auto *AI = dyn_cast<AllocaInst>(this)) {
    if (!AI->isArrayAllocation()) {
      DerefBytes =
          DL.getTypeStoreSize(AI->getAllocatedType()).getKnownMinSize();
      CanBeNull = false;
      CanBeFreed = false;
    }
  } else if (auto *GV = dyn_cast<GlobalVariable>(this)) {
    if (GV->getValueType()->isSized() && !GV->hasExternalWeakLinkage()) {
      // TODO: Don't outright reject hasExternalWeakLinkage but set the
      // CanBeNull flag.
      DerefBytes = DL.getTypeStoreSize(GV->getValueType()).getFixedSize();
      CanBeNull = false;
      CanBeFreed = false;
    }
  }
  return DerefBytes;
}

Align Value::getPointerAlignment(const DataLayout &DL) const {
  assert(getType()->isPointerTy() && "must be pointer");
  if (auto *GO = dyn_cast<GlobalObject>(this)) {
    if (isa<Function>(GO)) {
      Align FunctionPtrAlign = DL.getFunctionPtrAlign().valueOrOne();
      switch (DL.getFunctionPtrAlignType()) {
      case DataLayout::FunctionPtrAlignType::Independent:
        return FunctionPtrAlign;
      case DataLayout::FunctionPtrAlignType::MultipleOfFunctionAlign:
        return std::max(FunctionPtrAlign, GO->getAlign().valueOrOne());
      }
      llvm_unreachable("Unhandled FunctionPtrAlignType");
    }
    const MaybeAlign Alignment(GO->getAlign());
    if (!Alignment) {
      if (auto *GVar = dyn_cast<GlobalVariable>(GO)) {
        Type *ObjectType = GVar->getValueType();
        if (ObjectType->isSized()) {
          // If the object is defined in the current Module, we'll be giving
          // it the preferred alignment. Otherwise, we have to assume that it
          // may only have the minimum ABI alignment.
          if (GVar->isStrongDefinitionForLinker())
            return DL.getPreferredAlign(GVar);
          else
            return DL.getABITypeAlign(ObjectType);
        }
      }
    }
    return Alignment.valueOrOne();
  } else if (const Argument *A = dyn_cast<Argument>(this)) {
    const MaybeAlign Alignment = A->getParamAlign();
    if (!Alignment && A->hasStructRetAttr()) {
      // An sret parameter has at least the ABI alignment of the return type.
      Type *EltTy = A->getParamStructRetType();
      if (EltTy->isSized())
        return DL.getABITypeAlign(EltTy);
    }
    return Alignment.valueOrOne();
  } else if (const AllocaInst *AI = dyn_cast<AllocaInst>(this)) {
    return AI->getAlign();
  } else if (const auto *Call = dyn_cast<CallBase>(this)) {
    MaybeAlign Alignment = Call->getRetAlign();
    if (!Alignment && Call->getCalledFunction())
      Alignment = Call->getCalledFunction()->getAttributes().getRetAlignment();
    return Alignment.valueOrOne();
  } else if (const LoadInst *LI = dyn_cast<LoadInst>(this)) {
    if (MDNode *MD = LI->getMetadata(LLVMContext::MD_align)) {
      ConstantInt *CI = mdconst::extract<ConstantInt>(MD->getOperand(0));
      return Align(CI->getLimitedValue());
    }
  } else if (auto *CstPtr = dyn_cast<Constant>(this)) {
    if (auto *CstInt = dyn_cast_or_null<ConstantInt>(ConstantExpr::getPtrToInt(
            const_cast<Constant *>(CstPtr), DL.getIntPtrType(getType()),
            /*OnlyIfReduced=*/true))) {
      size_t TrailingZeros = CstInt->getValue().countTrailingZeros();
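      // A constant pointer that folds to the integer N is aligned to the
      // largest power of two dividing N, i.e. 1 << countTrailingZeros(N).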
      // While the actual alignment may be large, elsewhere we have
      // an arbitrary upper alignment limit, so let's clamp to it.
      return Align(TrailingZeros < Value::MaxAlignmentExponent
                       ? uint64_t(1) << TrailingZeros
                       : Value::MaximumAlignment);
    }
  }
  return Align(1);
}

const Value *Value::DoPHITranslation(const BasicBlock *CurBB,
                                     const BasicBlock *PredBB) const {
  auto *PN = dyn_cast<PHINode>(this);
  if (PN && PN->getParent() == CurBB)
    return PN->getIncomingValueForBlock(PredBB);
  return this;
}

LLVMContext &Value::getContext() const { return VTy->getContext(); }

void Value::reverseUseList() {
  if (!UseList || !UseList->Next)
    // No need to reverse 0 or 1 uses.
    return;

  Use *Head = UseList;
  Use *Current = UseList->Next;
  Head->Next = nullptr;
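  // Classic in-place singly linked list reversal, with the extra step of
  // fixing each node's Prev back-pointer as it becomes the new head.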
  while (Current) {
    Use *Next = Current->Next;
    Current->Next = Head;
    Head->Prev = &Current->Next;
    Head = Current;
    Current = Next;
  }
  UseList = Head;
  Head->Prev = &UseList;
}

bool Value::isSwiftError() const {
  auto *Arg = dyn_cast<Argument>(this);
  if (Arg)
    return Arg->hasSwiftErrorAttr();
  auto *Alloca = dyn_cast<AllocaInst>(this);
  if (!Alloca)
    return false;
  return Alloca->isSwiftError();
}

bool Value::isTransitiveUsedByMetadataOnly() const {
  if (use_empty())
    return false;
  llvm::SmallVector<const User *, 32> WorkList;
  llvm::SmallPtrSet<const User *, 32> Visited;
  WorkList.insert(WorkList.begin(), user_begin(), user_end());
  while (!WorkList.empty()) {
    const User *U = WorkList.pop_back_val();
    Visited.insert(U);
    // If it is transitively used by a global value or a non-constant value,
    // it's obviously not only used by metadata.
    if (!isa<Constant>(U) || isa<GlobalValue>(U))
      return false;
    for (const User *UU : U->users())
      if (!Visited.count(UU))
        WorkList.push_back(UU);
  }
  return true;
}

//===----------------------------------------------------------------------===//
//                           ValueHandleBase Class
//===----------------------------------------------------------------------===//

void ValueHandleBase::AddToExistingUseList(ValueHandleBase **List) {
  assert(List && "Handle list is null?");

  // Splice ourselves into the list.
  Next = *List;
  *List = this;
  setPrevPtr(List);
  if (Next) {
    Next->setPrevPtr(&Next);
    assert(getValPtr() == Next->getValPtr() && "Added to wrong list?");
  }
}

void ValueHandleBase::AddToExistingUseListAfter(ValueHandleBase *List) {
  assert(List && "Must insert after existing node");

  Next = List->Next;
  setPrevPtr(&List->Next);
  List->Next = this;
  if (Next)
    Next->setPrevPtr(&Next);
}

void ValueHandleBase::AddToUseList() {
  assert(getValPtr() && "Null pointer doesn't have a use list!");

  LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;

  if (getValPtr()->HasValueHandle) {
    // If this value already has a ValueHandle, then it must be in the
    // ValueHandles map already.
    ValueHandleBase *&Entry = pImpl->ValueHandles[getValPtr()];
    assert(Entry && "Value doesn't have any handles?");
    AddToExistingUseList(&Entry);
    return;
  }

  // Ok, it doesn't have any handles yet, so we must insert it into the
  // DenseMap. However, doing this insertion could cause the DenseMap to
  // reallocate itself, which would invalidate all of the PrevP pointers that
  // point into the old table. Handle this by checking for reallocation and
  // updating the stale pointers only if needed.
  DenseMap<Value *, ValueHandleBase *> &Handles = pImpl->ValueHandles;
  const void *OldBucketPtr = Handles.getPointerIntoBucketsArray();

  ValueHandleBase *&Entry = Handles[getValPtr()];
  assert(!Entry && "Value really did already have handles?");
  AddToExistingUseList(&Entry);
  getValPtr()->HasValueHandle = true;

  // If reallocation didn't happen or if this was the first insertion, don't
  // walk the table.
  if (Handles.isPointerIntoBucketsArray(OldBucketPtr) ||
      Handles.size() == 1) {
    return;
  }

  // Okay, reallocation did happen. Fix the Prev Pointers.
  for (DenseMap<Value *, ValueHandleBase *>::iterator I = Handles.begin(),
                                                      E = Handles.end();
       I != E; ++I) {
    assert(I->second && I->first == I->second->getValPtr() &&
           "List invariant broken!");
    I->second->setPrevPtr(&I->second);
  }
}

void ValueHandleBase::RemoveFromUseList() {
  assert(getValPtr() && getValPtr()->HasValueHandle &&
         "Pointer doesn't have a use list!");

  // Unlink this from its use list.
  ValueHandleBase **PrevPtr = getPrevPtr();
  assert(*PrevPtr == this && "List invariant broken");

  *PrevPtr = Next;
  if (Next) {
    assert(Next->getPrevPtr() == &Next && "List invariant broken");
    Next->setPrevPtr(PrevPtr);
    return;
  }

  // If the Next pointer was null, then it is possible that this was the last
  // ValueHandle watching VP. If so, delete its entry from the ValueHandles
  // map.
  LLVMContextImpl *pImpl = getValPtr()->getContext().pImpl;
  DenseMap<Value *, ValueHandleBase *> &Handles = pImpl->ValueHandles;
  if (Handles.isPointerIntoBucketsArray(PrevPtr)) {
    Handles.erase(getValPtr());
    getValPtr()->HasValueHandle = false;
  }
}

void ValueHandleBase::ValueIsDeleted(Value *V) {
  assert(V->HasValueHandle && "Should only be called if ValueHandles present");

  // Get the linked list base, which is guaranteed to exist since the
  // HasValueHandle flag is set.
  LLVMContextImpl *pImpl = V->getContext().pImpl;
  ValueHandleBase *Entry = pImpl->ValueHandles[V];
  assert(Entry && "Value bit set but no entries exist");

  // We use a local ValueHandleBase as an iterator so that ValueHandles can add
  // and remove themselves from the list without breaking our iteration. This
  // is not really an AssertingVH; we just have to give ValueHandleBase a kind.
  // Note that we deliberately do not support the case where dropping a value
  // handle results in a new value handle being permanently added to the list
  // (as might occur in theory for CallbackVH's): the new value handle will not
  // be processed and the checking code will mete out righteous punishment if
  // the handle is still present once we have finished processing all the other
  // value handles (it is fine to momentarily add then remove a value handle).
  for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
    Iterator.RemoveFromUseList();
    Iterator.AddToExistingUseListAfter(Entry);
    assert(Entry->Next == &Iterator && "Loop invariant broken.");

    switch (Entry->getKind()) {
    case Assert:
      break;
    case Weak:
    case WeakTracking:
      // WeakTracking and Weak just go to null, which unlinks them
      // from the list.
      Entry->operator=(nullptr);
      break;
    case Callback:
      // Forward to the subclass's implementation.
      static_cast<CallbackVH *>(Entry)->deleted();
      break;
    }
  }

  // All callbacks, weak references, and assertingVHs should be dropped by now.
  if (V->HasValueHandle) {
#ifndef NDEBUG // Only in +Asserts mode...
    dbgs() << "While deleting: " << *V->getType() << " %" << V->getName()
           << "\n";
    if (pImpl->ValueHandles[V]->getKind() == Assert)
      llvm_unreachable("An asserting value handle still pointed to this"
                       " value!");
#endif
    llvm_unreachable("All references to V were not removed?");
  }
}

void ValueHandleBase::ValueIsRAUWd(Value *Old, Value *New) {
  assert(Old->HasValueHandle &&
         "Should only be called if ValueHandles present");
  assert(Old != New && "Changing value into itself!");
  assert(Old->getType() == New->getType() &&
         "replaceAllUses of value with new value of different type!");

  // Get the linked list base, which is guaranteed to exist since the
  // HasValueHandle flag is set.
  LLVMContextImpl *pImpl = Old->getContext().pImpl;
  ValueHandleBase *Entry = pImpl->ValueHandles[Old];

  assert(Entry && "Value bit set but no entries exist");

  // We use a local ValueHandleBase as an iterator so that
  // ValueHandles can add and remove themselves from the list without
  // breaking our iteration. This is not really an AssertingVH; we
  // just have to give ValueHandleBase some kind.
  for (ValueHandleBase Iterator(Assert, *Entry); Entry; Entry = Iterator.Next) {
    Iterator.RemoveFromUseList();
    Iterator.AddToExistingUseListAfter(Entry);
    assert(Entry->Next == &Iterator && "Loop invariant broken.");

    switch (Entry->getKind()) {
    case Assert:
    case Weak:
      // Asserting and Weak handles do not follow RAUW implicitly.
      break;
    case WeakTracking:
      // Weak goes to the new value, which will unlink it from Old's list.
      Entry->operator=(New);
      break;
    case Callback:
      // Forward to the subclass's implementation.
      static_cast<CallbackVH *>(Entry)->allUsesReplacedWith(New);
      break;
    }
  }

#ifndef NDEBUG
  // If any new weak value handles were added while processing the
  // list, then complain about it now.
  if (Old->HasValueHandle)
    for (Entry = pImpl->ValueHandles[Old]; Entry; Entry = Entry->Next)
      switch (Entry->getKind()) {
      case WeakTracking:
        dbgs() << "After RAUW from " << *Old->getType() << " %"
               << Old->getName() << " to " << *New->getType() << " %"
               << New->getName() << "\n";
        llvm_unreachable(
            "A weak tracking value handle still pointed to the old value!\n");
      default:
        break;
      }
#endif
}

// Pin the vtable to this file.
void CallbackVH::anchor() {}