ValueEnumerator.cpp 39 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181
  1. //===- ValueEnumerator.cpp - Number values and types for bitcode writer ---===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file implements the ValueEnumerator class.
  10. //
  11. //===----------------------------------------------------------------------===//
  12. #include "ValueEnumerator.h"
  13. #include "llvm/ADT/SmallVector.h"
  14. #include "llvm/Config/llvm-config.h"
  15. #include "llvm/IR/Argument.h"
  16. #include "llvm/IR/BasicBlock.h"
  17. #include "llvm/IR/Constant.h"
  18. #include "llvm/IR/DebugInfoMetadata.h"
  19. #include "llvm/IR/DerivedTypes.h"
  20. #include "llvm/IR/Function.h"
  21. #include "llvm/IR/GlobalAlias.h"
  22. #include "llvm/IR/GlobalIFunc.h"
  23. #include "llvm/IR/GlobalObject.h"
  24. #include "llvm/IR/GlobalValue.h"
  25. #include "llvm/IR/GlobalVariable.h"
  26. #include "llvm/IR/Instruction.h"
  27. #include "llvm/IR/Instructions.h"
  28. #include "llvm/IR/Metadata.h"
  29. #include "llvm/IR/Module.h"
  30. #include "llvm/IR/Operator.h"
  31. #include "llvm/IR/Type.h"
  32. #include "llvm/IR/Use.h"
  33. #include "llvm/IR/User.h"
  34. #include "llvm/IR/Value.h"
  35. #include "llvm/IR/ValueSymbolTable.h"
  36. #include "llvm/Support/Casting.h"
  37. #include "llvm/Support/Compiler.h"
  38. #include "llvm/Support/Debug.h"
  39. #include "llvm/Support/MathExtras.h"
  40. #include "llvm/Support/raw_ostream.h"
  41. #include <algorithm>
  42. #include <cstddef>
  43. #include <iterator>
  44. #include <tuple>
  45. using namespace llvm;
  46. namespace {
  47. struct OrderMap {
  48. DenseMap<const Value *, std::pair<unsigned, bool>> IDs;
  49. unsigned LastGlobalConstantID = 0;
  50. unsigned LastGlobalValueID = 0;
  51. OrderMap() = default;
  52. bool isGlobalConstant(unsigned ID) const {
  53. return ID <= LastGlobalConstantID;
  54. }
  55. bool isGlobalValue(unsigned ID) const {
  56. return ID <= LastGlobalValueID && !isGlobalConstant(ID);
  57. }
  58. unsigned size() const { return IDs.size(); }
  59. std::pair<unsigned, bool> &operator[](const Value *V) { return IDs[V]; }
  60. std::pair<unsigned, bool> lookup(const Value *V) const {
  61. return IDs.lookup(V);
  62. }
  63. void index(const Value *V) {
  64. // Explicitly sequence get-size and insert-value operations to avoid UB.
  65. unsigned ID = IDs.size() + 1;
  66. IDs[V].first = ID;
  67. }
  68. };
  69. } // end anonymous namespace
/// Assign \p V an ID in \p OM, post-order: the operands of non-GlobalValue
/// constants are numbered first, then V itself. Already-numbered values are
/// left untouched.
static void orderValue(const Value *V, OrderMap &OM) {
  // Already assigned an ID; nothing to do.
  if (OM.lookup(V).first)
    return;

  if (const Constant *C = dyn_cast<Constant>(V)) {
    if (C->getNumOperands() && !isa<GlobalValue>(C)) {
      // Number the constant's operands first. Basic blocks and global values
      // are skipped: globals are numbered separately by orderModule().
      for (const Value *Op : C->operands())
        if (!isa<BasicBlock>(Op) && !isa<GlobalValue>(Op))
          orderValue(Op, OM);
      // A constant shufflevector's mask is serialized as an extra operand in
      // bitcode, so it needs an ID as well.
      if (auto *CE = dyn_cast<ConstantExpr>(C))
        if (CE->getOpcode() == Instruction::ShuffleVector)
          orderValue(CE->getShuffleMaskForBitcode(), OM);
    }
  }

  // Note: we cannot cache this lookup above, since inserting into the map
  // changes the map's size, and thus affects the other IDs.
  OM.index(V);
}
/// Build an OrderMap assigning every serialized value in \p M an ID in the
/// order the bitcode reader will materialize them.
static OrderMap orderModule(const Module &M) {
  // This needs to match the order used by ValueEnumerator::ValueEnumerator()
  // and ValueEnumerator::incorporateFunction().
  OrderMap OM;

  // In the reader, initializers of GlobalValues are set *after* all the
  // globals have been read.  Rather than awkwardly modeling this behaviour
  // directly in predictValueUseListOrderImpl(), just assign IDs to
  // initializers of GlobalValues before GlobalValues themselves to model this
  // implicitly.
  for (const GlobalVariable &G : M.globals())
    if (G.hasInitializer())
      if (!isa<GlobalValue>(G.getInitializer()))
        orderValue(G.getInitializer(), OM);
  for (const GlobalAlias &A : M.aliases())
    if (!isa<GlobalValue>(A.getAliasee()))
      orderValue(A.getAliasee(), OM);
  for (const GlobalIFunc &I : M.ifuncs())
    if (!isa<GlobalValue>(I.getResolver()))
      orderValue(I.getResolver(), OM);
  // Optional function data (prologue, prefix, personality) is also read early.
  for (const Function &F : M) {
    for (const Use &U : F.operands())
      if (!isa<GlobalValue>(U.get()))
        orderValue(U.get(), OM);
  }

  // As constants used in metadata operands are emitted as module-level
  // constants, we must order them before other operands. Also, we must order
  // these before global values, as these will be read before setting the
  // global values' initializers. The latter matters for constants which have
  // uses towards other constants that are used as initializers.
  auto orderConstantValue = [&OM](const Value *V) {
    if ((isa<Constant>(V) && !isa<GlobalValue>(V)) || isa<InlineAsm>(V))
      orderValue(V, OM);
  };
  for (const Function &F : M) {
    if (F.isDeclaration())
      continue;
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        for (const Value *V : I.operands()) {
          // Look through MetadataAsValue wrappers for constants referenced
          // from metadata (directly or inside a DIArgList).
          if (const auto *MAV = dyn_cast<MetadataAsValue>(V)) {
            if (const auto *VAM =
                    dyn_cast<ValueAsMetadata>(MAV->getMetadata())) {
              orderConstantValue(VAM->getValue());
            } else if (const auto *AL =
                           dyn_cast<DIArgList>(MAV->getMetadata())) {
              for (const auto *VAM : AL->getArgs())
                orderConstantValue(VAM->getValue());
            }
          }
        }
  }
  OM.LastGlobalConstantID = OM.size();

  // Initializers of GlobalValues are processed in
  // BitcodeReader::ResolveGlobalAndAliasInits().  Match the order there rather
  // than ValueEnumerator, and match the code in predictValueUseListOrderImpl()
  // by giving IDs in reverse order.
  //
  // Since GlobalValues never reference each other directly (just through
  // initializers), their relative IDs only matter for determining order of
  // uses in their initializers.
  for (const Function &F : M)
    orderValue(&F, OM);
  for (const GlobalAlias &A : M.aliases())
    orderValue(&A, OM);
  for (const GlobalIFunc &I : M.ifuncs())
    orderValue(&I, OM);
  for (const GlobalVariable &G : M.globals())
    orderValue(&G, OM);
  OM.LastGlobalValueID = OM.size();

  for (const Function &F : M) {
    if (F.isDeclaration())
      continue;
    // Here we need to match the union of ValueEnumerator::incorporateFunction()
    // and WriteFunction().  Basic blocks are implicitly declared before
    // anything else (by declaring their size).
    for (const BasicBlock &BB : F)
      orderValue(&BB, OM);
    for (const Argument &A : F.args())
      orderValue(&A, OM);
    // Function-local constants (and inline asm) come next, then instructions.
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB) {
        for (const Value *Op : I.operands())
          if ((isa<Constant>(*Op) && !isa<GlobalValue>(*Op)) ||
              isa<InlineAsm>(*Op))
            orderValue(Op, OM);
        if (auto *SVI = dyn_cast<ShuffleVectorInst>(&I))
          orderValue(SVI->getShuffleMaskForBitcode(), OM);
      }
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        orderValue(&I, OM);
  }
  return OM;
}
/// Predict the use-list order the reader will reconstruct for \p V (which has
/// id \p ID in \p OM) and, if it differs from the current in-memory order,
/// record the required shuffle on \p Stack.
static void predictValueUseListOrderImpl(const Value *V, const Function *F,
                                         unsigned ID, const OrderMap &OM,
                                         UseListOrderStack &Stack) {
  // Predict use-list order for this one.
  using Entry = std::pair<const Use *, unsigned>;
  SmallVector<Entry, 64> List;
  for (const Use &U : V->uses())
    // Check if this user will be serialized.
    if (OM.lookup(U.getUser()).first)
      List.push_back(std::make_pair(&U, List.size()));

  if (List.size() < 2)
    // We may have lost some users.
    return;

  bool IsGlobalValue = OM.isGlobalValue(ID);
  // Sort the uses into the order the reader will create them in.
  llvm::sort(List, [&](const Entry &L, const Entry &R) {
    const Use *LU = L.first;
    const Use *RU = R.first;
    if (LU == RU)
      return false;

    auto LID = OM.lookup(LU->getUser()).first;
    auto RID = OM.lookup(RU->getUser()).first;

    // Global values are processed in reverse order.
    //
    // Moreover, initializers of GlobalValues are set *after* all the globals
    // have been read (despite having earlier IDs).  Rather than awkwardly
    // modeling this behaviour here, orderModule() has assigned IDs to
    // initializers of GlobalValues before GlobalValues themselves.
    if (OM.isGlobalValue(LID) && OM.isGlobalValue(RID)) {
      if (LID == RID)
        return LU->getOperandNo() > RU->getOperandNo();
      return LID < RID;
    }

    // Users created before V (ID less than ours) add their uses after V
    // exists, so they appear in forward order; users created after V pick up
    // the use on creation, so they appear reversed.
    // If ID is 4, then expect: 7 6 5 1 2 3.
    if (LID < RID) {
      if (RID <= ID)
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
          return true;
      return false;
    }
    if (RID < LID) {
      if (LID <= ID)
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
          return false;
      return true;
    }

    // LID and RID are equal, so we have different operands of the same user.
    // Assume operands are added in order for all instructions.
    if (LID <= ID)
      if (!IsGlobalValue) // GlobalValue uses don't get reversed.
        return LU->getOperandNo() < RU->getOperandNo();
    return LU->getOperandNo() > RU->getOperandNo();
  });

  if (llvm::is_sorted(List, [](const Entry &L, const Entry &R) {
        return L.second < R.second;
      }))
    // Order is already correct.
    return;

  // Store the shuffle.
  Stack.emplace_back(V, F, List.size());
  assert(List.size() == Stack.back().Shuffle.size() && "Wrong size");
  for (size_t I = 0, E = List.size(); I != E; ++I)
    Stack.back().Shuffle[I] = List[I].second;
}
  244. static void predictValueUseListOrder(const Value *V, const Function *F,
  245. OrderMap &OM, UseListOrderStack &Stack) {
  246. auto &IDPair = OM[V];
  247. assert(IDPair.first && "Unmapped value");
  248. if (IDPair.second)
  249. // Already predicted.
  250. return;
  251. // Do the actual prediction.
  252. IDPair.second = true;
  253. if (!V->use_empty() && std::next(V->use_begin()) != V->use_end())
  254. predictValueUseListOrderImpl(V, F, IDPair.first, OM, Stack);
  255. // Recursive descent into constants.
  256. if (const Constant *C = dyn_cast<Constant>(V)) {
  257. if (C->getNumOperands()) { // Visit GlobalValues.
  258. for (const Value *Op : C->operands())
  259. if (isa<Constant>(Op)) // Visit GlobalValues.
  260. predictValueUseListOrder(Op, F, OM, Stack);
  261. if (auto *CE = dyn_cast<ConstantExpr>(C))
  262. if (CE->getOpcode() == Instruction::ShuffleVector)
  263. predictValueUseListOrder(CE->getShuffleMaskForBitcode(), F, OM,
  264. Stack);
  265. }
  266. }
  267. }
/// Predict every use-list shuffle the writer must emit for \p M, returned as
/// a stack (functions are visited in reverse so pops come out in order).
static UseListOrderStack predictUseListOrder(const Module &M) {
  OrderMap OM = orderModule(M);

  // Use-list orders need to be serialized after all the users have been added
  // to a value, or else the shuffles will be incomplete.  Store them per
  // function in a stack.
  //
  // Aside from function order, the order of values doesn't matter much here.
  UseListOrderStack Stack;

  // We want to visit the functions backward now so we can list function-local
  // constants in the last Function they're used in.  Module-level constants
  // have already been visited above.
  for (const Function &F : llvm::reverse(M)) {
    if (F.isDeclaration())
      continue;
    for (const BasicBlock &BB : F)
      predictValueUseListOrder(&BB, &F, OM, Stack);
    for (const Argument &A : F.args())
      predictValueUseListOrder(&A, &F, OM, Stack);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB) {
        for (const Value *Op : I.operands())
          if (isa<Constant>(*Op) || isa<InlineAsm>(*Op)) // Visit GlobalValues.
            predictValueUseListOrder(Op, &F, OM, Stack);
        if (auto *SVI = dyn_cast<ShuffleVectorInst>(&I))
          predictValueUseListOrder(SVI->getShuffleMaskForBitcode(), &F, OM,
                                   Stack);
      }
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        predictValueUseListOrder(&I, &F, OM, Stack);
  }

  // Visit globals last, since the module-level use-list block will be seen
  // before the function bodies are processed.
  for (const GlobalVariable &G : M.globals())
    predictValueUseListOrder(&G, nullptr, OM, Stack);
  for (const Function &F : M)
    predictValueUseListOrder(&F, nullptr, OM, Stack);
  for (const GlobalAlias &A : M.aliases())
    predictValueUseListOrder(&A, nullptr, OM, Stack);
  for (const GlobalIFunc &I : M.ifuncs())
    predictValueUseListOrder(&I, nullptr, OM, Stack);
  for (const GlobalVariable &G : M.globals())
    if (G.hasInitializer())
      predictValueUseListOrder(G.getInitializer(), nullptr, OM, Stack);
  for (const GlobalAlias &A : M.aliases())
    predictValueUseListOrder(A.getAliasee(), nullptr, OM, Stack);
  for (const GlobalIFunc &I : M.ifuncs())
    predictValueUseListOrder(I.getResolver(), nullptr, OM, Stack);
  for (const Function &F : M) {
    for (const Use &U : F.operands())
      predictValueUseListOrder(U.get(), nullptr, OM, Stack);
  }
  return Stack;
}
  322. static bool isIntOrIntVectorValue(const std::pair<const Value*, unsigned> &V) {
  323. return V.first->getType()->isIntOrIntVectorTy();
  324. }
/// Enumerate all values, types, attributes and metadata in \p M in the order
/// the bitcode writer will emit them. The ordering here must stay in sync
/// with orderModule() above.
ValueEnumerator::ValueEnumerator(const Module &M,
                                 bool ShouldPreserveUseListOrder)
    : ShouldPreserveUseListOrder(ShouldPreserveUseListOrder) {
  if (ShouldPreserveUseListOrder)
    UseListOrders = predictUseListOrder(M);

  // Enumerate the global variables.
  for (const GlobalVariable &GV : M.globals()) {
    EnumerateValue(&GV);
    EnumerateType(GV.getValueType());
  }

  // Enumerate the functions.
  for (const Function & F : M) {
    EnumerateValue(&F);
    EnumerateType(F.getValueType());
    EnumerateAttributes(F.getAttributes());
  }

  // Enumerate the aliases.
  for (const GlobalAlias &GA : M.aliases()) {
    EnumerateValue(&GA);
    EnumerateType(GA.getValueType());
  }

  // Enumerate the ifuncs.
  for (const GlobalIFunc &GIF : M.ifuncs()) {
    EnumerateValue(&GIF);
    EnumerateType(GIF.getValueType());
  }

  // Remember what is the cutoff between globalvalue's and other constants.
  unsigned FirstConstant = Values.size();

  // Enumerate the global variable initializers and attributes.
  for (const GlobalVariable &GV : M.globals()) {
    if (GV.hasInitializer())
      EnumerateValue(GV.getInitializer());
    if (GV.hasAttributes())
      EnumerateAttributes(GV.getAttributesAsList(AttributeList::FunctionIndex));
  }

  // Enumerate the aliasees.
  for (const GlobalAlias &GA : M.aliases())
    EnumerateValue(GA.getAliasee());

  // Enumerate the ifunc resolvers.
  for (const GlobalIFunc &GIF : M.ifuncs())
    EnumerateValue(GIF.getResolver());

  // Enumerate any optional Function data (prologue, prefix, personality).
  for (const Function &F : M)
    for (const Use &U : F.operands())
      EnumerateValue(U.get());

  // Enumerate the metadata type.
  //
  // TODO: Move this to ValueEnumerator::EnumerateOperandType() once bitcode
  // only encodes the metadata type when it's used as a value.
  EnumerateType(Type::getMetadataTy(M.getContext()));

  // Insert constants and metadata that are named at module level into the slot
  // pool so that the module symbol table can refer to them...
  EnumerateValueSymbolTable(M.getValueSymbolTable());
  EnumerateNamedMetadata(M);

  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  for (const GlobalVariable &GV : M.globals()) {
    MDs.clear();
    GV.getAllMetadata(MDs);
    for (const auto &I : MDs)
      // FIXME: Pass GV to EnumerateMetadata and arrange for the bitcode writer
      // to write metadata to the global variable's own metadata block
      // (PR28134).
      EnumerateMetadata(nullptr, I.second);
  }

  // Enumerate types used by function bodies and argument lists.
  for (const Function &F : M) {
    for (const Argument &A : F.args())
      EnumerateType(A.getType());

    // Enumerate metadata attached to this function.
    MDs.clear();
    F.getAllMetadata(MDs);
    for (const auto &I : MDs)
      EnumerateMetadata(F.isDeclaration() ? nullptr : &F, I.second);

    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB) {
        for (const Use &Op : I.operands()) {
          auto *MD = dyn_cast<MetadataAsValue>(&Op);
          if (!MD) {
            EnumerateOperandType(Op);
            continue;
          }

          // Local metadata is enumerated during function-incorporation, but
          // any ConstantAsMetadata arguments in a DIArgList should be examined
          // now.
          if (isa<LocalAsMetadata>(MD->getMetadata()))
            continue;
          if (auto *AL = dyn_cast<DIArgList>(MD->getMetadata())) {
            for (auto *VAM : AL->getArgs())
              if (isa<ConstantAsMetadata>(VAM))
                EnumerateMetadata(&F, VAM);
            continue;
          }

          EnumerateMetadata(&F, MD->getMetadata());
        }
        // Types that are encoded in the instruction record itself.
        if (auto *SVI = dyn_cast<ShuffleVectorInst>(&I))
          EnumerateType(SVI->getShuffleMaskForBitcode()->getType());
        if (auto *GEP = dyn_cast<GetElementPtrInst>(&I))
          EnumerateType(GEP->getSourceElementType());
        if (auto *AI = dyn_cast<AllocaInst>(&I))
          EnumerateType(AI->getAllocatedType());
        EnumerateType(I.getType());
        if (const auto *Call = dyn_cast<CallBase>(&I)) {
          EnumerateAttributes(Call->getAttributes());
          EnumerateType(Call->getFunctionType());
        }

        // Enumerate metadata attached with this instruction.
        MDs.clear();
        I.getAllMetadataOtherThanDebugLoc(MDs);
        for (unsigned i = 0, e = MDs.size(); i != e; ++i)
          EnumerateMetadata(&F, MDs[i].second);

        // Don't enumerate the location directly -- it has a special record
        // type -- but enumerate its operands.
        if (DILocation *L = I.getDebugLoc())
          for (const Metadata *Op : L->operands())
            EnumerateMetadata(&F, Op);
      }
  }

  // Optimize constant ordering.
  OptimizeConstants(FirstConstant, Values.size());

  // Organize metadata ordering.
  organizeMetadata();
}
  447. unsigned ValueEnumerator::getInstructionID(const Instruction *Inst) const {
  448. InstructionMapType::const_iterator I = InstructionMap.find(Inst);
  449. assert(I != InstructionMap.end() && "Instruction is not mapped!");
  450. return I->second;
  451. }
  452. unsigned ValueEnumerator::getComdatID(const Comdat *C) const {
  453. unsigned ComdatID = Comdats.idFor(C);
  454. assert(ComdatID && "Comdat not found!");
  455. return ComdatID;
  456. }
  457. void ValueEnumerator::setInstructionID(const Instruction *I) {
  458. InstructionMap[I] = InstructionCount++;
  459. }
  460. unsigned ValueEnumerator::getValueID(const Value *V) const {
  461. if (auto *MD = dyn_cast<MetadataAsValue>(V))
  462. return getMetadataID(MD->getMetadata());
  463. ValueMapType::const_iterator I = ValueMap.find(V);
  464. assert(I != ValueMap.end() && "Value not in slotcalculator!");
  465. return I->second-1;
  466. }
#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
/// Debug helper: dump both the value map and the metadata map to dbgs().
LLVM_DUMP_METHOD void ValueEnumerator::dump() const {
  print(dbgs(), ValueMap, "Default");
  dbgs() << '\n';
  print(dbgs(), MetadataMap, "MetaData");
  dbgs() << '\n';
}
#endif
  475. void ValueEnumerator::print(raw_ostream &OS, const ValueMapType &Map,
  476. const char *Name) const {
  477. OS << "Map Name: " << Name << "\n";
  478. OS << "Size: " << Map.size() << "\n";
  479. for (const auto &I : Map) {
  480. const Value *V = I.first;
  481. if (V->hasName())
  482. OS << "Value: " << V->getName();
  483. else
  484. OS << "Value: [null]\n";
  485. V->print(errs());
  486. errs() << '\n';
  487. OS << " Uses(" << V->getNumUses() << "):";
  488. for (const Use &U : V->uses()) {
  489. if (&U != &*V->use_begin())
  490. OS << ",";
  491. if(U->hasName())
  492. OS << " " << U->getName();
  493. else
  494. OS << " [null]";
  495. }
  496. OS << "\n\n";
  497. }
  498. }
  499. void ValueEnumerator::print(raw_ostream &OS, const MetadataMapType &Map,
  500. const char *Name) const {
  501. OS << "Map Name: " << Name << "\n";
  502. OS << "Size: " << Map.size() << "\n";
  503. for (const auto &I : Map) {
  504. const Metadata *MD = I.first;
  505. OS << "Metadata: slot = " << I.second.ID << "\n";
  506. OS << "Metadata: function = " << I.second.F << "\n";
  507. MD->print(OS);
  508. OS << "\n";
  509. }
  510. }
/// OptimizeConstants - Reorder constant pool for denser encoding.
void ValueEnumerator::OptimizeConstants(unsigned CstStart, unsigned CstEnd) {
  // Nothing to gain from reordering zero or one constants.
  if (CstStart == CstEnd || CstStart+1 == CstEnd) return;

  if (ShouldPreserveUseListOrder)
    // Optimizing constants makes the use-list order difficult to predict.
    // Disable it for now when trying to preserve the order.
    return;

  // Group constants by type plane, and within a plane put the most frequently
  // used constants first so they receive the smallest (cheapest) IDs.
  std::stable_sort(Values.begin() + CstStart, Values.begin() + CstEnd,
                   [this](const std::pair<const Value *, unsigned> &LHS,
                          const std::pair<const Value *, unsigned> &RHS) {
    // Sort by plane.
    if (LHS.first->getType() != RHS.first->getType())
      return getTypeID(LHS.first->getType()) < getTypeID(RHS.first->getType());
    // Then by frequency.
    return LHS.second > RHS.second;
  });

  // Ensure that integer and vector of integer constants are at the start of the
  // constant pool.  This is important so that GEP structure indices come before
  // gep constant exprs.
  std::stable_partition(Values.begin() + CstStart, Values.begin() + CstEnd,
                        isIntOrIntVectorValue);

  // Rebuild the modified portion of ValueMap (stored IDs are 1-based).
  for (; CstStart != CstEnd; ++CstStart)
    ValueMap[Values[CstStart].first] = CstStart+1;
}
  536. /// EnumerateValueSymbolTable - Insert all of the values in the specified symbol
  537. /// table into the values table.
  538. void ValueEnumerator::EnumerateValueSymbolTable(const ValueSymbolTable &VST) {
  539. for (ValueSymbolTable::const_iterator VI = VST.begin(), VE = VST.end();
  540. VI != VE; ++VI)
  541. EnumerateValue(VI->getValue());
  542. }
  543. /// Insert all of the values referenced by named metadata in the specified
  544. /// module.
  545. void ValueEnumerator::EnumerateNamedMetadata(const Module &M) {
  546. for (const auto &I : M.named_metadata())
  547. EnumerateNamedMDNode(&I);
  548. }
  549. void ValueEnumerator::EnumerateNamedMDNode(const NamedMDNode *MD) {
  550. for (unsigned i = 0, e = MD->getNumOperands(); i != e; ++i)
  551. EnumerateMetadata(nullptr, MD->getOperand(i));
  552. }
  553. unsigned ValueEnumerator::getMetadataFunctionID(const Function *F) const {
  554. return F ? getValueID(F) + 1 : 0;
  555. }
  556. void ValueEnumerator::EnumerateMetadata(const Function *F, const Metadata *MD) {
  557. EnumerateMetadata(getMetadataFunctionID(F), MD);
  558. }
  559. void ValueEnumerator::EnumerateFunctionLocalMetadata(
  560. const Function &F, const LocalAsMetadata *Local) {
  561. EnumerateFunctionLocalMetadata(getMetadataFunctionID(&F), Local);
  562. }
  563. void ValueEnumerator::EnumerateFunctionLocalListMetadata(
  564. const Function &F, const DIArgList *ArgList) {
  565. EnumerateFunctionLocalListMetadata(getMetadataFunctionID(&F), ArgList);
  566. }
/// Clear the function tag on \p FirstMD and, transitively, on every
/// already-mapped MDNode operand that carried one, effectively promoting the
/// entries to module-level metadata.
void ValueEnumerator::dropFunctionFromMetadata(
    MetadataMapType::value_type &FirstMD) {
  SmallVector<const MDNode *, 64> Worklist;
  auto push = [&Worklist](MetadataMapType::value_type &MD) {
    auto &Entry = MD.second;

    // Nothing to do if this metadata isn't tagged.
    if (!Entry.F)
      return;

    // Drop the function tag.
    Entry.F = 0;

    // If this has an ID and is an MDNode, then its operands have entries as
    // well.  We need to drop the function from them too.
    if (Entry.ID)
      if (auto *N = dyn_cast<MDNode>(MD.first))
        Worklist.push_back(N);
  };
  push(FirstMD);
  // Walk transitive operands; only operands already present in MetadataMap
  // can carry a tag, so unmapped ones are skipped.
  while (!Worklist.empty())
    for (const Metadata *Op : Worklist.pop_back_val()->operands()) {
      if (!Op)
        continue;
      auto MD = MetadataMap.find(Op);
      if (MD != MetadataMap.end())
        push(*MD);
    }
}
/// Enumerate \p MD and its transitive operands, assigning IDs in post-order
/// (operands before users) so the reader rarely sees forward references.
void ValueEnumerator::EnumerateMetadata(unsigned F, const Metadata *MD) {
  // It's vital for reader efficiency that uniqued subgraphs are done in
  // post-order; it's expensive when their operands have forward references.
  // If a distinct node is referenced from a uniqued node, it'll be delayed
  // until the uniqued subgraph has been completely traversed.
  SmallVector<const MDNode *, 32> DelayedDistinctNodes;

  // Start by enumerating MD, and then work through its transitive operands in
  // post-order.  This requires a depth-first search.
  SmallVector<std::pair<const MDNode *, MDNode::op_iterator>, 32> Worklist;
  if (const MDNode *N = enumerateMetadataImpl(F, MD))
    Worklist.push_back(std::make_pair(N, N->op_begin()));

  while (!Worklist.empty()) {
    const MDNode *N = Worklist.back().first;

    // Enumerate operands until we hit a new node.  We need to traverse these
    // nodes' operands before visiting the rest of N's operands.
    // (enumerateMetadataImpl() returns non-null only for not-yet-seen MDNodes.)
    MDNode::op_iterator I = std::find_if(
        Worklist.back().second, N->op_end(),
        [&](const Metadata *MD) { return enumerateMetadataImpl(F, MD); });
    if (I != N->op_end()) {
      auto *Op = cast<MDNode>(*I);
      // Remember where to resume N's operand scan.
      Worklist.back().second = ++I;

      // Delay traversing Op if it's a distinct node and N is uniqued.
      if (Op->isDistinct() && !N->isDistinct())
        DelayedDistinctNodes.push_back(Op);
      else
        Worklist.push_back(std::make_pair(Op, Op->op_begin()));
      continue;
    }

    // All the operands have been visited.  Now assign an ID.
    Worklist.pop_back();
    MDs.push_back(N);
    MetadataMap[N].ID = MDs.size();

    // Flush out any delayed distinct nodes; these are all the distinct nodes
    // that are leaves in last uniqued subgraph.
    if (Worklist.empty() || Worklist.back().first->isDistinct()) {
      for (const MDNode *N : DelayedDistinctNodes)
        Worklist.push_back(std::make_pair(N, N->op_begin()));
      DelayedDistinctNodes.clear();
    }
  }
}
/// Insert \p MD into the map tagged with function \p F. Returns the node
/// itself when MD is a not-yet-seen MDNode (the caller must traverse its
/// operands and assign the ID afterwards); returns null for leaves and for
/// anything already mapped.
const MDNode *ValueEnumerator::enumerateMetadataImpl(unsigned F, const Metadata *MD) {
  if (!MD)
    return nullptr;

  assert(
      (isa<MDNode>(MD) || isa<MDString>(MD) || isa<ConstantAsMetadata>(MD)) &&
      "Invalid metadata kind");

  auto Insertion = MetadataMap.insert(std::make_pair(MD, MDIndex(F)));
  MDIndex &Entry = Insertion.first->second;
  if (!Insertion.second) {
    // Already mapped.  If F doesn't match the function tag, drop it.
    if (Entry.hasDifferentFunction(F))
      dropFunctionFromMetadata(*Insertion.first);
    return nullptr;
  }

  // Don't assign IDs to metadata nodes here; EnumerateMetadata() does that in
  // post-order after their operands.
  if (auto *N = dyn_cast<MDNode>(MD))
    return N;

  // Save the metadata (leaf nodes get their ID immediately).
  MDs.push_back(MD);
  Entry.ID = MDs.size();

  // Enumerate the constant, if any.
  if (auto *C = dyn_cast<ConstantAsMetadata>(MD))
    EnumerateValue(C->getValue());

  return nullptr;
}
  659. /// EnumerateFunctionLocalMetadata - Incorporate function-local metadata
  660. /// information reachable from the metadata.
  661. void ValueEnumerator::EnumerateFunctionLocalMetadata(
  662. unsigned F, const LocalAsMetadata *Local) {
  663. assert(F && "Expected a function");
  664. // Check to see if it's already in!
  665. MDIndex &Index = MetadataMap[Local];
  666. if (Index.ID) {
  667. assert(Index.F == F && "Expected the same function");
  668. return;
  669. }
  670. MDs.push_back(Local);
  671. Index.F = F;
  672. Index.ID = MDs.size();
  673. EnumerateValue(Local->getValue());
  674. }
  675. /// EnumerateFunctionLocalListMetadata - Incorporate function-local metadata
  676. /// information reachable from the metadata.
  677. void ValueEnumerator::EnumerateFunctionLocalListMetadata(
  678. unsigned F, const DIArgList *ArgList) {
  679. assert(F && "Expected a function");
  680. // Check to see if it's already in!
  681. MDIndex &Index = MetadataMap[ArgList];
  682. if (Index.ID) {
  683. assert(Index.F == F && "Expected the same function");
  684. return;
  685. }
  686. for (ValueAsMetadata *VAM : ArgList->getArgs()) {
  687. if (isa<LocalAsMetadata>(VAM)) {
  688. assert(MetadataMap.count(VAM) &&
  689. "LocalAsMetadata should be enumerated before DIArgList");
  690. assert(MetadataMap[VAM].F == F &&
  691. "Expected LocalAsMetadata in the same function");
  692. } else {
  693. assert(isa<ConstantAsMetadata>(VAM) &&
  694. "Expected LocalAsMetadata or ConstantAsMetadata");
  695. assert(ValueMap.count(VAM->getValue()) &&
  696. "Constant should be enumerated beforeDIArgList");
  697. EnumerateMetadata(F, VAM);
  698. }
  699. }
  700. MDs.push_back(ArgList);
  701. Index.F = F;
  702. Index.ID = MDs.size();
  703. }
  704. static unsigned getMetadataTypeOrder(const Metadata *MD) {
  705. // Strings are emitted in bulk and must come first.
  706. if (isa<MDString>(MD))
  707. return 0;
  708. // ConstantAsMetadata doesn't reference anything. We may as well shuffle it
  709. // to the front since we can detect it.
  710. auto *N = dyn_cast<MDNode>(MD);
  711. if (!N)
  712. return 1;
  713. // The reader is fast forward references for distinct node operands, but slow
  714. // when uniqued operands are unresolved.
  715. return N->isDistinct() ? 2 : 3;
  716. }
/// Reorder MDs so that module-level metadata (sorted string-first) comes
/// first, then slice the remaining function-local metadata into per-function
/// ranges stored in FunctionMDs/FunctionMDInfo, fixing up every ID in
/// MetadataMap to match the new order.
void ValueEnumerator::organizeMetadata() {
  assert(MetadataMap.size() == MDs.size() &&
         "Metadata map and vector out of sync");

  if (MDs.empty())
    return;

  // Copy out the index information from MetadataMap in order to choose a new
  // order.
  SmallVector<MDIndex, 64> Order;
  Order.reserve(MetadataMap.size());
  for (const Metadata *MD : MDs)
    Order.push_back(MetadataMap.lookup(MD));

  // Partition:
  //   - by function, then
  //   - by isa<MDString>
  // and then sort by the original/current ID.  Since the IDs are guaranteed to
  // be unique, the result of std::sort will be deterministic.  There's no need
  // for std::stable_sort.
  llvm::sort(Order, [this](MDIndex LHS, MDIndex RHS) {
    return std::make_tuple(LHS.F, getMetadataTypeOrder(LHS.get(MDs)), LHS.ID) <
           std::make_tuple(RHS.F, getMetadataTypeOrder(RHS.get(MDs)), RHS.ID);
  });

  // Rebuild MDs, index the metadata ranges for each function in FunctionMDs,
  // and fix up MetadataMap.
  std::vector<const Metadata *> OldMDs;
  MDs.swap(OldMDs);
  MDs.reserve(OldMDs.size());
  // Module-level entries (F == 0) sorted to the front; renumber them 1-based.
  for (unsigned I = 0, E = Order.size(); I != E && !Order[I].F; ++I) {
    auto *MD = Order[I].get(OldMDs);
    MDs.push_back(MD);
    MetadataMap[MD].ID = I + 1;
    if (isa<MDString>(MD))
      ++NumMDStrings;
  }

  // Return early if there's nothing for the functions.
  if (MDs.size() == Order.size())
    return;

  // Build the function metadata ranges.
  MDRange R;
  FunctionMDs.reserve(OldMDs.size());
  unsigned PrevF = 0;
  // ID restarts at MDs.size() for each function, so a function's metadata is
  // numbered as if appended directly after the module-level block.
  for (unsigned I = MDs.size(), E = Order.size(), ID = MDs.size(); I != E;
       ++I) {
    unsigned F = Order[I].F;
    if (!PrevF) {
      PrevF = F;
    } else if (PrevF != F) {
      // Function boundary: close out the previous range and open a new one.
      R.Last = FunctionMDs.size();
      std::swap(R, FunctionMDInfo[PrevF]);
      R.First = FunctionMDs.size();
      ID = MDs.size();
      PrevF = F;
    }

    auto *MD = Order[I].get(OldMDs);
    FunctionMDs.push_back(MD);
    MetadataMap[MD].ID = ++ID;
    if (isa<MDString>(MD))
      ++R.NumStrings;
  }
  // Close out the final function's range.
  R.Last = FunctionMDs.size();
  FunctionMDInfo[PrevF] = R;
}
  778. void ValueEnumerator::incorporateFunctionMetadata(const Function &F) {
  779. NumModuleMDs = MDs.size();
  780. auto R = FunctionMDInfo.lookup(getValueID(&F) + 1);
  781. NumMDStrings = R.NumStrings;
  782. MDs.insert(MDs.end(), FunctionMDs.begin() + R.First,
  783. FunctionMDs.begin() + R.Last);
  784. }
/// Assign a table ID to value V, or bump its use count if it already has one.
/// Constants (other than globals) are enumerated operands-first so the reader
/// is less likely to need forward references.
void ValueEnumerator::EnumerateValue(const Value *V) {
  assert(!V->getType()->isVoidTy() && "Can't insert void values!");
  assert(!isa<MetadataAsValue>(V) && "EnumerateValue doesn't handle Metadata!");

  // Check to see if it's already in!
  unsigned &ValueID = ValueMap[V];
  if (ValueID) {
    // Increment use count.
    Values[ValueID-1].second++;
    return;
  }

  if (auto *GO = dyn_cast<GlobalObject>(V))
    if (const Comdat *C = GO->getComdat())
      Comdats.insert(C);

  // Enumerate the type of this value.
  EnumerateType(V->getType());

  if (const Constant *C = dyn_cast<Constant>(V)) {
    if (isa<GlobalValue>(C)) {
      // Initializers for globals are handled explicitly elsewhere.
    } else if (C->getNumOperands()) {
      // If a constant has operands, enumerate them.  This makes sure that if a
      // constant has uses (for example an array of const ints), that they are
      // inserted also.

      // We prefer to enumerate them with values before we enumerate the user
      // itself.  This makes it more likely that we can avoid forward references
      // in the reader.  We know that there can be no cycles in the constants
      // graph that don't go through a global variable.
      for (User::const_op_iterator I = C->op_begin(), E = C->op_end();
           I != E; ++I)
        if (!isa<BasicBlock>(*I)) // Don't enumerate BB operand to BlockAddress.
          EnumerateValue(*I);
      if (auto *CE = dyn_cast<ConstantExpr>(C))
        if (CE->getOpcode() == Instruction::ShuffleVector)
          EnumerateValue(CE->getShuffleMaskForBitcode());

      // Finally, add the value.  Doing this could make the ValueID reference be
      // dangling, don't reuse it: the recursive EnumerateValue calls above may
      // have inserted into (and rehashed) ValueMap, so re-index instead.
      Values.push_back(std::make_pair(V, 1U));
      ValueMap[V] = Values.size();
      return;
    }
  }

  // Add the value.  (No recursion happened on this path, so the ValueID
  // reference obtained above is still valid.)
  Values.push_back(std::make_pair(V, 1U));
  ValueID = Values.size();
}
/// Assign a table ID to type Ty, enumerating all of its subtypes first so the
/// reader can rebuild each type from already-known parts.  Non-literal structs
/// may be forward-referenced, which is how recursive types terminate.
void ValueEnumerator::EnumerateType(Type *Ty) {
  unsigned *TypeID = &TypeMap[Ty];

  // We've already seen this type.
  if (*TypeID)
    return;

  // If it is a non-anonymous struct, mark the type as being visited so that we
  // don't recursively visit it.  This is safe because we allow forward
  // references of these in the bitcode reader.
  if (StructType *STy = dyn_cast<StructType>(Ty))
    if (!STy->isLiteral())
      *TypeID = ~0U; // ~0U is the "visit in progress" sentinel.

  // Enumerate all of the subtypes before we enumerate this type.  This ensures
  // that the type will be enumerated in an order that can be directly built.
  for (Type *SubTy : Ty->subtypes())
    EnumerateType(SubTy);

  // Refresh the TypeID pointer in case the table rehashed (the recursive
  // calls above may have inserted into TypeMap).
  TypeID = &TypeMap[Ty];

  // Check to see if we got the pointer another way.  This can happen when
  // enumerating recursive types that hit the base case deeper than they start.
  //
  // If this is actually a struct that we are treating as forward ref'able,
  // then emit the definition now that all of its contents are available.
  if (*TypeID && *TypeID != ~0U)
    return;

  // Add this type now that its contents are all happily enumerated.
  Types.push_back(Ty);

  *TypeID = Types.size();
}
  857. // Enumerate the types for the specified value. If the value is a constant,
  858. // walk through it, enumerating the types of the constant.
  859. void ValueEnumerator::EnumerateOperandType(const Value *V) {
  860. EnumerateType(V->getType());
  861. assert(!isa<MetadataAsValue>(V) && "Unexpected metadata operand");
  862. const Constant *C = dyn_cast<Constant>(V);
  863. if (!C)
  864. return;
  865. // If this constant is already enumerated, ignore it, we know its type must
  866. // be enumerated.
  867. if (ValueMap.count(C))
  868. return;
  869. // This constant may have operands, make sure to enumerate the types in
  870. // them.
  871. for (const Value *Op : C->operands()) {
  872. // Don't enumerate basic blocks here, this happens as operands to
  873. // blockaddress.
  874. if (isa<BasicBlock>(Op))
  875. continue;
  876. EnumerateOperandType(Op);
  877. }
  878. if (auto *CE = dyn_cast<ConstantExpr>(C)) {
  879. if (CE->getOpcode() == Instruction::ShuffleVector)
  880. EnumerateOperandType(CE->getShuffleMaskForBitcode());
  881. if (CE->getOpcode() == Instruction::GetElementPtr)
  882. EnumerateType(cast<GEPOperator>(CE)->getSourceElementType());
  883. }
  884. }
  885. void ValueEnumerator::EnumerateAttributes(AttributeList PAL) {
  886. if (PAL.isEmpty()) return; // null is always 0.
  887. // Do a lookup.
  888. unsigned &Entry = AttributeListMap[PAL];
  889. if (Entry == 0) {
  890. // Never saw this before, add it.
  891. AttributeLists.push_back(PAL);
  892. Entry = AttributeLists.size();
  893. }
  894. // Do lookups for all attribute groups.
  895. for (unsigned i : PAL.indexes()) {
  896. AttributeSet AS = PAL.getAttributes(i);
  897. if (!AS.hasAttributes())
  898. continue;
  899. IndexAndAttrSet Pair = {i, AS};
  900. unsigned &Entry = AttributeGroupMap[Pair];
  901. if (Entry == 0) {
  902. AttributeGroups.push_back(Pair);
  903. Entry = AttributeGroups.size();
  904. for (Attribute Attr : AS) {
  905. if (Attr.isTypeAttribute())
  906. EnumerateType(Attr.getValueAsType());
  907. }
  908. }
  909. }
  910. }
/// Extend the module-level tables with everything needed to emit the body of
/// function F: its metadata slice, arguments, function-level constants, basic
/// blocks, attributes, instructions, and function-local metadata — in that
/// order.  purgeFunction() undoes all of it.
void ValueEnumerator::incorporateFunction(const Function &F) {
  InstructionCount = 0;
  NumModuleValues = Values.size();

  // Add global metadata to the function block.  This doesn't include
  // LocalAsMetadata.
  incorporateFunctionMetadata(F);

  // Adding function arguments to the value table.
  for (const auto &I : F.args()) {
    EnumerateValue(&I);
    // Types carried by pointer-annotating attributes must be enumerated too.
    if (I.hasAttribute(Attribute::ByVal))
      EnumerateType(I.getParamByValType());
    else if (I.hasAttribute(Attribute::StructRet))
      EnumerateType(I.getParamStructRetType());
    else if (I.hasAttribute(Attribute::ByRef))
      EnumerateType(I.getParamByRefType());
  }
  FirstFuncConstantID = Values.size();

  // Add all function-level constants to the value table.
  for (const BasicBlock &BB : F) {
    for (const Instruction &I : BB) {
      for (const Use &OI : I.operands()) {
        if ((isa<Constant>(OI) && !isa<GlobalValue>(OI)) || isa<InlineAsm>(OI))
          EnumerateValue(OI);
      }
      // Shuffle masks are stored out-of-line and need explicit enumeration.
      if (auto *SVI = dyn_cast<ShuffleVectorInst>(&I))
        EnumerateValue(SVI->getShuffleMaskForBitcode());
    }
    BasicBlocks.push_back(&BB);
    ValueMap[&BB] = BasicBlocks.size();
  }

  // Optimize the constant layout.
  OptimizeConstants(FirstFuncConstantID, Values.size());

  // Add the function's parameter attributes so they are available for use in
  // the function's instruction.
  EnumerateAttributes(F.getAttributes());

  FirstInstID = Values.size();

  SmallVector<LocalAsMetadata *, 8> FnLocalMDVector;
  SmallVector<DIArgList *, 8> ArgListMDVector;
  // Add all of the instructions.
  for (const BasicBlock &BB : F) {
    for (const Instruction &I : BB) {
      for (const Use &OI : I.operands()) {
        if (auto *MD = dyn_cast<MetadataAsValue>(&OI)) {
          if (auto *Local = dyn_cast<LocalAsMetadata>(MD->getMetadata())) {
            // Enumerate metadata after the instructions they might refer to.
            FnLocalMDVector.push_back(Local);
          } else if (auto *ArgList = dyn_cast<DIArgList>(MD->getMetadata())) {
            ArgListMDVector.push_back(ArgList);
            for (ValueAsMetadata *VMD : ArgList->getArgs()) {
              if (auto *Local = dyn_cast<LocalAsMetadata>(VMD)) {
                // Enumerate metadata after the instructions they might refer
                // to.
                FnLocalMDVector.push_back(Local);
              }
            }
          }
        }
      }
      if (!I.getType()->isVoidTy())
        EnumerateValue(&I);
    }
  }

  // Add all of the function-local metadata.
  for (unsigned i = 0, e = FnLocalMDVector.size(); i != e; ++i) {
    // At this point, every local values have been incorporated, we shouldn't
    // have a metadata operand that references a value that hasn't been seen.
    assert(ValueMap.count(FnLocalMDVector[i]->getValue()) &&
           "Missing value for metadata operand");
    EnumerateFunctionLocalMetadata(F, FnLocalMDVector[i]);
  }
  // DIArgList entries must come after function-local metadata, as it is not
  // possible to forward-reference them.
  for (const DIArgList *ArgList : ArgListMDVector)
    EnumerateFunctionLocalListMetadata(F, ArgList);
}
  986. void ValueEnumerator::purgeFunction() {
  987. /// Remove purged values from the ValueMap.
  988. for (unsigned i = NumModuleValues, e = Values.size(); i != e; ++i)
  989. ValueMap.erase(Values[i].first);
  990. for (unsigned i = NumModuleMDs, e = MDs.size(); i != e; ++i)
  991. MetadataMap.erase(MDs[i]);
  992. for (const BasicBlock *BB : BasicBlocks)
  993. ValueMap.erase(BB);
  994. Values.resize(NumModuleValues);
  995. MDs.resize(NumModuleMDs);
  996. BasicBlocks.clear();
  997. NumMDStrings = 0;
  998. }
  999. static void IncorporateFunctionInfoGlobalBBIDs(const Function *F,
  1000. DenseMap<const BasicBlock*, unsigned> &IDMap) {
  1001. unsigned Counter = 0;
  1002. for (const BasicBlock &BB : *F)
  1003. IDMap[&BB] = ++Counter;
  1004. }
/// getGlobalBasicBlockID - This returns the function-specific ID for the
/// specified basic block.  This is relatively expensive information, so it
/// should only be used by rare constructs such as address-of-label.
unsigned ValueEnumerator::getGlobalBasicBlockID(const BasicBlock *BB) const {
  // The map stores ID+1 so that 0 can mean "not computed yet".
  unsigned &Idx = GlobalBasicBlockIDs[BB];
  if (Idx != 0)
    return Idx-1;

  // Cache miss: number every block in BB's parent function, then recurse to
  // look the answer up again.  We deliberately do not reuse Idx here —
  // inserting into the map may have rehashed it and invalidated the reference.
  IncorporateFunctionInfoGlobalBBIDs(BB->getParent(), GlobalBasicBlockIDs);
  return getGlobalBasicBlockID(BB);
}
  1015. uint64_t ValueEnumerator::computeBitsRequiredForTypeIndicies() const {
  1016. return Log2_32_Ceil(getTypes().size() + 1);
  1017. }