//===- DebugInfoMetadata.cpp - Implement debug info metadata --------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the debug info Metadata classes.
//
//===----------------------------------------------------------------------===//

#include "llvm/IR/DebugInfoMetadata.h"
#include "LLVMContextImpl.h"
#include "MetadataImpl.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/StringSwitch.h"
#include "llvm/BinaryFormat/Dwarf.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"

#include <numeric>
#include <optional>

using namespace llvm;

namespace llvm {
// Use FS-AFDO discriminator.
cl::opt<bool> EnableFSDiscriminator(
    "enable-fs-discriminator", cl::Hidden,
    cl::desc("Enable adding flow sensitive discriminators"));
} // namespace llvm

const DIExpression::FragmentInfo DebugVariable::DefaultFragment = {
    std::numeric_limits<uint64_t>::max(), std::numeric_limits<uint64_t>::min()};

DebugVariable::DebugVariable(const DbgVariableIntrinsic *DII)
    : Variable(DII->getVariable()),
      Fragment(DII->getExpression()->getFragmentInfo()),
      InlinedAt(DII->getDebugLoc().getInlinedAt()) {}

DILocation::DILocation(LLVMContext &C, StorageType Storage, unsigned Line,
                       unsigned Column, ArrayRef<Metadata *> MDs,
                       bool ImplicitCode)
    : MDNode(C, DILocationKind, Storage, MDs) {
  assert((MDs.size() == 1 || MDs.size() == 2) &&
         "Expected a scope and optional inlined-at");
  // Set line and column.
  assert(Column < (1u << 16) && "Expected 16-bit column");

  SubclassData32 = Line;
  SubclassData16 = Column;

  setImplicitCode(ImplicitCode);
}

static void adjustColumn(unsigned &Column) {
  // Set to unknown on overflow.  We only have 16 bits to play with here.
  if (Column >= (1u << 16))
    Column = 0;
}

DILocation *DILocation::getImpl(LLVMContext &Context, unsigned Line,
                                unsigned Column, Metadata *Scope,
                                Metadata *InlinedAt, bool ImplicitCode,
                                StorageType Storage, bool ShouldCreate) {
  // Fixup column.
  adjustColumn(Column);

  if (Storage == Uniqued) {
    if (auto *N = getUniqued(Context.pImpl->DILocations,
                             DILocationInfo::KeyTy(Line, Column, Scope,
                                                   InlinedAt, ImplicitCode)))
      return N;
    if (!ShouldCreate)
      return nullptr;
  } else {
    assert(ShouldCreate && "Expected non-uniqued nodes to always be created");
  }

  SmallVector<Metadata *, 2> Ops;
  Ops.push_back(Scope);
  if (InlinedAt)
    Ops.push_back(InlinedAt);
  return storeImpl(new (Ops.size(), Storage) DILocation(
                       Context, Storage, Line, Column, Ops, ImplicitCode),
                   Storage, Context.pImpl->DILocations);
}

const DILocation *
DILocation::getMergedLocations(ArrayRef<const DILocation *> Locs) {
  if (Locs.empty())
    return nullptr;
  if (Locs.size() == 1)
    return Locs[0];
  auto *Merged = Locs[0];
  for (const DILocation *L : llvm::drop_begin(Locs)) {
    Merged = getMergedLocation(Merged, L);
    if (Merged == nullptr)
      break;
  }
  return Merged;
}

const DILocation *DILocation::getMergedLocation(const DILocation *LocA,
                                                const DILocation *LocB) {
  if (!LocA || !LocB)
    return nullptr;

  if (LocA == LocB)
    return LocA;

  LLVMContext &C = LocA->getContext();

  SmallDenseMap<std::pair<DILocalScope *, DILocation *>,
                std::pair<unsigned, unsigned>, 4>
      Locations;

  DIScope *S = LocA->getScope();
  DILocation *L = LocA->getInlinedAt();
  unsigned Line = LocA->getLine();
  unsigned Col = LocA->getColumn();
  // Walk from the current source location until the file scope;
  // then, do the same for the inlined-at locations.
  auto AdvanceToParentLoc = [&S, &L, &Line, &Col]() {
    S = S->getScope();
    if (!S && L) {
      Line = L->getLine();
      Col = L->getColumn();
      S = L->getScope();
      L = L->getInlinedAt();
    }
  };

  while (S) {
    if (auto *LS = dyn_cast<DILocalScope>(S))
      Locations.try_emplace(std::make_pair(LS, L), std::make_pair(Line, Col));
    AdvanceToParentLoc();
  }

  // Walk the source locations of LocB until a match with LocA is found.
  S = LocB->getScope();
  L = LocB->getInlinedAt();
  Line = LocB->getLine();
  Col = LocB->getColumn();
  while (S) {
    if (auto *LS = dyn_cast<DILocalScope>(S)) {
      auto MatchLoc = Locations.find(std::make_pair(LS, L));
      if (MatchLoc != Locations.end()) {
        // If the lines match, keep the line but set the column to '0'.
        // If the lines don't match, pick a "line 0" location but keep
        // the current scope and inlined-at.
        bool SameLine = Line == MatchLoc->second.first;
        bool SameCol = Col == MatchLoc->second.second;
        Line = SameLine ? Line : 0;
        Col = SameLine && SameCol ? Col : 0;
        break;
      }
    }
    AdvanceToParentLoc();
  }

  if (!S) {
    // If the two locations are irreconcilable, pick any scope
    // and return a "line 0" location.
    Line = Col = 0;
    S = LocA->getScope();
  }
  return DILocation::get(C, Line, Col, S, L);
}
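
// Editorial example (not part of the upstream source): with both locations in
// the same DILocalScope and inlined-at chain,
//   merge of (line 10, col 4) and (line 10, col 9)  -> line 10, col 0
//   merge of (line 10, col 4) and (line 12, col 4)  -> line  0, col 0
// and the common scope is kept; only when no common local scope exists does
// the result fall back to LocA's scope with a "line 0" location.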
std::optional<unsigned>
DILocation::encodeDiscriminator(unsigned BD, unsigned DF, unsigned CI) {
  std::array<unsigned, 3> Components = {BD, DF, CI};
  uint64_t RemainingWork = 0U;
  // We use RemainingWork to figure out if we have no remaining components to
  // encode. For example: if BD != 0 but DF == 0 && CI == 0, we don't need to
  // encode anything for the latter 2.
  // Since any of the input components is at most 32 bits, their sum will be
  // less than 34 bits, and thus RemainingWork won't overflow.
  RemainingWork =
      std::accumulate(Components.begin(), Components.end(), RemainingWork);

  int I = 0;
  unsigned Ret = 0;
  unsigned NextBitInsertionIndex = 0;
  while (RemainingWork > 0) {
    unsigned C = Components[I++];
    RemainingWork -= C;
    unsigned EC = encodeComponent(C);
    Ret |= (EC << NextBitInsertionIndex);
    NextBitInsertionIndex += encodingBits(C);
  }

  // Encoding may be unsuccessful because of overflow. We determine success by
  // checking equivalence of components before & after encoding. Alternatively,
  // we could determine Success during encoding, but the current alternative is
  // simpler.
  unsigned TBD, TDF, TCI = 0;
  decodeDiscriminator(Ret, TBD, TDF, TCI);
  if (TBD == BD && TDF == DF && TCI == CI)
    return Ret;
  return std::nullopt;
}

void DILocation::decodeDiscriminator(unsigned D, unsigned &BD, unsigned &DF,
                                     unsigned &CI) {
  BD = getUnsignedFromPrefixEncoding(D);
  DF = getUnsignedFromPrefixEncoding(getNextComponentInDiscriminator(D));
  CI = getUnsignedFromPrefixEncoding(
      getNextComponentInDiscriminator(getNextComponentInDiscriminator(D)));
}
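
// Editorial example (illustrative only, not in the upstream file): a
// round-trip of a discriminator with BD=1, DF=2, CI=0.  encodeDiscriminator
// packs the components with the prefix encoding and returns the packed value
// only if decodeDiscriminator recovers the same three components; on overflow
// it returns std::nullopt instead.
//
//   if (auto D = DILocation::encodeDiscriminator(/*BD=*/1, /*DF=*/2, /*CI=*/0)) {
//     unsigned BD, DF, CI;
//     DILocation::decodeDiscriminator(*D, BD, DF, CI);
//     assert(BD == 1 && DF == 2 && CI == 0);
//   }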
dwarf::Tag DINode::getTag() const { return (dwarf::Tag)SubclassData16; }

DINode::DIFlags DINode::getFlag(StringRef Flag) {
  return StringSwitch<DIFlags>(Flag)
#define HANDLE_DI_FLAG(ID, NAME) .Case("DIFlag" #NAME, Flag##NAME)
#include "llvm/IR/DebugInfoFlags.def"
      .Default(DINode::FlagZero);
}

StringRef DINode::getFlagString(DIFlags Flag) {
  switch (Flag) {
#define HANDLE_DI_FLAG(ID, NAME)                                               \
  case Flag##NAME:                                                             \
    return "DIFlag" #NAME;
#include "llvm/IR/DebugInfoFlags.def"
  }
  return "";
}

DINode::DIFlags DINode::splitFlags(DIFlags Flags,
                                   SmallVectorImpl<DIFlags> &SplitFlags) {
  // Flags that are packed together need to be specially handled, so
  // that, for example, we emit "DIFlagPublic" and not
  // "DIFlagPrivate | DIFlagProtected".
  if (DIFlags A = Flags & FlagAccessibility) {
    if (A == FlagPrivate)
      SplitFlags.push_back(FlagPrivate);
    else if (A == FlagProtected)
      SplitFlags.push_back(FlagProtected);
    else
      SplitFlags.push_back(FlagPublic);
    Flags &= ~A;
  }
  if (DIFlags R = Flags & FlagPtrToMemberRep) {
    if (R == FlagSingleInheritance)
      SplitFlags.push_back(FlagSingleInheritance);
    else if (R == FlagMultipleInheritance)
      SplitFlags.push_back(FlagMultipleInheritance);
    else
      SplitFlags.push_back(FlagVirtualInheritance);
    Flags &= ~R;
  }
  if ((Flags & FlagIndirectVirtualBase) == FlagIndirectVirtualBase) {
    Flags &= ~FlagIndirectVirtualBase;
    SplitFlags.push_back(FlagIndirectVirtualBase);
  }

#define HANDLE_DI_FLAG(ID, NAME)                                               \
  if (DIFlags Bit = Flags & Flag##NAME) {                                      \
    SplitFlags.push_back(Bit);                                                 \
    Flags &= ~Bit;                                                             \
  }
#include "llvm/IR/DebugInfoFlags.def"

  return Flags;
}
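
// Editorial example (illustrative only): splitting a packed flag word.  The
// accessibility bits are handled as a group, so FlagProtected is reported as
// itself rather than as its constituent bits.
//
//   SmallVector<DINode::DIFlags, 4> Split;
//   DINode::DIFlags Rest =
//       DINode::splitFlags(DINode::FlagProtected | DINode::FlagArtificial, Split);
//   // Split == {FlagProtected, FlagArtificial}; Rest == FlagZero.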
DIScope *DIScope::getScope() const {
  if (auto *T = dyn_cast<DIType>(this))
    return T->getScope();
  if (auto *SP = dyn_cast<DISubprogram>(this))
    return SP->getScope();
  if (auto *LB = dyn_cast<DILexicalBlockBase>(this))
    return LB->getScope();
  if (auto *NS = dyn_cast<DINamespace>(this))
    return NS->getScope();
  if (auto *CB = dyn_cast<DICommonBlock>(this))
    return CB->getScope();
  if (auto *M = dyn_cast<DIModule>(this))
    return M->getScope();
  assert((isa<DIFile>(this) || isa<DICompileUnit>(this)) &&
         "Unhandled type of scope.");
  return nullptr;
}

StringRef DIScope::getName() const {
  if (auto *T = dyn_cast<DIType>(this))
    return T->getName();
  if (auto *SP = dyn_cast<DISubprogram>(this))
    return SP->getName();
  if (auto *NS = dyn_cast<DINamespace>(this))
    return NS->getName();
  if (auto *CB = dyn_cast<DICommonBlock>(this))
    return CB->getName();
  if (auto *M = dyn_cast<DIModule>(this))
    return M->getName();
  assert((isa<DILexicalBlockBase>(this) || isa<DIFile>(this) ||
          isa<DICompileUnit>(this)) &&
         "Unhandled type of scope.");
  return "";
}

#ifndef NDEBUG
static bool isCanonical(const MDString *S) {
  return !S || !S->getString().empty();
}
#endif

dwarf::Tag GenericDINode::getTag() const { return (dwarf::Tag)SubclassData16; }

GenericDINode *GenericDINode::getImpl(LLVMContext &Context, unsigned Tag,
                                      MDString *Header,
                                      ArrayRef<Metadata *> DwarfOps,
                                      StorageType Storage, bool ShouldCreate) {
  unsigned Hash = 0;
  if (Storage == Uniqued) {
    GenericDINodeInfo::KeyTy Key(Tag, Header, DwarfOps);
    if (auto *N = getUniqued(Context.pImpl->GenericDINodes, Key))
      return N;
    if (!ShouldCreate)
      return nullptr;
    Hash = Key.getHash();
  } else {
    assert(ShouldCreate && "Expected non-uniqued nodes to always be created");
  }

  // Use a nullptr for empty headers.
  assert(isCanonical(Header) && "Expected canonical MDString");
  Metadata *PreOps[] = {Header};
  return storeImpl(new (DwarfOps.size() + 1, Storage) GenericDINode(
                       Context, Storage, Hash, Tag, PreOps, DwarfOps),
                   Storage, Context.pImpl->GenericDINodes);
}

void GenericDINode::recalculateHash() {
  setHash(GenericDINodeInfo::KeyTy::calculateHash(this));
}

#define UNWRAP_ARGS_IMPL(...) __VA_ARGS__
#define UNWRAP_ARGS(ARGS) UNWRAP_ARGS_IMPL ARGS
#define DEFINE_GETIMPL_LOOKUP(CLASS, ARGS)                                     \
  do {                                                                         \
    if (Storage == Uniqued) {                                                  \
      if (auto *N = getUniqued(Context.pImpl->CLASS##s,                        \
                               CLASS##Info::KeyTy(UNWRAP_ARGS(ARGS))))         \
        return N;                                                              \
      if (!ShouldCreate)                                                       \
        return nullptr;                                                        \
    } else {                                                                   \
      assert(ShouldCreate &&                                                   \
             "Expected non-uniqued nodes to always be created");               \
    }                                                                          \
  } while (false)
#define DEFINE_GETIMPL_STORE(CLASS, ARGS, OPS)                                 \
  return storeImpl(new (std::size(OPS), Storage)                               \
                       CLASS(Context, Storage, UNWRAP_ARGS(ARGS), OPS),        \
                   Storage, Context.pImpl->CLASS##s)
#define DEFINE_GETIMPL_STORE_NO_OPS(CLASS, ARGS)                               \
  return storeImpl(new (0u, Storage)                                           \
                       CLASS(Context, Storage, UNWRAP_ARGS(ARGS)),             \
                   Storage, Context.pImpl->CLASS##s)
#define DEFINE_GETIMPL_STORE_NO_CONSTRUCTOR_ARGS(CLASS, OPS)                   \
  return storeImpl(new (std::size(OPS), Storage) CLASS(Context, Storage, OPS), \
                   Storage, Context.pImpl->CLASS##s)
#define DEFINE_GETIMPL_STORE_N(CLASS, ARGS, OPS, NUM_OPS)                      \
  return storeImpl(new (NUM_OPS, Storage)                                      \
                       CLASS(Context, Storage, UNWRAP_ARGS(ARGS), OPS),        \
                   Storage, Context.pImpl->CLASS##s)
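
// Editorial note (not part of the upstream file): these macros expand to the
// boilerplate shared by every DI*::getImpl.  For example, inside a getImpl
// that has Context, Storage and ShouldCreate in scope,
//
//   DEFINE_GETIMPL_LOOKUP(DISubrange, (CountNode, LB, UB, Stride));
//
// consults the uniquing map in LLVMContextImpl (returning an existing node,
// or nullptr when creation was not requested), and
//
//   DEFINE_GETIMPL_STORE_NO_CONSTRUCTOR_ARGS(DISubrange, Ops);
//
// allocates the node with the given operands and registers it via storeImpl.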
DISubrange::DISubrange(LLVMContext &C, StorageType Storage,
                       ArrayRef<Metadata *> Ops)
    : DINode(C, DISubrangeKind, Storage, dwarf::DW_TAG_subrange_type, Ops) {}

DISubrange *DISubrange::getImpl(LLVMContext &Context, int64_t Count, int64_t Lo,
                                StorageType Storage, bool ShouldCreate) {
  auto *CountNode = ConstantAsMetadata::get(
      ConstantInt::getSigned(Type::getInt64Ty(Context), Count));
  auto *LB = ConstantAsMetadata::get(
      ConstantInt::getSigned(Type::getInt64Ty(Context), Lo));
  return getImpl(Context, CountNode, LB, nullptr, nullptr, Storage,
                 ShouldCreate);
}

DISubrange *DISubrange::getImpl(LLVMContext &Context, Metadata *CountNode,
                                int64_t Lo, StorageType Storage,
                                bool ShouldCreate) {
  auto *LB = ConstantAsMetadata::get(
      ConstantInt::getSigned(Type::getInt64Ty(Context), Lo));
  return getImpl(Context, CountNode, LB, nullptr, nullptr, Storage,
                 ShouldCreate);
}

DISubrange *DISubrange::getImpl(LLVMContext &Context, Metadata *CountNode,
                                Metadata *LB, Metadata *UB, Metadata *Stride,
                                StorageType Storage, bool ShouldCreate) {
  DEFINE_GETIMPL_LOOKUP(DISubrange, (CountNode, LB, UB, Stride));
  Metadata *Ops[] = {CountNode, LB, UB, Stride};
  DEFINE_GETIMPL_STORE_NO_CONSTRUCTOR_ARGS(DISubrange, Ops);
}

DISubrange::BoundType DISubrange::getCount() const {
  Metadata *CB = getRawCountNode();
  if (!CB)
    return BoundType();
  assert((isa<ConstantAsMetadata>(CB) || isa<DIVariable>(CB) ||
          isa<DIExpression>(CB)) &&
         "Count must be signed constant or DIVariable or DIExpression");
  if (auto *MD = dyn_cast<ConstantAsMetadata>(CB))
    return BoundType(cast<ConstantInt>(MD->getValue()));
  if (auto *MD = dyn_cast<DIVariable>(CB))
    return BoundType(MD);
  if (auto *MD = dyn_cast<DIExpression>(CB))
    return BoundType(MD);
  return BoundType();
}

DISubrange::BoundType DISubrange::getLowerBound() const {
  Metadata *LB = getRawLowerBound();
  if (!LB)
    return BoundType();
  assert((isa<ConstantAsMetadata>(LB) || isa<DIVariable>(LB) ||
          isa<DIExpression>(LB)) &&
         "LowerBound must be signed constant or DIVariable or DIExpression");
  if (auto *MD = dyn_cast<ConstantAsMetadata>(LB))
    return BoundType(cast<ConstantInt>(MD->getValue()));
  if (auto *MD = dyn_cast<DIVariable>(LB))
    return BoundType(MD);
  if (auto *MD = dyn_cast<DIExpression>(LB))
    return BoundType(MD);
  return BoundType();
}

DISubrange::BoundType DISubrange::getUpperBound() const {
  Metadata *UB = getRawUpperBound();
  if (!UB)
    return BoundType();
  assert((isa<ConstantAsMetadata>(UB) || isa<DIVariable>(UB) ||
          isa<DIExpression>(UB)) &&
         "UpperBound must be signed constant or DIVariable or DIExpression");
  if (auto *MD = dyn_cast<ConstantAsMetadata>(UB))
    return BoundType(cast<ConstantInt>(MD->getValue()));
  if (auto *MD = dyn_cast<DIVariable>(UB))
    return BoundType(MD);
  if (auto *MD = dyn_cast<DIExpression>(UB))
    return BoundType(MD);
  return BoundType();
}

DISubrange::BoundType DISubrange::getStride() const {
  Metadata *ST = getRawStride();
  if (!ST)
    return BoundType();
  assert((isa<ConstantAsMetadata>(ST) || isa<DIVariable>(ST) ||
          isa<DIExpression>(ST)) &&
         "Stride must be signed constant or DIVariable or DIExpression");
  if (auto *MD = dyn_cast<ConstantAsMetadata>(ST))
    return BoundType(cast<ConstantInt>(MD->getValue()));
  if (auto *MD = dyn_cast<DIVariable>(ST))
    return BoundType(MD);
  if (auto *MD = dyn_cast<DIExpression>(ST))
    return BoundType(MD);
  return BoundType();
}
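
// Editorial sketch (not part of the upstream file, assuming BoundType is the
// ConstantInt*/DIVariable*/DIExpression* PointerUnion declared in
// DebugInfoMetadata.h): a caller that only cares about constant counts might
// probe the union directly, e.g.
//
//   DISubrange::BoundType Count = Subrange->getCount();
//   if (auto *CI = Count.dyn_cast<ConstantInt *>())
//     NumElements = CI->getSExtValue();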
DIGenericSubrange::DIGenericSubrange(LLVMContext &C, StorageType Storage,
                                     ArrayRef<Metadata *> Ops)
    : DINode(C, DIGenericSubrangeKind, Storage, dwarf::DW_TAG_generic_subrange,
             Ops) {}

DIGenericSubrange *DIGenericSubrange::getImpl(LLVMContext &Context,
                                              Metadata *CountNode, Metadata *LB,
                                              Metadata *UB, Metadata *Stride,
                                              StorageType Storage,
                                              bool ShouldCreate) {
  DEFINE_GETIMPL_LOOKUP(DIGenericSubrange, (CountNode, LB, UB, Stride));
  Metadata *Ops[] = {CountNode, LB, UB, Stride};
  DEFINE_GETIMPL_STORE_NO_CONSTRUCTOR_ARGS(DIGenericSubrange, Ops);
}

DIGenericSubrange::BoundType DIGenericSubrange::getCount() const {
  Metadata *CB = getRawCountNode();
  if (!CB)
    return BoundType();
  assert((isa<DIVariable>(CB) || isa<DIExpression>(CB)) &&
         "Count must be signed constant or DIVariable or DIExpression");
  if (auto *MD = dyn_cast<DIVariable>(CB))
    return BoundType(MD);
  if (auto *MD = dyn_cast<DIExpression>(CB))
    return BoundType(MD);
  return BoundType();
}

DIGenericSubrange::BoundType DIGenericSubrange::getLowerBound() const {
  Metadata *LB = getRawLowerBound();
  if (!LB)
    return BoundType();
  assert((isa<DIVariable>(LB) || isa<DIExpression>(LB)) &&
         "LowerBound must be signed constant or DIVariable or DIExpression");
  if (auto *MD = dyn_cast<DIVariable>(LB))
    return BoundType(MD);
  if (auto *MD = dyn_cast<DIExpression>(LB))
    return BoundType(MD);
  return BoundType();
}

DIGenericSubrange::BoundType DIGenericSubrange::getUpperBound() const {
  Metadata *UB = getRawUpperBound();
  if (!UB)
    return BoundType();
  assert((isa<DIVariable>(UB) || isa<DIExpression>(UB)) &&
         "UpperBound must be signed constant or DIVariable or DIExpression");
  if (auto *MD = dyn_cast<DIVariable>(UB))
    return BoundType(MD);
  if (auto *MD = dyn_cast<DIExpression>(UB))
    return BoundType(MD);
  return BoundType();
}

DIGenericSubrange::BoundType DIGenericSubrange::getStride() const {
  Metadata *ST = getRawStride();
  if (!ST)
    return BoundType();
  assert((isa<DIVariable>(ST) || isa<DIExpression>(ST)) &&
         "Stride must be signed constant or DIVariable or DIExpression");
  if (auto *MD = dyn_cast<DIVariable>(ST))
    return BoundType(MD);
  if (auto *MD = dyn_cast<DIExpression>(ST))
    return BoundType(MD);
  return BoundType();
}

DIEnumerator::DIEnumerator(LLVMContext &C, StorageType Storage,
                           const APInt &Value, bool IsUnsigned,
                           ArrayRef<Metadata *> Ops)
    : DINode(C, DIEnumeratorKind, Storage, dwarf::DW_TAG_enumerator, Ops),
      Value(Value) {
  SubclassData32 = IsUnsigned;
}

DIEnumerator *DIEnumerator::getImpl(LLVMContext &Context, const APInt &Value,
                                    bool IsUnsigned, MDString *Name,
                                    StorageType Storage, bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DIEnumerator, (Value, IsUnsigned, Name));
  Metadata *Ops[] = {Name};
  DEFINE_GETIMPL_STORE(DIEnumerator, (Value, IsUnsigned), Ops);
}

DIBasicType *DIBasicType::getImpl(LLVMContext &Context, unsigned Tag,
                                  MDString *Name, uint64_t SizeInBits,
                                  uint32_t AlignInBits, unsigned Encoding,
                                  DIFlags Flags, StorageType Storage,
                                  bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DIBasicType,
                        (Tag, Name, SizeInBits, AlignInBits, Encoding, Flags));
  Metadata *Ops[] = {nullptr, nullptr, Name};
  DEFINE_GETIMPL_STORE(DIBasicType,
                       (Tag, SizeInBits, AlignInBits, Encoding, Flags), Ops);
}

std::optional<DIBasicType::Signedness> DIBasicType::getSignedness() const {
  switch (getEncoding()) {
  case dwarf::DW_ATE_signed:
  case dwarf::DW_ATE_signed_char:
    return Signedness::Signed;
  case dwarf::DW_ATE_unsigned:
  case dwarf::DW_ATE_unsigned_char:
    return Signedness::Unsigned;
  default:
    return std::nullopt;
  }
}

DIStringType *DIStringType::getImpl(LLVMContext &Context, unsigned Tag,
                                    MDString *Name, Metadata *StringLength,
                                    Metadata *StringLengthExp,
                                    Metadata *StringLocationExp,
                                    uint64_t SizeInBits, uint32_t AlignInBits,
                                    unsigned Encoding, StorageType Storage,
                                    bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DIStringType,
                        (Tag, Name, StringLength, StringLengthExp,
                         StringLocationExp, SizeInBits, AlignInBits, Encoding));
  Metadata *Ops[] = {nullptr,      nullptr,         Name,
                     StringLength, StringLengthExp, StringLocationExp};
  DEFINE_GETIMPL_STORE(DIStringType, (Tag, SizeInBits, AlignInBits, Encoding),
                       Ops);
}

DIType *DIDerivedType::getClassType() const {
  assert(getTag() == dwarf::DW_TAG_ptr_to_member_type);
  return cast_or_null<DIType>(getExtraData());
}

uint32_t DIDerivedType::getVBPtrOffset() const {
  assert(getTag() == dwarf::DW_TAG_inheritance);
  if (auto *CM = cast_or_null<ConstantAsMetadata>(getExtraData()))
    if (auto *CI = dyn_cast_or_null<ConstantInt>(CM->getValue()))
      return static_cast<uint32_t>(CI->getZExtValue());
  return 0;
}

Constant *DIDerivedType::getStorageOffsetInBits() const {
  assert(getTag() == dwarf::DW_TAG_member && isBitField());
  if (auto *C = cast_or_null<ConstantAsMetadata>(getExtraData()))
    return C->getValue();
  return nullptr;
}

Constant *DIDerivedType::getConstant() const {
  assert(getTag() == dwarf::DW_TAG_member && isStaticMember());
  if (auto *C = cast_or_null<ConstantAsMetadata>(getExtraData()))
    return C->getValue();
  return nullptr;
}

Constant *DIDerivedType::getDiscriminantValue() const {
  assert(getTag() == dwarf::DW_TAG_member && !isStaticMember());
  if (auto *C = cast_or_null<ConstantAsMetadata>(getExtraData()))
    return C->getValue();
  return nullptr;
}

DIDerivedType *
DIDerivedType::getImpl(LLVMContext &Context, unsigned Tag, MDString *Name,
                       Metadata *File, unsigned Line, Metadata *Scope,
                       Metadata *BaseType, uint64_t SizeInBits,
                       uint32_t AlignInBits, uint64_t OffsetInBits,
                       std::optional<unsigned> DWARFAddressSpace, DIFlags Flags,
                       Metadata *ExtraData, Metadata *Annotations,
                       StorageType Storage, bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DIDerivedType,
                        (Tag, Name, File, Line, Scope, BaseType, SizeInBits,
                         AlignInBits, OffsetInBits, DWARFAddressSpace, Flags,
                         ExtraData, Annotations));
  Metadata *Ops[] = {File, Scope, Name, BaseType, ExtraData, Annotations};
  DEFINE_GETIMPL_STORE(DIDerivedType,
                       (Tag, Line, SizeInBits, AlignInBits, OffsetInBits,
                        DWARFAddressSpace, Flags),
                       Ops);
}

DICompositeType *DICompositeType::getImpl(
    LLVMContext &Context, unsigned Tag, MDString *Name, Metadata *File,
    unsigned Line, Metadata *Scope, Metadata *BaseType, uint64_t SizeInBits,
    uint32_t AlignInBits, uint64_t OffsetInBits, DIFlags Flags,
    Metadata *Elements, unsigned RuntimeLang, Metadata *VTableHolder,
    Metadata *TemplateParams, MDString *Identifier, Metadata *Discriminator,
    Metadata *DataLocation, Metadata *Associated, Metadata *Allocated,
    Metadata *Rank, Metadata *Annotations, StorageType Storage,
    bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");

  // Keep this in sync with buildODRType.
  DEFINE_GETIMPL_LOOKUP(DICompositeType,
                        (Tag, Name, File, Line, Scope, BaseType, SizeInBits,
                         AlignInBits, OffsetInBits, Flags, Elements,
                         RuntimeLang, VTableHolder, TemplateParams, Identifier,
                         Discriminator, DataLocation, Associated, Allocated,
                         Rank, Annotations));
  Metadata *Ops[] = {File,          Scope,        Name,           BaseType,
                     Elements,      VTableHolder, TemplateParams, Identifier,
                     Discriminator, DataLocation, Associated,     Allocated,
                     Rank,          Annotations};
  DEFINE_GETIMPL_STORE(
      DICompositeType,
      (Tag, Line, RuntimeLang, SizeInBits, AlignInBits, OffsetInBits, Flags),
      Ops);
}

DICompositeType *DICompositeType::buildODRType(
    LLVMContext &Context, MDString &Identifier, unsigned Tag, MDString *Name,
    Metadata *File, unsigned Line, Metadata *Scope, Metadata *BaseType,
    uint64_t SizeInBits, uint32_t AlignInBits, uint64_t OffsetInBits,
    DIFlags Flags, Metadata *Elements, unsigned RuntimeLang,
    Metadata *VTableHolder, Metadata *TemplateParams, Metadata *Discriminator,
    Metadata *DataLocation, Metadata *Associated, Metadata *Allocated,
    Metadata *Rank, Metadata *Annotations) {
  assert(!Identifier.getString().empty() && "Expected valid identifier");
  if (!Context.isODRUniquingDebugTypes())
    return nullptr;
  auto *&CT = (*Context.pImpl->DITypeMap)[&Identifier];
  if (!CT)
    return CT = DICompositeType::getDistinct(
               Context, Tag, Name, File, Line, Scope, BaseType, SizeInBits,
               AlignInBits, OffsetInBits, Flags, Elements, RuntimeLang,
               VTableHolder, TemplateParams, &Identifier, Discriminator,
               DataLocation, Associated, Allocated, Rank, Annotations);

  if (CT->getTag() != Tag)
    return nullptr;

  // Only mutate CT if it's a forward declaration and the new operands aren't.
  assert(CT->getRawIdentifier() == &Identifier && "Wrong ODR identifier?");
  if (!CT->isForwardDecl() || (Flags & DINode::FlagFwdDecl))
    return CT;

  // Mutate CT in place.  Keep this in sync with getImpl.
  CT->mutate(Tag, Line, RuntimeLang, SizeInBits, AlignInBits, OffsetInBits,
             Flags);
  Metadata *Ops[] = {File,          Scope,        Name,           BaseType,
                     Elements,      VTableHolder, TemplateParams, &Identifier,
                     Discriminator, DataLocation, Associated,     Allocated,
                     Rank,          Annotations};
  assert((std::end(Ops) - std::begin(Ops)) == (int)CT->getNumOperands() &&
         "Mismatched number of operands");
  for (unsigned I = 0, E = CT->getNumOperands(); I != E; ++I)
    if (Ops[I] != CT->getOperand(I))
      CT->setOperand(I, Ops[I]);
  return CT;
}

DICompositeType *DICompositeType::getODRType(
    LLVMContext &Context, MDString &Identifier, unsigned Tag, MDString *Name,
    Metadata *File, unsigned Line, Metadata *Scope, Metadata *BaseType,
    uint64_t SizeInBits, uint32_t AlignInBits, uint64_t OffsetInBits,
    DIFlags Flags, Metadata *Elements, unsigned RuntimeLang,
    Metadata *VTableHolder, Metadata *TemplateParams, Metadata *Discriminator,
    Metadata *DataLocation, Metadata *Associated, Metadata *Allocated,
    Metadata *Rank, Metadata *Annotations) {
  assert(!Identifier.getString().empty() && "Expected valid identifier");
  if (!Context.isODRUniquingDebugTypes())
    return nullptr;
  auto *&CT = (*Context.pImpl->DITypeMap)[&Identifier];
  if (!CT) {
    CT = DICompositeType::getDistinct(
        Context, Tag, Name, File, Line, Scope, BaseType, SizeInBits,
        AlignInBits, OffsetInBits, Flags, Elements, RuntimeLang, VTableHolder,
        TemplateParams, &Identifier, Discriminator, DataLocation, Associated,
        Allocated, Rank, Annotations);
  } else {
    if (CT->getTag() != Tag)
      return nullptr;
  }
  return CT;
}

DICompositeType *DICompositeType::getODRTypeIfExists(LLVMContext &Context,
                                                      MDString &Identifier) {
  assert(!Identifier.getString().empty() && "Expected valid identifier");
  if (!Context.isODRUniquingDebugTypes())
    return nullptr;
  return Context.pImpl->DITypeMap->lookup(&Identifier);
}

DISubroutineType::DISubroutineType(LLVMContext &C, StorageType Storage,
                                   DIFlags Flags, uint8_t CC,
                                   ArrayRef<Metadata *> Ops)
    : DIType(C, DISubroutineTypeKind, Storage, dwarf::DW_TAG_subroutine_type, 0,
             0, 0, 0, Flags, Ops),
      CC(CC) {}

DISubroutineType *DISubroutineType::getImpl(LLVMContext &Context, DIFlags Flags,
                                            uint8_t CC, Metadata *TypeArray,
                                            StorageType Storage,
                                            bool ShouldCreate) {
  DEFINE_GETIMPL_LOOKUP(DISubroutineType, (Flags, CC, TypeArray));
  Metadata *Ops[] = {nullptr, nullptr, nullptr, TypeArray};
  DEFINE_GETIMPL_STORE(DISubroutineType, (Flags, CC), Ops);
}

DIFile::DIFile(LLVMContext &C, StorageType Storage,
               std::optional<ChecksumInfo<MDString *>> CS, MDString *Src,
               ArrayRef<Metadata *> Ops)
    : DIScope(C, DIFileKind, Storage, dwarf::DW_TAG_file_type, Ops),
      Checksum(CS), Source(Src) {}

// FIXME: Implement this string-enum correspondence with a .def file and macros,
// so that the association is explicit rather than implied.
static const char *ChecksumKindName[DIFile::CSK_Last] = {
    "CSK_MD5",
    "CSK_SHA1",
    "CSK_SHA256",
};

StringRef DIFile::getChecksumKindAsString(ChecksumKind CSKind) {
  assert(CSKind <= DIFile::CSK_Last && "Invalid checksum kind");
  // The first space was originally the CSK_None variant, which is now
  // obsolete, but the space is still reserved in ChecksumKind, so we account
  // for it here.
  return ChecksumKindName[CSKind - 1];
}
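
// Editorial example (not part of the upstream file): CSK_MD5 has value 1
// because the removed CSK_None variant occupied slot 0, so
// getChecksumKindAsString(DIFile::CSK_MD5) indexes ChecksumKindName[0] and
// returns "CSK_MD5".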
std::optional<DIFile::ChecksumKind>
DIFile::getChecksumKind(StringRef CSKindStr) {
  return StringSwitch<std::optional<DIFile::ChecksumKind>>(CSKindStr)
      .Case("CSK_MD5", DIFile::CSK_MD5)
      .Case("CSK_SHA1", DIFile::CSK_SHA1)
      .Case("CSK_SHA256", DIFile::CSK_SHA256)
      .Default(std::nullopt);
}

DIFile *DIFile::getImpl(LLVMContext &Context, MDString *Filename,
                        MDString *Directory,
                        std::optional<DIFile::ChecksumInfo<MDString *>> CS,
                        MDString *Source, StorageType Storage,
                        bool ShouldCreate) {
  assert(isCanonical(Filename) && "Expected canonical MDString");
  assert(isCanonical(Directory) && "Expected canonical MDString");
  assert((!CS || isCanonical(CS->Value)) && "Expected canonical MDString");
  // We do *NOT* expect Source to be a canonical MDString because nullptr
  // means none, so we need something to represent the empty file.
  DEFINE_GETIMPL_LOOKUP(DIFile, (Filename, Directory, CS, Source));
  Metadata *Ops[] = {Filename, Directory, CS ? CS->Value : nullptr, Source};
  DEFINE_GETIMPL_STORE(DIFile, (CS, Source), Ops);
}

DICompileUnit::DICompileUnit(LLVMContext &C, StorageType Storage,
                             unsigned SourceLanguage, bool IsOptimized,
                             unsigned RuntimeVersion, unsigned EmissionKind,
                             uint64_t DWOId, bool SplitDebugInlining,
                             bool DebugInfoForProfiling, unsigned NameTableKind,
                             bool RangesBaseAddress, ArrayRef<Metadata *> Ops)
    : DIScope(C, DICompileUnitKind, Storage, dwarf::DW_TAG_compile_unit, Ops),
      SourceLanguage(SourceLanguage), IsOptimized(IsOptimized),
      RuntimeVersion(RuntimeVersion), EmissionKind(EmissionKind), DWOId(DWOId),
      SplitDebugInlining(SplitDebugInlining),
      DebugInfoForProfiling(DebugInfoForProfiling),
      NameTableKind(NameTableKind), RangesBaseAddress(RangesBaseAddress) {
  assert(Storage != Uniqued);
}

DICompileUnit *DICompileUnit::getImpl(
    LLVMContext &Context, unsigned SourceLanguage, Metadata *File,
    MDString *Producer, bool IsOptimized, MDString *Flags,
    unsigned RuntimeVersion, MDString *SplitDebugFilename,
    unsigned EmissionKind, Metadata *EnumTypes, Metadata *RetainedTypes,
    Metadata *GlobalVariables, Metadata *ImportedEntities, Metadata *Macros,
    uint64_t DWOId, bool SplitDebugInlining, bool DebugInfoForProfiling,
    unsigned NameTableKind, bool RangesBaseAddress, MDString *SysRoot,
    MDString *SDK, StorageType Storage, bool ShouldCreate) {
  assert(Storage != Uniqued && "Cannot unique DICompileUnit");
  assert(isCanonical(Producer) && "Expected canonical MDString");
  assert(isCanonical(Flags) && "Expected canonical MDString");
  assert(isCanonical(SplitDebugFilename) && "Expected canonical MDString");

  Metadata *Ops[] = {File,
                     Producer,
                     Flags,
                     SplitDebugFilename,
                     EnumTypes,
                     RetainedTypes,
                     GlobalVariables,
                     ImportedEntities,
                     Macros,
                     SysRoot,
                     SDK};
  return storeImpl(new (std::size(Ops), Storage) DICompileUnit(
                       Context, Storage, SourceLanguage, IsOptimized,
                       RuntimeVersion, EmissionKind, DWOId, SplitDebugInlining,
                       DebugInfoForProfiling, NameTableKind, RangesBaseAddress,
                       Ops),
                   Storage);
}

std::optional<DICompileUnit::DebugEmissionKind>
DICompileUnit::getEmissionKind(StringRef Str) {
  return StringSwitch<std::optional<DebugEmissionKind>>(Str)
      .Case("NoDebug", NoDebug)
      .Case("FullDebug", FullDebug)
      .Case("LineTablesOnly", LineTablesOnly)
      .Case("DebugDirectivesOnly", DebugDirectivesOnly)
      .Default(std::nullopt);
}

std::optional<DICompileUnit::DebugNameTableKind>
DICompileUnit::getNameTableKind(StringRef Str) {
  return StringSwitch<std::optional<DebugNameTableKind>>(Str)
      .Case("Default", DebugNameTableKind::Default)
      .Case("GNU", DebugNameTableKind::GNU)
      .Case("None", DebugNameTableKind::None)
      .Default(std::nullopt);
}

const char *DICompileUnit::emissionKindString(DebugEmissionKind EK) {
  switch (EK) {
  case NoDebug:
    return "NoDebug";
  case FullDebug:
    return "FullDebug";
  case LineTablesOnly:
    return "LineTablesOnly";
  case DebugDirectivesOnly:
    return "DebugDirectivesOnly";
  }
  return nullptr;
}

const char *DICompileUnit::nameTableKindString(DebugNameTableKind NTK) {
  switch (NTK) {
  case DebugNameTableKind::Default:
    return nullptr;
  case DebugNameTableKind::GNU:
    return "GNU";
  case DebugNameTableKind::None:
    return "None";
  }
  return nullptr;
}

DISubprogram::DISubprogram(LLVMContext &C, StorageType Storage, unsigned Line,
                           unsigned ScopeLine, unsigned VirtualIndex,
                           int ThisAdjustment, DIFlags Flags, DISPFlags SPFlags,
                           ArrayRef<Metadata *> Ops)
    : DILocalScope(C, DISubprogramKind, Storage, dwarf::DW_TAG_subprogram, Ops),
      Line(Line), ScopeLine(ScopeLine), VirtualIndex(VirtualIndex),
      ThisAdjustment(ThisAdjustment), Flags(Flags), SPFlags(SPFlags) {
  static_assert(dwarf::DW_VIRTUALITY_max < 4, "Virtuality out of range");
}

DISubprogram::DISPFlags
DISubprogram::toSPFlags(bool IsLocalToUnit, bool IsDefinition, bool IsOptimized,
                        unsigned Virtuality, bool IsMainSubprogram) {
  // We're assuming virtuality is the low-order field.
  static_assert(int(SPFlagVirtual) == int(dwarf::DW_VIRTUALITY_virtual) &&
                    int(SPFlagPureVirtual) ==
                        int(dwarf::DW_VIRTUALITY_pure_virtual),
                "Virtuality constant mismatch");
  return static_cast<DISPFlags>(
      (Virtuality & SPFlagVirtuality) |
      (IsLocalToUnit ? SPFlagLocalToUnit : SPFlagZero) |
      (IsDefinition ? SPFlagDefinition : SPFlagZero) |
      (IsOptimized ? SPFlagOptimized : SPFlagZero) |
      (IsMainSubprogram ? SPFlagMainSubprogram : SPFlagZero));
}
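
// Editorial example (illustrative only): building the flag word for an
// optimized, externally visible definition of a virtual method.
//
//   DISubprogram::DISPFlags SPFlags = DISubprogram::toSPFlags(
//       /*IsLocalToUnit=*/false, /*IsDefinition=*/true, /*IsOptimized=*/true,
//       dwarf::DW_VIRTUALITY_virtual);
//   // SPFlags == SPFlagVirtual | SPFlagDefinition | SPFlagOptimized.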
DISubprogram *DILocalScope::getSubprogram() const {
  if (auto *Block = dyn_cast<DILexicalBlockBase>(this))
    return Block->getScope()->getSubprogram();
  return const_cast<DISubprogram *>(cast<DISubprogram>(this));
}

DILocalScope *DILocalScope::getNonLexicalBlockFileScope() const {
  if (auto *File = dyn_cast<DILexicalBlockFile>(this))
    return File->getScope()->getNonLexicalBlockFileScope();
  return const_cast<DILocalScope *>(this);
}

DILocalScope *DILocalScope::cloneScopeForSubprogram(
    DILocalScope &RootScope, DISubprogram &NewSP, LLVMContext &Ctx,
    DenseMap<const MDNode *, MDNode *> &Cache) {
  SmallVector<DIScope *> ScopeChain;
  DIScope *CachedResult = nullptr;
  for (DIScope *Scope = &RootScope; !isa<DISubprogram>(Scope);
       Scope = Scope->getScope()) {
    if (auto It = Cache.find(Scope); It != Cache.end()) {
      CachedResult = cast<DIScope>(It->second);
      break;
    }
    ScopeChain.push_back(Scope);
  }

  // Recreate the scope chain, bottom-up, starting at the new subprogram (or a
  // cached result).
  DIScope *UpdatedScope = CachedResult ? CachedResult : &NewSP;
  for (DIScope *ScopeToUpdate : reverse(ScopeChain)) {
    TempMDNode ClonedScope = ScopeToUpdate->clone();
    cast<DILexicalBlockBase>(*ClonedScope).replaceScope(UpdatedScope);
    UpdatedScope =
        cast<DIScope>(MDNode::replaceWithUniqued(std::move(ClonedScope)));
    Cache[ScopeToUpdate] = UpdatedScope;
  }

  return cast<DILocalScope>(UpdatedScope);
}
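
// Editorial note (not part of the upstream file): given a scope chain
// DILexicalBlock -> DILexicalBlock -> DISubprogram 'OldSP', calling
// cloneScopeForSubprogram with NewSP rebuilds a parallel chain of cloned
// lexical blocks whose outermost parent is NewSP; the Cache map lets repeated
// calls for sibling scopes reuse blocks that were already cloned.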
DISubprogram::DISPFlags DISubprogram::getFlag(StringRef Flag) {
  return StringSwitch<DISPFlags>(Flag)
#define HANDLE_DISP_FLAG(ID, NAME) .Case("DISPFlag" #NAME, SPFlag##NAME)
#include "llvm/IR/DebugInfoFlags.def"
      .Default(SPFlagZero);
}

StringRef DISubprogram::getFlagString(DISPFlags Flag) {
  switch (Flag) {
  // Appease a warning.
  case SPFlagVirtuality:
    return "";
#define HANDLE_DISP_FLAG(ID, NAME)                                             \
  case SPFlag##NAME:                                                           \
    return "DISPFlag" #NAME;
#include "llvm/IR/DebugInfoFlags.def"
  }
  return "";
}

DISubprogram::DISPFlags
DISubprogram::splitFlags(DISPFlags Flags,
                         SmallVectorImpl<DISPFlags> &SplitFlags) {
  // Multi-bit fields can require special handling.  In our case, however, the
  // only multi-bit field is virtuality, and all its values happen to be
  // single-bit values, so the right behavior just falls out.
#define HANDLE_DISP_FLAG(ID, NAME)                                             \
  if (DISPFlags Bit = Flags & SPFlag##NAME) {                                  \
    SplitFlags.push_back(Bit);                                                 \
    Flags &= ~Bit;                                                             \
  }
#include "llvm/IR/DebugInfoFlags.def"
  return Flags;
}

DISubprogram *DISubprogram::getImpl(
    LLVMContext &Context, Metadata *Scope, MDString *Name,
    MDString *LinkageName, Metadata *File, unsigned Line, Metadata *Type,
    unsigned ScopeLine, Metadata *ContainingType, unsigned VirtualIndex,
    int ThisAdjustment, DIFlags Flags, DISPFlags SPFlags, Metadata *Unit,
    Metadata *TemplateParams, Metadata *Declaration, Metadata *RetainedNodes,
    Metadata *ThrownTypes, Metadata *Annotations, MDString *TargetFuncName,
    StorageType Storage, bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");
  assert(isCanonical(LinkageName) && "Expected canonical MDString");
  assert(isCanonical(TargetFuncName) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DISubprogram,
                        (Scope, Name, LinkageName, File, Line, Type, ScopeLine,
                         ContainingType, VirtualIndex, ThisAdjustment, Flags,
                         SPFlags, Unit, TemplateParams, Declaration,
                         RetainedNodes, ThrownTypes, Annotations,
                         TargetFuncName));
  SmallVector<Metadata *, 13> Ops = {
      File,           Scope,          Name,        LinkageName,
      Type,           Unit,           Declaration, RetainedNodes,
      ContainingType, TemplateParams, ThrownTypes, Annotations,
      TargetFuncName};
  if (!TargetFuncName) {
    Ops.pop_back();
    if (!Annotations) {
      Ops.pop_back();
      if (!ThrownTypes) {
        Ops.pop_back();
        if (!TemplateParams) {
          Ops.pop_back();
          if (!ContainingType)
            Ops.pop_back();
        }
      }
    }
  }
  DEFINE_GETIMPL_STORE_N(
      DISubprogram,
      (Line, ScopeLine, VirtualIndex, ThisAdjustment, Flags, SPFlags), Ops,
      Ops.size());
}
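
// Editorial note (illustrative only): the nested pop_back calls above drop
// trailing null operands, so a subprogram with no target function name,
// annotations, thrown types, template parameters, or containing type is
// allocated with 8 operands instead of 13, while a node that has, say,
// ThrownTypes keeps the (possibly null) TemplateParams and ContainingType
// slots that precede it.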
  953. bool DISubprogram::describes(const Function *F) const {
  954. assert(F && "Invalid function");
  955. return F->getSubprogram() == this;
  956. }
  957. DILexicalBlockBase::DILexicalBlockBase(LLVMContext &C, unsigned ID,
  958. StorageType Storage,
  959. ArrayRef<Metadata *> Ops)
  960. : DILocalScope(C, ID, Storage, dwarf::DW_TAG_lexical_block, Ops) {}
  961. DILexicalBlock *DILexicalBlock::getImpl(LLVMContext &Context, Metadata *Scope,
  962. Metadata *File, unsigned Line,
  963. unsigned Column, StorageType Storage,
  964. bool ShouldCreate) {
  965. // Fixup column.
  966. adjustColumn(Column);
  967. assert(Scope && "Expected scope");
  968. DEFINE_GETIMPL_LOOKUP(DILexicalBlock, (Scope, File, Line, Column));
  969. Metadata *Ops[] = {File, Scope};
  970. DEFINE_GETIMPL_STORE(DILexicalBlock, (Line, Column), Ops);
  971. }
  972. DILexicalBlockFile *DILexicalBlockFile::getImpl(LLVMContext &Context,
  973. Metadata *Scope, Metadata *File,
  974. unsigned Discriminator,
  975. StorageType Storage,
  976. bool ShouldCreate) {
  977. assert(Scope && "Expected scope");
  978. DEFINE_GETIMPL_LOOKUP(DILexicalBlockFile, (Scope, File, Discriminator));
  979. Metadata *Ops[] = {File, Scope};
  980. DEFINE_GETIMPL_STORE(DILexicalBlockFile, (Discriminator), Ops);
  981. }
  982. DINamespace::DINamespace(LLVMContext &Context, StorageType Storage,
  983. bool ExportSymbols, ArrayRef<Metadata *> Ops)
  984. : DIScope(Context, DINamespaceKind, Storage, dwarf::DW_TAG_namespace, Ops),
  985. ExportSymbols(ExportSymbols) {}
  986. DINamespace *DINamespace::getImpl(LLVMContext &Context, Metadata *Scope,
  987. MDString *Name, bool ExportSymbols,
  988. StorageType Storage, bool ShouldCreate) {
  989. assert(isCanonical(Name) && "Expected canonical MDString");
  990. DEFINE_GETIMPL_LOOKUP(DINamespace, (Scope, Name, ExportSymbols));
  991. // The nullptr is for DIScope's File operand. This should be refactored.
  992. Metadata *Ops[] = {nullptr, Scope, Name};
  993. DEFINE_GETIMPL_STORE(DINamespace, (ExportSymbols), Ops);
  994. }
  995. DICommonBlock::DICommonBlock(LLVMContext &Context, StorageType Storage,
  996. unsigned LineNo, ArrayRef<Metadata *> Ops)
  997. : DIScope(Context, DICommonBlockKind, Storage, dwarf::DW_TAG_common_block,
  998. Ops),
  999. LineNo(LineNo) {}
  1000. DICommonBlock *DICommonBlock::getImpl(LLVMContext &Context, Metadata *Scope,
  1001. Metadata *Decl, MDString *Name,
  1002. Metadata *File, unsigned LineNo,
  1003. StorageType Storage, bool ShouldCreate) {
  1004. assert(isCanonical(Name) && "Expected canonical MDString");
  1005. DEFINE_GETIMPL_LOOKUP(DICommonBlock, (Scope, Decl, Name, File, LineNo));
  1006. // The nullptr is for DIScope's File operand. This should be refactored.
  1007. Metadata *Ops[] = {Scope, Decl, Name, File};
  1008. DEFINE_GETIMPL_STORE(DICommonBlock, (LineNo), Ops);
  1009. }
  1010. DIModule::DIModule(LLVMContext &Context, StorageType Storage, unsigned LineNo,
  1011. bool IsDecl, ArrayRef<Metadata *> Ops)
  1012. : DIScope(Context, DIModuleKind, Storage, dwarf::DW_TAG_module, Ops),
  1013. LineNo(LineNo), IsDecl(IsDecl) {}
  1014. DIModule *DIModule::getImpl(LLVMContext &Context, Metadata *File,
  1015. Metadata *Scope, MDString *Name,
  1016. MDString *ConfigurationMacros,
  1017. MDString *IncludePath, MDString *APINotesFile,
  1018. unsigned LineNo, bool IsDecl, StorageType Storage,
  1019. bool ShouldCreate) {
  1020. assert(isCanonical(Name) && "Expected canonical MDString");
  1021. DEFINE_GETIMPL_LOOKUP(DIModule, (File, Scope, Name, ConfigurationMacros,
  1022. IncludePath, APINotesFile, LineNo, IsDecl));
  1023. Metadata *Ops[] = {File, Scope, Name, ConfigurationMacros,
  1024. IncludePath, APINotesFile};
  1025. DEFINE_GETIMPL_STORE(DIModule, (LineNo, IsDecl), Ops);
  1026. }
  1027. DITemplateTypeParameter::DITemplateTypeParameter(LLVMContext &Context,
  1028. StorageType Storage,
  1029. bool IsDefault,
  1030. ArrayRef<Metadata *> Ops)
  1031. : DITemplateParameter(Context, DITemplateTypeParameterKind, Storage,
  1032. dwarf::DW_TAG_template_type_parameter, IsDefault,
  1033. Ops) {}
  1034. DITemplateTypeParameter *
  1035. DITemplateTypeParameter::getImpl(LLVMContext &Context, MDString *Name,
  1036. Metadata *Type, bool isDefault,
  1037. StorageType Storage, bool ShouldCreate) {
  1038. assert(isCanonical(Name) && "Expected canonical MDString");
  1039. DEFINE_GETIMPL_LOOKUP(DITemplateTypeParameter, (Name, Type, isDefault));
  1040. Metadata *Ops[] = {Name, Type};
  1041. DEFINE_GETIMPL_STORE(DITemplateTypeParameter, (isDefault), Ops);
  1042. }
  1043. DITemplateValueParameter *DITemplateValueParameter::getImpl(
  1044. LLVMContext &Context, unsigned Tag, MDString *Name, Metadata *Type,
  1045. bool isDefault, Metadata *Value, StorageType Storage, bool ShouldCreate) {
  1046. assert(isCanonical(Name) && "Expected canonical MDString");
  1047. DEFINE_GETIMPL_LOOKUP(DITemplateValueParameter,
  1048. (Tag, Name, Type, isDefault, Value));
  1049. Metadata *Ops[] = {Name, Type, Value};
  1050. DEFINE_GETIMPL_STORE(DITemplateValueParameter, (Tag, isDefault), Ops);
  1051. }
  1052. DIGlobalVariable *
  1053. DIGlobalVariable::getImpl(LLVMContext &Context, Metadata *Scope, MDString *Name,
  1054. MDString *LinkageName, Metadata *File, unsigned Line,
  1055. Metadata *Type, bool IsLocalToUnit, bool IsDefinition,
  1056. Metadata *StaticDataMemberDeclaration,
  1057. Metadata *TemplateParams, uint32_t AlignInBits,
  1058. Metadata *Annotations, StorageType Storage,
  1059. bool ShouldCreate) {
  1060. assert(isCanonical(Name) && "Expected canonical MDString");
  1061. assert(isCanonical(LinkageName) && "Expected canonical MDString");
  1062. DEFINE_GETIMPL_LOOKUP(
  1063. DIGlobalVariable,
  1064. (Scope, Name, LinkageName, File, Line, Type, IsLocalToUnit, IsDefinition,
  1065. StaticDataMemberDeclaration, TemplateParams, AlignInBits, Annotations));
  1066. Metadata *Ops[] = {Scope,
  1067. Name,
  1068. File,
  1069. Type,
  1070. Name,
  1071. LinkageName,
  1072. StaticDataMemberDeclaration,
  1073. TemplateParams,
  1074. Annotations};
  1075. DEFINE_GETIMPL_STORE(DIGlobalVariable,
  1076. (Line, IsLocalToUnit, IsDefinition, AlignInBits), Ops);
  1077. }
  1078. DILocalVariable *
  1079. DILocalVariable::getImpl(LLVMContext &Context, Metadata *Scope, MDString *Name,
  1080. Metadata *File, unsigned Line, Metadata *Type,
  1081. unsigned Arg, DIFlags Flags, uint32_t AlignInBits,
  1082. Metadata *Annotations, StorageType Storage,
  1083. bool ShouldCreate) {
  1084. // 64K ought to be enough for any frontend.
  1085. assert(Arg <= UINT16_MAX && "Expected argument number to fit in 16-bits");
  1086. assert(Scope && "Expected scope");
  1087. assert(isCanonical(Name) && "Expected canonical MDString");
  1088. DEFINE_GETIMPL_LOOKUP(DILocalVariable, (Scope, Name, File, Line, Type, Arg,
  1089. Flags, AlignInBits, Annotations));
  1090. Metadata *Ops[] = {Scope, Name, File, Type, Annotations};
  1091. DEFINE_GETIMPL_STORE(DILocalVariable, (Line, Arg, Flags, AlignInBits), Ops);
  1092. }
  1093. DIVariable::DIVariable(LLVMContext &C, unsigned ID, StorageType Storage,
  1094. signed Line, ArrayRef<Metadata *> Ops,
  1095. uint32_t AlignInBits)
  1096. : DINode(C, ID, Storage, dwarf::DW_TAG_variable, Ops), Line(Line),
  1097. AlignInBits(AlignInBits) {}

std::optional<uint64_t> DIVariable::getSizeInBits() const {
  // This is used by the Verifier so be mindful of broken types.
  const Metadata *RawType = getRawType();
  while (RawType) {
    // Try to get the size directly.
    if (auto *T = dyn_cast<DIType>(RawType))
      if (uint64_t Size = T->getSizeInBits())
        return Size;

    if (auto *DT = dyn_cast<DIDerivedType>(RawType)) {
      // Look at the base type.
      RawType = DT->getRawBaseType();
      continue;
    }

    // Missing type or size.
    break;
  }

  // Fail gracefully.
  return std::nullopt;
}
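
// Illustrative note (not an upstream comment): for a variable whose type chain
// is, e.g., a typedef (a DIDerivedType reporting size 0) wrapping a 32-bit
// basic type, the loop above skips the sizeless typedef, follows the base
// type, and returns 32; if no node in the chain reports a size, the function
// returns std::nullopt.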

DILabel::DILabel(LLVMContext &C, StorageType Storage, unsigned Line,
                 ArrayRef<Metadata *> Ops)
    : DINode(C, DILabelKind, Storage, dwarf::DW_TAG_label, Ops), Line(Line) {}

DILabel *DILabel::getImpl(LLVMContext &Context, Metadata *Scope, MDString *Name,
                          Metadata *File, unsigned Line, StorageType Storage,
                          bool ShouldCreate) {
  assert(Scope && "Expected scope");
  assert(isCanonical(Name) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DILabel, (Scope, Name, File, Line));
  Metadata *Ops[] = {Scope, Name, File};
  DEFINE_GETIMPL_STORE(DILabel, (Line), Ops);
}

DIExpression *DIExpression::getImpl(LLVMContext &Context,
                                    ArrayRef<uint64_t> Elements,
                                    StorageType Storage, bool ShouldCreate) {
  DEFINE_GETIMPL_LOOKUP(DIExpression, (Elements));
  DEFINE_GETIMPL_STORE_NO_OPS(DIExpression, (Elements));
}

bool DIExpression::isEntryValue() const {
  return getNumElements() > 0 && getElement(0) == dwarf::DW_OP_LLVM_entry_value;
}

bool DIExpression::startsWithDeref() const {
  return getNumElements() > 0 && getElement(0) == dwarf::DW_OP_deref;
}

DIAssignID *DIAssignID::getImpl(LLVMContext &Context, StorageType Storage,
                                bool ShouldCreate) {
  // Uniqued DIAssignID are not supported as the instance address *is* the ID.
  assert(Storage != StorageType::Uniqued && "uniqued DIAssignID unsupported");
  return storeImpl(new (0u, Storage) DIAssignID(Context, Storage), Storage);
}

unsigned DIExpression::ExprOperand::getSize() const {
  uint64_t Op = getOp();

  if (Op >= dwarf::DW_OP_breg0 && Op <= dwarf::DW_OP_breg31)
    return 2;

  switch (Op) {
  case dwarf::DW_OP_LLVM_convert:
  case dwarf::DW_OP_LLVM_fragment:
  case dwarf::DW_OP_bregx:
    return 3;
  case dwarf::DW_OP_constu:
  case dwarf::DW_OP_consts:
  case dwarf::DW_OP_deref_size:
  case dwarf::DW_OP_plus_uconst:
  case dwarf::DW_OP_LLVM_tag_offset:
  case dwarf::DW_OP_LLVM_entry_value:
  case dwarf::DW_OP_LLVM_arg:
  case dwarf::DW_OP_regx:
    return 2;
  default:
    return 1;
  }
}
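
// Illustrative note: getSize() is the number of uint64_t elements the operator
// occupies, including its arguments. For example, in the element sequence
// {DW_OP_constu, 42, DW_OP_stack_value}, DW_OP_constu has size 2 (opcode plus
// one argument) and DW_OP_stack_value has size 1.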

bool DIExpression::isValid() const {
  for (auto I = expr_op_begin(), E = expr_op_end(); I != E; ++I) {
    // Check that there's space for the operand.
    if (I->get() + I->getSize() > E->get())
      return false;

    uint64_t Op = I->getOp();
    if ((Op >= dwarf::DW_OP_reg0 && Op <= dwarf::DW_OP_reg31) ||
        (Op >= dwarf::DW_OP_breg0 && Op <= dwarf::DW_OP_breg31))
      return true;

    // Check that the operand is valid.
    switch (Op) {
    default:
      return false;
    case dwarf::DW_OP_LLVM_fragment:
      // A fragment operator must appear at the end.
      return I->get() + I->getSize() == E->get();
    case dwarf::DW_OP_stack_value: {
      // Must be the last one or followed by a DW_OP_LLVM_fragment.
      if (I->get() + I->getSize() == E->get())
        break;
      auto J = I;
      if ((++J)->getOp() != dwarf::DW_OP_LLVM_fragment)
        return false;
      break;
    }
    case dwarf::DW_OP_swap: {
      // Must be more than one implicit element on the stack.
      // FIXME: A better way to implement this would be to add a local variable
      // that keeps track of the stack depth and introduce something like a
      // DW_LLVM_OP_implicit_location as a placeholder for the location this
      // DIExpression is attached to, or else pass the number of implicit stack
      // elements into isValid.
      if (getNumElements() == 1)
        return false;
      break;
    }
    case dwarf::DW_OP_LLVM_entry_value: {
      // An entry value operator must appear at the beginning or immediately
      // following `DW_OP_LLVM_arg 0`, and the number of operations it covers
      // can currently only be 1, because we support only entry values of a
      // simple register location. One reason for this is that we currently
      // can't calculate the size of the resulting DWARF block for other
      // expressions.
      auto FirstOp = expr_op_begin();
      if (FirstOp->getOp() == dwarf::DW_OP_LLVM_arg && FirstOp->getArg(0) == 0)
        ++FirstOp;
      return I->get() == FirstOp->get() && I->getArg(0) == 1;
    }
    case dwarf::DW_OP_LLVM_implicit_pointer:
    case dwarf::DW_OP_LLVM_convert:
    case dwarf::DW_OP_LLVM_arg:
    case dwarf::DW_OP_LLVM_tag_offset:
    case dwarf::DW_OP_constu:
    case dwarf::DW_OP_plus_uconst:
    case dwarf::DW_OP_plus:
    case dwarf::DW_OP_minus:
    case dwarf::DW_OP_mul:
    case dwarf::DW_OP_div:
    case dwarf::DW_OP_mod:
    case dwarf::DW_OP_or:
    case dwarf::DW_OP_and:
    case dwarf::DW_OP_xor:
    case dwarf::DW_OP_shl:
    case dwarf::DW_OP_shr:
    case dwarf::DW_OP_shra:
    case dwarf::DW_OP_deref:
    case dwarf::DW_OP_deref_size:
    case dwarf::DW_OP_xderef:
    case dwarf::DW_OP_lit0:
    case dwarf::DW_OP_not:
    case dwarf::DW_OP_dup:
    case dwarf::DW_OP_regx:
    case dwarf::DW_OP_bregx:
    case dwarf::DW_OP_push_object_address:
    case dwarf::DW_OP_over:
    case dwarf::DW_OP_consts:
      break;
    }
  }
  return true;
}

bool DIExpression::isImplicit() const {
  if (!isValid())
    return false;

  if (getNumElements() == 0)
    return false;

  for (const auto &It : expr_ops()) {
    switch (It.getOp()) {
    default:
      break;
    case dwarf::DW_OP_stack_value:
    case dwarf::DW_OP_LLVM_tag_offset:
      return true;
    }
  }

  return false;
}

bool DIExpression::isComplex() const {
  if (!isValid())
    return false;

  if (getNumElements() == 0)
    return false;

  // If there are any elements other than fragment or tag_offset, then some
  // kind of complex computation occurs.
  for (const auto &It : expr_ops()) {
    switch (It.getOp()) {
    case dwarf::DW_OP_LLVM_tag_offset:
    case dwarf::DW_OP_LLVM_fragment:
    case dwarf::DW_OP_LLVM_arg:
      continue;
    default:
      return true;
    }
  }

  return false;
}

bool DIExpression::isSingleLocationExpression() const {
  if (!isValid())
    return false;

  if (getNumElements() == 0)
    return true;

  auto ExprOpBegin = expr_ops().begin();
  auto ExprOpEnd = expr_ops().end();
  if (ExprOpBegin->getOp() == dwarf::DW_OP_LLVM_arg)
    ++ExprOpBegin;

  return !std::any_of(ExprOpBegin, ExprOpEnd, [](auto Op) {
    return Op.getOp() == dwarf::DW_OP_LLVM_arg;
  });
}

const DIExpression *
DIExpression::convertToUndefExpression(const DIExpression *Expr) {
  SmallVector<uint64_t, 3> UndefOps;
  if (auto FragmentInfo = Expr->getFragmentInfo()) {
    UndefOps.append({dwarf::DW_OP_LLVM_fragment, FragmentInfo->OffsetInBits,
                     FragmentInfo->SizeInBits});
  }
  return DIExpression::get(Expr->getContext(), UndefOps);
}

const DIExpression *
DIExpression::convertToVariadicExpression(const DIExpression *Expr) {
  if (any_of(Expr->expr_ops(), [](auto ExprOp) {
        return ExprOp.getOp() == dwarf::DW_OP_LLVM_arg;
      }))
    return Expr;
  SmallVector<uint64_t> NewOps;
  NewOps.reserve(Expr->getNumElements() + 2);
  NewOps.append({dwarf::DW_OP_LLVM_arg, 0});
  NewOps.append(Expr->elements_begin(), Expr->elements_end());
  return DIExpression::get(Expr->getContext(), NewOps);
}
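
// Illustrative example: a non-variadic expression {DW_OP_deref,
// DW_OP_stack_value} becomes {DW_OP_LLVM_arg, 0, DW_OP_deref,
// DW_OP_stack_value}; an expression that already contains DW_OP_LLVM_arg is
// returned unchanged.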

std::optional<const DIExpression *>
DIExpression::convertToNonVariadicExpression(const DIExpression *Expr) {
  // Check for `isValid` covered by `isSingleLocationExpression`.
  if (!Expr->isSingleLocationExpression())
    return std::nullopt;

  // An empty expression is already non-variadic.
  if (!Expr->getNumElements())
    return Expr;

  auto ElementsBegin = Expr->elements_begin();
  // If Expr does not have a leading DW_OP_LLVM_arg then we don't need to do
  // anything.
  if (*ElementsBegin != dwarf::DW_OP_LLVM_arg)
    return Expr;

  SmallVector<uint64_t> NonVariadicOps(
      make_range(ElementsBegin + 2, Expr->elements_end()));
  return DIExpression::get(Expr->getContext(), NonVariadicOps);
}
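
// Illustrative example: {DW_OP_LLVM_arg, 0, DW_OP_deref} converts back to
// {DW_OP_deref}, while an expression that refers to more than one location
// operand (and is therefore not a single-location expression) yields
// std::nullopt.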

void DIExpression::canonicalizeExpressionOps(SmallVectorImpl<uint64_t> &Ops,
                                             const DIExpression *Expr,
                                             bool IsIndirect) {
  // If Expr is not already variadic, insert the implied `DW_OP_LLVM_arg 0`
  // to the existing expression ops.
  if (none_of(Expr->expr_ops(), [](auto ExprOp) {
        return ExprOp.getOp() == dwarf::DW_OP_LLVM_arg;
      }))
    Ops.append({dwarf::DW_OP_LLVM_arg, 0});
  // If Expr is not indirect, we only need to insert the expression elements and
  // we're done.
  if (!IsIndirect) {
    Ops.append(Expr->elements_begin(), Expr->elements_end());
    return;
  }
  // If Expr is indirect, insert the implied DW_OP_deref at the end of the
  // expression but before DW_OP_{stack_value, LLVM_fragment} if they are
  // present.
  for (auto Op : Expr->expr_ops()) {
    if (Op.getOp() == dwarf::DW_OP_stack_value ||
        Op.getOp() == dwarf::DW_OP_LLVM_fragment) {
      Ops.push_back(dwarf::DW_OP_deref);
      IsIndirect = false;
    }
    Op.appendToVector(Ops);
  }
  if (IsIndirect)
    Ops.push_back(dwarf::DW_OP_deref);
}

bool DIExpression::isEqualExpression(const DIExpression *FirstExpr,
                                     bool FirstIndirect,
                                     const DIExpression *SecondExpr,
                                     bool SecondIndirect) {
  SmallVector<uint64_t> FirstOps;
  DIExpression::canonicalizeExpressionOps(FirstOps, FirstExpr, FirstIndirect);
  SmallVector<uint64_t> SecondOps;
  DIExpression::canonicalizeExpressionOps(SecondOps, SecondExpr,
                                          SecondIndirect);
  return FirstOps == SecondOps;
}
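
// Illustrative example: an indirect, empty, non-variadic expression compares
// equal to a direct, variadic expression {DW_OP_LLVM_arg, 0, DW_OP_deref},
// since both canonicalize to {DW_OP_LLVM_arg, 0, DW_OP_deref}.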

std::optional<DIExpression::FragmentInfo>
DIExpression::getFragmentInfo(expr_op_iterator Start, expr_op_iterator End) {
  for (auto I = Start; I != End; ++I)
    if (I->getOp() == dwarf::DW_OP_LLVM_fragment) {
      DIExpression::FragmentInfo Info = {I->getArg(1), I->getArg(0)};
      return Info;
    }
  return std::nullopt;
}

void DIExpression::appendOffset(SmallVectorImpl<uint64_t> &Ops,
                                int64_t Offset) {
  if (Offset > 0) {
    Ops.push_back(dwarf::DW_OP_plus_uconst);
    Ops.push_back(Offset);
  } else if (Offset < 0) {
    Ops.push_back(dwarf::DW_OP_constu);
    // Avoid UB when encountering LLONG_MIN, because in 2's complement
    // abs(LLONG_MIN) is LLONG_MAX+1.
    uint64_t AbsMinusOne = -(Offset + 1);
    Ops.push_back(AbsMinusOne + 1);
    Ops.push_back(dwarf::DW_OP_minus);
  }
}
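
// Illustrative example: appendOffset(Ops, 16) appends {DW_OP_plus_uconst, 16},
// appendOffset(Ops, -8) appends {DW_OP_constu, 8, DW_OP_minus}, and an offset
// of 0 appends nothing. The -(Offset + 1) + 1 computation keeps the negation
// of INT64_MIN representable in uint64_t.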

bool DIExpression::extractIfOffset(int64_t &Offset) const {
  if (getNumElements() == 0) {
    Offset = 0;
    return true;
  }

  if (getNumElements() == 2 && Elements[0] == dwarf::DW_OP_plus_uconst) {
    Offset = Elements[1];
    return true;
  }

  if (getNumElements() == 3 && Elements[0] == dwarf::DW_OP_constu) {
    if (Elements[2] == dwarf::DW_OP_plus) {
      Offset = Elements[1];
      return true;
    }
    if (Elements[2] == dwarf::DW_OP_minus) {
      Offset = -Elements[1];
      return true;
    }
  }

  return false;
}
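
// Illustrative example: the expression {DW_OP_constu, 8, DW_OP_minus} yields
// Offset = -8 and returns true; an expression such as {DW_OP_deref} returns
// false and leaves Offset untouched.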

bool DIExpression::hasAllLocationOps(unsigned N) const {
  SmallDenseSet<uint64_t, 4> SeenOps;
  for (auto ExprOp : expr_ops())
    if (ExprOp.getOp() == dwarf::DW_OP_LLVM_arg)
      SeenOps.insert(ExprOp.getArg(0));
  for (uint64_t Idx = 0; Idx < N; ++Idx)
    if (!is_contained(SeenOps, Idx))
      return false;
  return true;
}

const DIExpression *DIExpression::extractAddressClass(const DIExpression *Expr,
                                                      unsigned &AddrClass) {
  // FIXME: This seems fragile. Nothing that verifies that these elements
  // actually map to ops and not operands.
  const unsigned PatternSize = 4;
  if (Expr->Elements.size() >= PatternSize &&
      Expr->Elements[PatternSize - 4] == dwarf::DW_OP_constu &&
      Expr->Elements[PatternSize - 2] == dwarf::DW_OP_swap &&
      Expr->Elements[PatternSize - 1] == dwarf::DW_OP_xderef) {
    AddrClass = Expr->Elements[PatternSize - 3];

    if (Expr->Elements.size() == PatternSize)
      return nullptr;
    return DIExpression::get(Expr->getContext(),
                             ArrayRef(&*Expr->Elements.begin(),
                                      Expr->Elements.size() - PatternSize));
  }
  return Expr;
}

DIExpression *DIExpression::prepend(const DIExpression *Expr, uint8_t Flags,
                                    int64_t Offset) {
  SmallVector<uint64_t, 8> Ops;
  if (Flags & DIExpression::DerefBefore)
    Ops.push_back(dwarf::DW_OP_deref);

  appendOffset(Ops, Offset);
  if (Flags & DIExpression::DerefAfter)
    Ops.push_back(dwarf::DW_OP_deref);

  bool StackValue = Flags & DIExpression::StackValue;
  bool EntryValue = Flags & DIExpression::EntryValue;

  return prependOpcodes(Expr, Ops, StackValue, EntryValue);
}
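
// Illustrative example: prepend(Expr, DIExpression::DerefBefore, 8) builds the
// prefix {DW_OP_deref, DW_OP_plus_uconst, 8} and hands it to prependOpcodes,
// so the dereference and offset are applied before Expr's existing operations.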

DIExpression *DIExpression::appendOpsToArg(const DIExpression *Expr,
                                           ArrayRef<uint64_t> Ops,
                                           unsigned ArgNo, bool StackValue) {
  assert(Expr && "Can't add ops to this expression");

  // Handle non-variadic intrinsics by prepending the opcodes.
  if (!any_of(Expr->expr_ops(),
              [](auto Op) { return Op.getOp() == dwarf::DW_OP_LLVM_arg; })) {
    assert(ArgNo == 0 &&
           "Location Index must be 0 for a non-variadic expression.");
    SmallVector<uint64_t, 8> NewOps(Ops.begin(), Ops.end());
    return DIExpression::prependOpcodes(Expr, NewOps, StackValue);
  }

  SmallVector<uint64_t, 8> NewOps;
  for (auto Op : Expr->expr_ops()) {
    // A DW_OP_stack_value comes at the end, but before a DW_OP_LLVM_fragment.
    if (StackValue) {
      if (Op.getOp() == dwarf::DW_OP_stack_value)
        StackValue = false;
      else if (Op.getOp() == dwarf::DW_OP_LLVM_fragment) {
        NewOps.push_back(dwarf::DW_OP_stack_value);
        StackValue = false;
      }
    }
    Op.appendToVector(NewOps);
    if (Op.getOp() == dwarf::DW_OP_LLVM_arg && Op.getArg(0) == ArgNo)
      NewOps.insert(NewOps.end(), Ops.begin(), Ops.end());
  }
  if (StackValue)
    NewOps.push_back(dwarf::DW_OP_stack_value);

  return DIExpression::get(Expr->getContext(), NewOps);
}
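
// Illustrative example: appending {DW_OP_plus_uconst, 4} to argument 1 of
// {DW_OP_LLVM_arg, 0, DW_OP_LLVM_arg, 1, DW_OP_plus} produces
// {DW_OP_LLVM_arg, 0, DW_OP_LLVM_arg, 1, DW_OP_plus_uconst, 4, DW_OP_plus},
// i.e. the new ops are spliced in immediately after each use of that argument.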

DIExpression *DIExpression::replaceArg(const DIExpression *Expr,
                                       uint64_t OldArg, uint64_t NewArg) {
  assert(Expr && "Can't replace args in this expression");

  SmallVector<uint64_t, 8> NewOps;

  for (auto Op : Expr->expr_ops()) {
    if (Op.getOp() != dwarf::DW_OP_LLVM_arg || Op.getArg(0) < OldArg) {
      Op.appendToVector(NewOps);
      continue;
    }
    NewOps.push_back(dwarf::DW_OP_LLVM_arg);
    uint64_t Arg = Op.getArg(0) == OldArg ? NewArg : Op.getArg(0);
    // OldArg has been deleted from the Op list, so decrement all indices
    // greater than it.
    if (Arg > OldArg)
      --Arg;
    NewOps.push_back(Arg);
  }
  return DIExpression::get(Expr->getContext(), NewOps);
}
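
// Illustrative example: replaceArg(Expr, /*OldArg=*/2, /*NewArg=*/0) on
// {DW_OP_LLVM_arg, 2, DW_OP_LLVM_arg, 3, DW_OP_plus} rewrites argument 2 to 0
// and shifts argument 3 down to 2, yielding
// {DW_OP_LLVM_arg, 0, DW_OP_LLVM_arg, 2, DW_OP_plus}.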

DIExpression *DIExpression::prependOpcodes(const DIExpression *Expr,
                                           SmallVectorImpl<uint64_t> &Ops,
                                           bool StackValue, bool EntryValue) {
  assert(Expr && "Can't prepend ops to this expression");

  if (EntryValue) {
    Ops.push_back(dwarf::DW_OP_LLVM_entry_value);
    // Use a block size of 1 for the target register operand. The
    // DWARF backend currently cannot emit entry values with a block
    // size > 1.
    Ops.push_back(1);
  }

  // If there are no ops to prepend, do not even add the DW_OP_stack_value.
  if (Ops.empty())
    StackValue = false;
  for (auto Op : Expr->expr_ops()) {
    // A DW_OP_stack_value comes at the end, but before a DW_OP_LLVM_fragment.
    if (StackValue) {
      if (Op.getOp() == dwarf::DW_OP_stack_value)
        StackValue = false;
      else if (Op.getOp() == dwarf::DW_OP_LLVM_fragment) {
        Ops.push_back(dwarf::DW_OP_stack_value);
        StackValue = false;
      }
    }
    Op.appendToVector(Ops);
  }
  if (StackValue)
    Ops.push_back(dwarf::DW_OP_stack_value);
  return DIExpression::get(Expr->getContext(), Ops);
}

DIExpression *DIExpression::append(const DIExpression *Expr,
                                   ArrayRef<uint64_t> Ops) {
  assert(Expr && !Ops.empty() && "Can't append ops to this expression");

  // Copy Expr's current op list.
  SmallVector<uint64_t, 16> NewOps;
  for (auto Op : Expr->expr_ops()) {
    // Append new opcodes before DW_OP_{stack_value, LLVM_fragment}.
    if (Op.getOp() == dwarf::DW_OP_stack_value ||
        Op.getOp() == dwarf::DW_OP_LLVM_fragment) {
      NewOps.append(Ops.begin(), Ops.end());

      // Ensure that the new opcodes are only appended once.
      Ops = std::nullopt;
    }
    Op.appendToVector(NewOps);
  }
  NewOps.append(Ops.begin(), Ops.end());
  auto *result = DIExpression::get(Expr->getContext(), NewOps);
  assert(result->isValid() && "concatenated expression is not valid");
  return result;
}

DIExpression *DIExpression::appendToStack(const DIExpression *Expr,
                                          ArrayRef<uint64_t> Ops) {
  assert(Expr && !Ops.empty() && "Can't append ops to this expression");
  assert(none_of(Ops,
                 [](uint64_t Op) {
                   return Op == dwarf::DW_OP_stack_value ||
                          Op == dwarf::DW_OP_LLVM_fragment;
                 }) &&
         "Can't append this op");

  // Append a DW_OP_deref after Expr's current op list if it's non-empty and
  // has no DW_OP_stack_value.
  //
  // Match .* DW_OP_stack_value (DW_OP_LLVM_fragment A B)?.
  std::optional<FragmentInfo> FI = Expr->getFragmentInfo();
  unsigned DropUntilStackValue = FI ? 3 : 0;
  ArrayRef<uint64_t> ExprOpsBeforeFragment =
      Expr->getElements().drop_back(DropUntilStackValue);
  bool NeedsDeref = (Expr->getNumElements() > DropUntilStackValue) &&
                    (ExprOpsBeforeFragment.back() != dwarf::DW_OP_stack_value);
  bool NeedsStackValue = NeedsDeref || ExprOpsBeforeFragment.empty();

  // Append a DW_OP_deref after Expr's current op list if needed, then append
  // the new ops, and finally ensure that a single DW_OP_stack_value is present.
  SmallVector<uint64_t, 16> NewOps;
  if (NeedsDeref)
    NewOps.push_back(dwarf::DW_OP_deref);
  NewOps.append(Ops.begin(), Ops.end());
  if (NeedsStackValue)
    NewOps.push_back(dwarf::DW_OP_stack_value);
  return DIExpression::append(Expr, NewOps);
}
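
// Illustrative example: appending {DW_OP_plus_uconst, 4} to the memory-location
// expression {DW_OP_plus_uconst, 8} yields
// {DW_OP_plus_uconst, 8, DW_OP_deref, DW_OP_plus_uconst, 4, DW_OP_stack_value}:
// the deref materializes the old location's value on the DWARF stack before
// the new computation, and the result is marked as a stack value.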

std::optional<DIExpression *> DIExpression::createFragmentExpression(
    const DIExpression *Expr, unsigned OffsetInBits, unsigned SizeInBits) {
  SmallVector<uint64_t, 8> Ops;
  // Track whether it's safe to split the value at the top of the DWARF stack,
  // assuming that it'll be used as an implicit location value.
  bool CanSplitValue = true;
  // Copy over the expression, but leave off any trailing DW_OP_LLVM_fragment.
  if (Expr) {
    for (auto Op : Expr->expr_ops()) {
      switch (Op.getOp()) {
      default:
        break;
      case dwarf::DW_OP_shr:
      case dwarf::DW_OP_shra:
      case dwarf::DW_OP_shl:
      case dwarf::DW_OP_plus:
      case dwarf::DW_OP_plus_uconst:
      case dwarf::DW_OP_minus:
        // We can't safely split arithmetic or shift operations into multiple
        // fragments because we can't express carry-over between fragments.
        //
        // FIXME: We *could* preserve the lowest fragment of a constant offset
        // operation if the offset fits into SizeInBits.
        CanSplitValue = false;
        break;
      case dwarf::DW_OP_deref:
      case dwarf::DW_OP_deref_size:
      case dwarf::DW_OP_deref_type:
      case dwarf::DW_OP_xderef:
      case dwarf::DW_OP_xderef_size:
      case dwarf::DW_OP_xderef_type:
        // Preceding arithmetic operations have been applied to compute an
        // address. It's okay to split the value loaded from that address.
        CanSplitValue = true;
        break;
      case dwarf::DW_OP_stack_value:
        // Bail if this expression computes a value that cannot be split.
        if (!CanSplitValue)
          return std::nullopt;
        break;
      case dwarf::DW_OP_LLVM_fragment: {
        // Make the new offset point into the existing fragment.
        uint64_t FragmentOffsetInBits = Op.getArg(0);
        uint64_t FragmentSizeInBits = Op.getArg(1);
        (void)FragmentSizeInBits;
        assert((OffsetInBits + SizeInBits <= FragmentSizeInBits) &&
               "new fragment outside of original fragment");
        OffsetInBits += FragmentOffsetInBits;
        continue;
      }
      }
      Op.appendToVector(Ops);
    }
  }
  assert(Expr && "Unknown DIExpression");
  assert((!Expr->isImplicit() || CanSplitValue) && "Expr can't be split");
  Ops.push_back(dwarf::DW_OP_LLVM_fragment);
  Ops.push_back(OffsetInBits);
  Ops.push_back(SizeInBits);
  return DIExpression::get(Expr->getContext(), Ops);
}
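
// Illustrative example: createFragmentExpression(Expr, 32, 32) on an empty
// expression produces {DW_OP_LLVM_fragment, 32, 32}, describing bits [32, 64)
// of the variable. If Expr already carries a fragment, the new offset is
// rebased into it; if Expr computes an implicit value with arithmetic that
// cannot be split per fragment, std::nullopt is returned.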

std::pair<DIExpression *, const ConstantInt *>
DIExpression::constantFold(const ConstantInt *CI) {
  // Copy the APInt so we can modify it.
  APInt NewInt = CI->getValue();
  SmallVector<uint64_t, 8> Ops;

  // Fold operators only at the beginning of the expression.
  bool First = true;
  bool Changed = false;
  for (auto Op : expr_ops()) {
    switch (Op.getOp()) {
    default:
      // We fold only the leading part of the expression; if we get to a part
      // that we're going to copy unchanged, and haven't done any folding,
      // then the entire expression is unchanged and we can return early.
      if (!Changed)
        return {this, CI};
      First = false;
      break;
    case dwarf::DW_OP_LLVM_convert:
      if (!First)
        break;
      Changed = true;
      if (Op.getArg(1) == dwarf::DW_ATE_signed)
        NewInt = NewInt.sextOrTrunc(Op.getArg(0));
      else {
        assert(Op.getArg(1) == dwarf::DW_ATE_unsigned && "Unexpected operand");
        NewInt = NewInt.zextOrTrunc(Op.getArg(0));
      }
      continue;
    }
    Op.appendToVector(Ops);
  }
  if (!Changed)
    return {this, CI};
  return {DIExpression::get(getContext(), Ops),
          ConstantInt::get(getContext(), NewInt)};
}
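
// Illustrative example: folding a 32-bit ConstantInt of value -1 through a
// leading {DW_OP_LLVM_convert, 64, DW_ATE_signed} sign-extends the constant to
// a 64-bit -1 and drops that convert from the returned expression, leaving any
// remaining (unfolded) ops intact; if no leading converts are found, the
// original {expression, constant} pair is returned unchanged.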

uint64_t DIExpression::getNumLocationOperands() const {
  uint64_t Result = 0;
  for (auto ExprOp : expr_ops())
    if (ExprOp.getOp() == dwarf::DW_OP_LLVM_arg)
      Result = std::max(Result, ExprOp.getArg(0) + 1);
  assert(hasAllLocationOps(Result) &&
         "Expression is missing one or more location operands.");
  return Result;
}

std::optional<DIExpression::SignedOrUnsignedConstant>
DIExpression::isConstant() const {

  // Recognize signed and unsigned constants.
  // A signed constant can be represented as DW_OP_consts C DW_OP_stack_value
  // (DW_OP_LLVM_fragment of Len).
  // An unsigned constant can be represented as
  // DW_OP_constu C DW_OP_stack_value (DW_OP_LLVM_fragment of Len).

  if ((getNumElements() != 2 && getNumElements() != 3 &&
       getNumElements() != 6) ||
      (getElement(0) != dwarf::DW_OP_consts &&
       getElement(0) != dwarf::DW_OP_constu))
    return std::nullopt;

  if (getNumElements() == 2 && getElement(0) == dwarf::DW_OP_consts)
    return SignedOrUnsignedConstant::SignedConstant;

  if ((getNumElements() == 3 && getElement(2) != dwarf::DW_OP_stack_value) ||
      (getNumElements() == 6 && (getElement(2) != dwarf::DW_OP_stack_value ||
                                 getElement(3) != dwarf::DW_OP_LLVM_fragment)))
    return std::nullopt;
  return getElement(0) == dwarf::DW_OP_constu
             ? SignedOrUnsignedConstant::UnsignedConstant
             : SignedOrUnsignedConstant::SignedConstant;
}
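
// Illustrative example: {DW_OP_constu, 42, DW_OP_stack_value} is reported as
// an UnsignedConstant, {DW_OP_consts, C, DW_OP_stack_value,
// DW_OP_LLVM_fragment, 0, 32} as a SignedConstant, and anything with extra
// computation (e.g. a trailing DW_OP_plus) yields std::nullopt.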

DIExpression::ExtOps DIExpression::getExtOps(unsigned FromSize, unsigned ToSize,
                                             bool Signed) {
  dwarf::TypeKind TK = Signed ? dwarf::DW_ATE_signed : dwarf::DW_ATE_unsigned;
  DIExpression::ExtOps Ops{{dwarf::DW_OP_LLVM_convert, FromSize, TK,
                            dwarf::DW_OP_LLVM_convert, ToSize, TK}};
  return Ops;
}

DIExpression *DIExpression::appendExt(const DIExpression *Expr,
                                      unsigned FromSize, unsigned ToSize,
                                      bool Signed) {
  return appendToStack(Expr, getExtOps(FromSize, ToSize, Signed));
}
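
// Illustrative example: getExtOps(8, 32, /*Signed=*/true) returns
// {DW_OP_LLVM_convert, 8, DW_ATE_signed, DW_OP_LLVM_convert, 32,
//  DW_ATE_signed}, i.e. a sign extension from 8 to 32 bits, and appendExt
// pushes that conversion onto Expr's DWARF stack via appendToStack.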

DIGlobalVariableExpression *
DIGlobalVariableExpression::getImpl(LLVMContext &Context, Metadata *Variable,
                                    Metadata *Expression, StorageType Storage,
                                    bool ShouldCreate) {
  DEFINE_GETIMPL_LOOKUP(DIGlobalVariableExpression, (Variable, Expression));
  Metadata *Ops[] = {Variable, Expression};
  DEFINE_GETIMPL_STORE_NO_CONSTRUCTOR_ARGS(DIGlobalVariableExpression, Ops);
}

DIObjCProperty::DIObjCProperty(LLVMContext &C, StorageType Storage,
                               unsigned Line, unsigned Attributes,
                               ArrayRef<Metadata *> Ops)
    : DINode(C, DIObjCPropertyKind, Storage, dwarf::DW_TAG_APPLE_property, Ops),
      Line(Line), Attributes(Attributes) {}

DIObjCProperty *DIObjCProperty::getImpl(
    LLVMContext &Context, MDString *Name, Metadata *File, unsigned Line,
    MDString *GetterName, MDString *SetterName, unsigned Attributes,
    Metadata *Type, StorageType Storage, bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");
  assert(isCanonical(GetterName) && "Expected canonical MDString");
  assert(isCanonical(SetterName) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DIObjCProperty, (Name, File, Line, GetterName,
                                         SetterName, Attributes, Type));
  Metadata *Ops[] = {Name, File, GetterName, SetterName, Type};
  DEFINE_GETIMPL_STORE(DIObjCProperty, (Line, Attributes), Ops);
}

DIImportedEntity *DIImportedEntity::getImpl(LLVMContext &Context, unsigned Tag,
                                            Metadata *Scope, Metadata *Entity,
                                            Metadata *File, unsigned Line,
                                            MDString *Name, Metadata *Elements,
                                            StorageType Storage,
                                            bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DIImportedEntity,
                        (Tag, Scope, Entity, File, Line, Name, Elements));
  Metadata *Ops[] = {Scope, Entity, Name, File, Elements};
  DEFINE_GETIMPL_STORE(DIImportedEntity, (Tag, Line), Ops);
}

DIMacro *DIMacro::getImpl(LLVMContext &Context, unsigned MIType, unsigned Line,
                          MDString *Name, MDString *Value, StorageType Storage,
                          bool ShouldCreate) {
  assert(isCanonical(Name) && "Expected canonical MDString");
  DEFINE_GETIMPL_LOOKUP(DIMacro, (MIType, Line, Name, Value));
  Metadata *Ops[] = {Name, Value};
  DEFINE_GETIMPL_STORE(DIMacro, (MIType, Line), Ops);
}

DIMacroFile *DIMacroFile::getImpl(LLVMContext &Context, unsigned MIType,
                                  unsigned Line, Metadata *File,
                                  Metadata *Elements, StorageType Storage,
                                  bool ShouldCreate) {
  DEFINE_GETIMPL_LOOKUP(DIMacroFile, (MIType, Line, File, Elements));
  Metadata *Ops[] = {File, Elements};
  DEFINE_GETIMPL_STORE(DIMacroFile, (MIType, Line), Ops);
}

DIArgList *DIArgList::getImpl(LLVMContext &Context,
                              ArrayRef<ValueAsMetadata *> Args,
                              StorageType Storage, bool ShouldCreate) {
  DEFINE_GETIMPL_LOOKUP(DIArgList, (Args));
  DEFINE_GETIMPL_STORE_NO_OPS(DIArgList, (Args));
}

void DIArgList::handleChangedOperand(void *Ref, Metadata *New) {
  ValueAsMetadata **OldVMPtr = static_cast<ValueAsMetadata **>(Ref);
  assert((!New || isa<ValueAsMetadata>(New)) &&
         "DIArgList must be passed a ValueAsMetadata");
  untrack();
  bool Uniq = isUniqued();
  if (Uniq) {
    // We need to update the uniqueness once the Args are updated since they
    // form the key to the DIArgLists store.
    eraseFromStore();
  }
  ValueAsMetadata *NewVM = cast_or_null<ValueAsMetadata>(New);
  for (ValueAsMetadata *&VM : Args) {
    if (&VM == OldVMPtr) {
      if (NewVM)
        VM = NewVM;
      else
        VM = ValueAsMetadata::get(UndefValue::get(VM->getValue()->getType()));
    }
  }
  if (Uniq) {
    if (uniquify() != this)
      storeDistinctInContext();
  }
  track();
}

void DIArgList::track() {
  for (ValueAsMetadata *&VAM : Args)
    if (VAM)
      MetadataTracking::track(&VAM, *VAM, *this);
}

void DIArgList::untrack() {
  for (ValueAsMetadata *&VAM : Args)
    if (VAM)
      MetadataTracking::untrack(&VAM, *VAM);
}

void DIArgList::dropAllReferences() {
  untrack();
  Args.clear();
  MDNode::dropAllReferences();
}