  1. //===- VarLocBasedImpl.cpp - Tracking Debug Value MIs with VarLoc class----===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. ///
  9. /// \file VarLocBasedImpl.cpp
  10. ///
  11. /// LiveDebugValues is an optimistic "available expressions" dataflow
  12. /// algorithm. The set of expressions is the set of machine locations
  13. /// (registers, spill slots, constants, and target indices) that a variable
  14. /// fragment might be located, qualified by a DIExpression and indirect-ness
  15. /// flag, while each variable is identified by a DebugVariable object. The
  16. /// availability of an expression begins when a DBG_VALUE instruction specifies
  17. /// the location of a DebugVariable, and continues until that location is
  18. /// clobbered or re-specified by a different DBG_VALUE for the same
  19. /// DebugVariable.
  20. ///
  21. /// The output of LiveDebugValues is additional DBG_VALUE instructions,
  22. /// placed to extend variable locations as far as they're available. This file
  23. /// and the VarLocBasedLDV class are an implementation that explicitly tracks
  24. /// locations, using the VarLoc class.
  25. ///
  26. /// The canonical "available expressions" problem doesn't have expression
  27. /// clobbering, instead when a variable is re-assigned, any expressions using
  28. /// that variable get invalidated. LiveDebugValues can map onto "available
  29. /// expressions" by having every register represented by a variable, which is
  30. /// used in an expression that becomes available at a DBG_VALUE instruction.
  31. /// When the register is clobbered, its variable is effectively reassigned, and
  32. /// expressions computed from it become unavailable. A similar construct is
  33. /// needed when a DebugVariable has its location re-specified, to invalidate
  34. /// all other locations for that DebugVariable.
  35. ///
  36. /// Using the dataflow analysis to compute the available expressions, we create
  37. /// a DBG_VALUE at the beginning of each block where the expression is
  38. /// live-in. This propagates variable locations into every basic block where
  39. /// the location can be determined, rather than only having DBG_VALUEs in blocks
  40. /// where locations are specified due to an assignment or some optimization.
  41. /// Movements of values between registers and spill slots are annotated with
  42. /// DBG_VALUEs too, to track variable values between locations. All this allows
  43. /// DbgEntityHistoryCalculator to focus on only the locations within individual
  44. /// blocks, facilitating testing and improving modularity.
  45. ///
  46. /// We follow an optimistic dataflow approach, with this lattice:
  47. ///
  48. /// \verbatim
  49. ///                 ┬ "Unknown"
  50. ///                      |
  51. ///                      v
  52. ///                    True
  53. ///                      |
  54. ///                      v
  55. ///                 ⊥ False
  56. /// \endverbatim With "True" signifying that the expression is available (and
  57. /// thus a DebugVariable's location is the corresponding register), while
  58. /// "False" signifies that the expression is unavailable. "Unknown"s never
  59. /// survive to the end of the analysis (see below).
  60. ///
  61. /// Formally, all DebugVariable locations that are live-out of a block are
  62. /// initialized to \top. A block's live-in values take the meet of the lattice
  63. /// values of its predecessors' live-outs, except for the entry block, where
  64. /// all live-ins are \bot. The usual dataflow propagation occurs: the transfer
  65. /// function for a block assigns an expression for a DebugVariable to be "True"
  66. /// if a DBG_VALUE in the block specifies it; "False" if the location is
  67. /// clobbered; or the live-in value if it is unaffected by the block. We
  68. /// visit each block in reverse post order until a fixed point is reached. The
  69. /// solution produced is maximal.
  70. ///
  71. /// Intuitively, we start by assuming that every expression / variable location
  72. /// is at least "True", and then propagate "False" from the entry block and any
  73. /// clobbers until there are no more changes to make. This gives us an accurate
  74. /// solution because all incorrect locations will have a "False" propagated into
  75. /// them. It also gives us a solution that copes well with loops by assuming
  76. /// that variable locations are live-through every loop, and then removing those
  77. /// that are not through dataflow.
  78. ///
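/// For illustration only (a hypothetical diamond CFG; the register name and
/// block labels below are made up, not taken from this file): suppose the
/// entry block issues DBG_VALUE $rbx, "x", one successor clobbers $rbx, and
/// the other leaves it alone:
///
/// \verbatim
///            entry: DBG_VALUE $rbx, "x"     ("x" in $rbx -> True)
///            /                        \
///   bb.1: $rbx = ...            bb.2: (no effect on $rbx)
///   ("x" in $rbx -> False)      ("x" in $rbx -> True)
///            \                        /
///                     bb.3 (exit)
/// \endverbatim
///
/// The live-in value of bb.3 is the meet of False and True, which is False,
/// so no DBG_VALUE placing "x" in $rbx is emitted at the start of bb.3. Had
/// bb.1 not clobbered $rbx, the meet would be True and the location would be
/// extended into bb.3.
///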
  79. /// Within LiveDebugValues: each variable location is represented by a
  80. /// VarLoc object that identifies the source variable, the set of
  81. /// machine-locations that currently describe it (a single location for
  82. /// DBG_VALUE or multiple for DBG_VALUE_LIST), and the DBG_VALUE inst that
  83. /// specifies the location. Each VarLoc is indexed in the (function-scope) \p
  84. /// VarLocMap, giving each VarLoc a set of unique indexes, each of which
  85. /// corresponds to one of the VarLoc's machine-locations and can be used to
  86. /// lookup the VarLoc in the VarLocMap. Rather than operate directly on machine
  87. /// locations, the dataflow analysis in this pass identifies locations by their
  88. /// indices in the VarLocMap, meaning all the variable locations in a block can
  89. /// be described by a sparse vector of VarLocMap indices.
  90. ///
  91. /// All the storage for the dataflow analysis is local to the ExtendRanges
  92. /// method and passed down to helper methods. "OutLocs" and "InLocs" record the
  93. /// in and out lattice values for each block. "OpenRanges" maintains a list of
  94. /// variable locations and, with the "process" method, evaluates the transfer
  95. /// function of each block. "flushPendingLocs" installs debug value instructions
  96. /// for each live-in location at the start of blocks, while "Transfers" records
  97. /// transfers of values between machine-locations.
  98. ///
  99. /// We avoid explicitly representing the "Unknown" (\top) lattice value in the
  100. /// implementation. Instead, unvisited blocks implicitly have all lattice
  101. /// values set as "Unknown". After being visited, there will be a path back to
  102. /// the entry block where the lattice value is "False", and as the transfer
  103. /// function cannot make new "Unknown" locations, there are no scenarios where
  104. /// a block can have an "Unknown" location after being visited. Similarly, we
  105. /// don't enumerate all possible variable locations before exploring the
  106. /// function: when a new location is discovered, all blocks previously explored
  107. /// were implicitly "False" but unrecorded, and become explicitly "False" when
  108. /// a new VarLoc is created with its bit not set in predecessor InLocs or
  109. /// OutLocs.
  110. ///
  111. //===----------------------------------------------------------------------===//
  112. #include "LiveDebugValues.h"
  113. #include "llvm/ADT/CoalescingBitVector.h"
  114. #include "llvm/ADT/DenseMap.h"
  115. #include "llvm/ADT/PostOrderIterator.h"
  116. #include "llvm/ADT/SmallPtrSet.h"
  117. #include "llvm/ADT/SmallSet.h"
  118. #include "llvm/ADT/SmallVector.h"
  119. #include "llvm/ADT/Statistic.h"
  120. #include "llvm/BinaryFormat/Dwarf.h"
  121. #include "llvm/CodeGen/LexicalScopes.h"
  122. #include "llvm/CodeGen/MachineBasicBlock.h"
  123. #include "llvm/CodeGen/MachineFunction.h"
  124. #include "llvm/CodeGen/MachineInstr.h"
  125. #include "llvm/CodeGen/MachineInstrBuilder.h"
  126. #include "llvm/CodeGen/MachineMemOperand.h"
  127. #include "llvm/CodeGen/MachineOperand.h"
  128. #include "llvm/CodeGen/PseudoSourceValue.h"
  129. #include "llvm/CodeGen/TargetFrameLowering.h"
  130. #include "llvm/CodeGen/TargetInstrInfo.h"
  131. #include "llvm/CodeGen/TargetLowering.h"
  132. #include "llvm/CodeGen/TargetPassConfig.h"
  133. #include "llvm/CodeGen/TargetRegisterInfo.h"
  134. #include "llvm/CodeGen/TargetSubtargetInfo.h"
  135. #include "llvm/Config/llvm-config.h"
  136. #include "llvm/IR/DebugInfoMetadata.h"
  137. #include "llvm/IR/DebugLoc.h"
  138. #include "llvm/IR/Function.h"
  139. #include "llvm/MC/MCRegisterInfo.h"
  140. #include "llvm/Support/Casting.h"
  141. #include "llvm/Support/Debug.h"
  142. #include "llvm/Support/TypeSize.h"
  143. #include "llvm/Support/raw_ostream.h"
  144. #include "llvm/Target/TargetMachine.h"
  145. #include <algorithm>
  146. #include <cassert>
  147. #include <cstdint>
  148. #include <functional>
  149. #include <map>
  150. #include <optional>
  151. #include <queue>
  152. #include <tuple>
  153. #include <utility>
  154. #include <vector>
  155. using namespace llvm;
  156. #define DEBUG_TYPE "livedebugvalues"
  157. STATISTIC(NumInserted, "Number of DBG_VALUE instructions inserted");
  158. /// Return true if \p Op is a register other than the stack or frame register,
  159. /// otherwise return false. This is used to avoid basing debug entry values on
  160. /// such registers, since we do not support that at the moment.
  161. static bool isRegOtherThanSPAndFP(const MachineOperand &Op,
  162. const MachineInstr &MI,
  163. const TargetRegisterInfo *TRI) {
  164. if (!Op.isReg())
  165. return false;
  166. const MachineFunction *MF = MI.getParent()->getParent();
  167. const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
  168. Register SP = TLI->getStackPointerRegisterToSaveRestore();
  169. Register FP = TRI->getFrameRegister(*MF);
  170. Register Reg = Op.getReg();
  171. return Reg && Reg != SP && Reg != FP;
  172. }
  173. namespace {
  174. // Max out the number of statically allocated elements in DefinedRegsSet, as
  175. // this prevents fallback to std::set::count() operations.
  176. using DefinedRegsSet = SmallSet<Register, 32>;
  177. // The IDs in this set correspond to MachineLocs in VarLocs, as well as VarLocs
  178. // that represent Entry Values; every VarLoc in the set will also appear
  179. // exactly once at Location=0.
  180. // As a result, each VarLoc may appear more than once in this "set", but each
  181. // range corresponding to a Reg, SpillLoc, or EntryValue type will still be a
  182. // "true" set (i.e. each VarLoc may appear only once), and the range Location=0
  183. // is the set of all VarLocs.
  184. using VarLocSet = CoalescingBitVector<uint64_t>;
  185. /// A type-checked pair of {Register Location (or 0), Index}, used to index
  186. /// into a \ref VarLocMap. This can be efficiently converted to a 64-bit int
  187. /// for insertion into a \ref VarLocSet, and efficiently converted back. The
  188. /// type-checker helps ensure that the conversions aren't lossy.
  189. ///
  190. /// Why encode a location /into/ the VarLocMap index? This makes it possible
  191. /// to find the open VarLocs killed by a register def very quickly. This is a
  192. /// performance-critical operation for LiveDebugValues.
  193. struct LocIndex {
  194. using u32_location_t = uint32_t;
  195. using u32_index_t = uint32_t;
  196. u32_location_t Location; // Physical registers live in the range [1;2^30) (see
  197. // \ref MCRegister), so we have plenty of range left
  198. // here to encode non-register locations.
  199. u32_index_t Index;
  200. /// The location that has an entry for every VarLoc in the map.
  201. static constexpr u32_location_t kUniversalLocation = 0;
  202. /// The first location that is reserved for VarLocs with locations of kind
  203. /// RegisterKind.
  204. static constexpr u32_location_t kFirstRegLocation = 1;
  205. /// The first location greater than 0 that is not reserved for VarLocs with
  206. /// locations of kind RegisterKind.
  207. static constexpr u32_location_t kFirstInvalidRegLocation = 1 << 30;
  208. /// A special location reserved for VarLocs with locations of kind
  209. /// SpillLocKind.
  210. static constexpr u32_location_t kSpillLocation = kFirstInvalidRegLocation;
  211. /// A special location reserved for VarLocs of kind EntryValueBackupKind and
  212. /// EntryValueCopyBackupKind.
  213. static constexpr u32_location_t kEntryValueBackupLocation =
  214. kFirstInvalidRegLocation + 1;
  215. /// A special location reserved for VarLocs with locations of kind
  216. /// WasmLocKind.
  217. /// TODO Placing all Wasm target index locations in this single kWasmLocation
  218. /// may cause slowdown in compilation time in very large functions. Consider
  219. /// giving each target index/offset pair its own u32_location_t if this
  220. /// becomes a problem.
  221. static constexpr u32_location_t kWasmLocation = kFirstInvalidRegLocation + 2;
  222. LocIndex(u32_location_t Location, u32_index_t Index)
  223. : Location(Location), Index(Index) {}
  224. uint64_t getAsRawInteger() const {
  225. return (static_cast<uint64_t>(Location) << 32) | Index;
  226. }
  227. template<typename IntT> static LocIndex fromRawInteger(IntT ID) {
  228. static_assert(std::is_unsigned_v<IntT> && sizeof(ID) == sizeof(uint64_t),
  229. "Cannot convert raw integer to LocIndex");
  230. return {static_cast<u32_location_t>(ID >> 32),
  231. static_cast<u32_index_t>(ID)};
  232. }
  233. /// Get the start of the interval reserved for VarLocs of kind RegisterKind
  234. /// which reside in \p Reg. The end is at rawIndexForReg(Reg+1)-1.
  235. static uint64_t rawIndexForReg(Register Reg) {
  236. return LocIndex(Reg, 0).getAsRawInteger();
  237. }
  238. /// Return a range covering all set indices in the interval reserved for
  239. /// \p Location in \p Set.
  240. static auto indexRangeForLocation(const VarLocSet &Set,
  241. u32_location_t Location) {
  242. uint64_t Start = LocIndex(Location, 0).getAsRawInteger();
  243. uint64_t End = LocIndex(Location + 1, 0).getAsRawInteger();
  244. return Set.half_open_range(Start, End);
  245. }
  246. };
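// Illustrative note (the concrete numbers below are made-up examples, not
// values used by this pass): a VarLoc whose machine location is register 37
// and whose per-register index is 5 is keyed as
//   LocIndex(37, 5).getAsRawInteger() == (37ULL << 32) | 5
//                                     == 0x0000002500000005.
// Because the location number occupies the high 32 bits, every VarLoc
// residing in register 37 falls in the half-open interval
// [rawIndexForReg(37), rawIndexForReg(38)), which is exactly what
// indexRangeForLocation() walks when a def of that register is processed.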
  247. // Simple Set for storing all the VarLoc Indices at a Location bucket.
  248. using VarLocsInRange = SmallSet<LocIndex::u32_index_t, 32>;
  249. // Vector of all `LocIndex`s for a given VarLoc; the same Location should not
  250. // appear in any two of these, as each VarLoc appears at most once in any
  251. // Location bucket.
  252. using LocIndices = SmallVector<LocIndex, 2>;
  253. class VarLocBasedLDV : public LDVImpl {
  254. private:
  255. const TargetRegisterInfo *TRI;
  256. const TargetInstrInfo *TII;
  257. const TargetFrameLowering *TFI;
  258. TargetPassConfig *TPC;
  259. BitVector CalleeSavedRegs;
  260. LexicalScopes LS;
  261. VarLocSet::Allocator Alloc;
  262. const MachineInstr *LastNonDbgMI;
  263. enum struct TransferKind { TransferCopy, TransferSpill, TransferRestore };
  264. using FragmentInfo = DIExpression::FragmentInfo;
  265. using OptFragmentInfo = std::optional<DIExpression::FragmentInfo>;
  266. /// A pair of debug variable and value location.
  267. struct VarLoc {
  268. // The location at which a spilled variable resides. It consists of a
  269. // register and an offset.
  270. struct SpillLoc {
  271. unsigned SpillBase;
  272. StackOffset SpillOffset;
  273. bool operator==(const SpillLoc &Other) const {
  274. return SpillBase == Other.SpillBase && SpillOffset == Other.SpillOffset;
  275. }
  276. bool operator!=(const SpillLoc &Other) const {
  277. return !(*this == Other);
  278. }
  279. };
  280. // Target indices used for wasm-specific locations.
  281. struct WasmLoc {
  282. // One of TargetIndex values defined in WebAssembly.h. We deal with
  283. // local-related TargetIndex in this analysis (TI_LOCAL and
  284. // TI_LOCAL_INDIRECT). Stack operands (TI_OPERAND_STACK) will be handled
  285. // separately by the WebAssemblyDebugFixup pass, and we don't associate debug
  286. // info with values in global operands (TI_GLOBAL_RELOC) at the moment.
  287. int Index;
  288. int64_t Offset;
  289. bool operator==(const WasmLoc &Other) const {
  290. return Index == Other.Index && Offset == Other.Offset;
  291. }
  292. bool operator!=(const WasmLoc &Other) const { return !(*this == Other); }
  293. };
  294. /// Identity of the variable at this location.
  295. const DebugVariable Var;
  296. /// The expression applied to this location.
  297. const DIExpression *Expr;
  298. /// DBG_VALUE to clone var/expr information from if this location
  299. /// is moved.
  300. const MachineInstr &MI;
  301. enum class MachineLocKind {
  302. InvalidKind = 0,
  303. RegisterKind,
  304. SpillLocKind,
  305. ImmediateKind,
  306. WasmLocKind
  307. };
  308. enum class EntryValueLocKind {
  309. NonEntryValueKind = 0,
  310. EntryValueKind,
  311. EntryValueBackupKind,
  312. EntryValueCopyBackupKind
  313. } EVKind = EntryValueLocKind::NonEntryValueKind;
  314. /// The value location. Stored separately to avoid repeatedly
  315. /// extracting it from MI.
  316. union MachineLocValue {
  317. uint64_t RegNo;
  318. SpillLoc SpillLocation;
  319. uint64_t Hash;
  320. int64_t Immediate;
  321. const ConstantFP *FPImm;
  322. const ConstantInt *CImm;
  323. WasmLoc WasmLocation;
  324. MachineLocValue() : Hash(0) {}
  325. };
  326. /// A single machine location; its Kind is either a register, spill
  327. /// location, or immediate value.
  328. /// If the VarLoc is not a NonEntryValueKind, then it will use only a
  329. /// single MachineLoc of RegisterKind.
  330. struct MachineLoc {
  331. MachineLocKind Kind;
  332. MachineLocValue Value;
  333. bool operator==(const MachineLoc &Other) const {
  334. if (Kind != Other.Kind)
  335. return false;
  336. switch (Kind) {
  337. case MachineLocKind::SpillLocKind:
  338. return Value.SpillLocation == Other.Value.SpillLocation;
  339. case MachineLocKind::WasmLocKind:
  340. return Value.WasmLocation == Other.Value.WasmLocation;
  341. case MachineLocKind::RegisterKind:
  342. case MachineLocKind::ImmediateKind:
  343. return Value.Hash == Other.Value.Hash;
  344. default:
  345. llvm_unreachable("Invalid kind");
  346. }
  347. }
  348. bool operator<(const MachineLoc &Other) const {
  349. switch (Kind) {
  350. case MachineLocKind::SpillLocKind:
  351. return std::make_tuple(
  352. Kind, Value.SpillLocation.SpillBase,
  353. Value.SpillLocation.SpillOffset.getFixed(),
  354. Value.SpillLocation.SpillOffset.getScalable()) <
  355. std::make_tuple(
  356. Other.Kind, Other.Value.SpillLocation.SpillBase,
  357. Other.Value.SpillLocation.SpillOffset.getFixed(),
  358. Other.Value.SpillLocation.SpillOffset.getScalable());
  359. case MachineLocKind::WasmLocKind:
  360. return std::make_tuple(Kind, Value.WasmLocation.Index,
  361. Value.WasmLocation.Offset) <
  362. std::make_tuple(Other.Kind, Other.Value.WasmLocation.Index,
  363. Other.Value.WasmLocation.Offset);
  364. case MachineLocKind::RegisterKind:
  365. case MachineLocKind::ImmediateKind:
  366. return std::tie(Kind, Value.Hash) <
  367. std::tie(Other.Kind, Other.Value.Hash);
  368. default:
  369. llvm_unreachable("Invalid kind");
  370. }
  371. }
  372. };
  373. /// The set of machine locations used to determine the variable's value, in
  374. /// conjunction with Expr. Initially populated with MI's debug operands,
  375. /// but may be transformed independently afterwards.
  376. SmallVector<MachineLoc, 8> Locs;
  377. /// Used to map the index of each location in Locs back to the index of its
  378. /// original debug operand in MI. Used when multiple location operands are
  379. /// coalesced and the original MI's operands need to be accessed while
  380. /// emitting a debug value.
  381. SmallVector<unsigned, 8> OrigLocMap;
  382. VarLoc(const MachineInstr &MI)
  383. : Var(MI.getDebugVariable(), MI.getDebugExpression(),
  384. MI.getDebugLoc()->getInlinedAt()),
  385. Expr(MI.getDebugExpression()), MI(MI) {
  386. assert(MI.isDebugValue() && "not a DBG_VALUE");
  387. assert((MI.isDebugValueList() || MI.getNumOperands() == 4) &&
  388. "malformed DBG_VALUE");
  389. for (const MachineOperand &Op : MI.debug_operands()) {
  390. MachineLoc ML = GetLocForOp(Op);
  391. auto It = find(Locs, ML);
  392. if (It == Locs.end()) {
  393. Locs.push_back(ML);
  394. OrigLocMap.push_back(MI.getDebugOperandIndex(&Op));
  395. } else {
  396. // ML duplicates an element in Locs; replace references to Op
  397. // with references to the duplicating element.
  398. unsigned OpIdx = Locs.size();
  399. unsigned DuplicatingIdx = std::distance(Locs.begin(), It);
  400. Expr = DIExpression::replaceArg(Expr, OpIdx, DuplicatingIdx);
  401. }
  402. }
  403. // We create the debug entry values from the factory functions rather
  404. // than from this ctor.
  405. assert(EVKind != EntryValueLocKind::EntryValueKind &&
  406. !isEntryBackupLoc());
  407. }
  408. static MachineLoc GetLocForOp(const MachineOperand &Op) {
  409. MachineLocKind Kind;
  410. MachineLocValue Loc;
  411. if (Op.isReg()) {
  412. Kind = MachineLocKind::RegisterKind;
  413. Loc.RegNo = Op.getReg();
  414. } else if (Op.isImm()) {
  415. Kind = MachineLocKind::ImmediateKind;
  416. Loc.Immediate = Op.getImm();
  417. } else if (Op.isFPImm()) {
  418. Kind = MachineLocKind::ImmediateKind;
  419. Loc.FPImm = Op.getFPImm();
  420. } else if (Op.isCImm()) {
  421. Kind = MachineLocKind::ImmediateKind;
  422. Loc.CImm = Op.getCImm();
  423. } else if (Op.isTargetIndex()) {
  424. Kind = MachineLocKind::WasmLocKind;
  425. Loc.WasmLocation = {Op.getIndex(), Op.getOffset()};
  426. } else
  427. llvm_unreachable("Invalid Op kind for MachineLoc.");
  428. return {Kind, Loc};
  429. }
  430. /// Take the variable and machine-location in DBG_VALUE MI, and build an
  431. /// entry location using the given expression.
  432. static VarLoc CreateEntryLoc(const MachineInstr &MI,
  433. const DIExpression *EntryExpr, Register Reg) {
  434. VarLoc VL(MI);
  435. assert(VL.Locs.size() == 1 &&
  436. VL.Locs[0].Kind == MachineLocKind::RegisterKind);
  437. VL.EVKind = EntryValueLocKind::EntryValueKind;
  438. VL.Expr = EntryExpr;
  439. VL.Locs[0].Value.RegNo = Reg;
  440. return VL;
  441. }
  442. /// Take the variable and machine-location from the DBG_VALUE (from the
  443. /// function entry), and build an entry value backup location. The backup
  444. /// location will turn into the normal location if the backup is valid at
  445. /// the time of the primary location clobbering.
  446. static VarLoc CreateEntryBackupLoc(const MachineInstr &MI,
  447. const DIExpression *EntryExpr) {
  448. VarLoc VL(MI);
  449. assert(VL.Locs.size() == 1 &&
  450. VL.Locs[0].Kind == MachineLocKind::RegisterKind);
  451. VL.EVKind = EntryValueLocKind::EntryValueBackupKind;
  452. VL.Expr = EntryExpr;
  453. return VL;
  454. }
  455. /// Take the variable and machine-location from the DBG_VALUE (from the
  456. /// function entry), and build a copy of an entry value backup location by
  457. /// setting the register location to NewReg.
  458. static VarLoc CreateEntryCopyBackupLoc(const MachineInstr &MI,
  459. const DIExpression *EntryExpr,
  460. Register NewReg) {
  461. VarLoc VL(MI);
  462. assert(VL.Locs.size() == 1 &&
  463. VL.Locs[0].Kind == MachineLocKind::RegisterKind);
  464. VL.EVKind = EntryValueLocKind::EntryValueCopyBackupKind;
  465. VL.Expr = EntryExpr;
  466. VL.Locs[0].Value.RegNo = NewReg;
  467. return VL;
  468. }
  469. /// Copy the register location in DBG_VALUE MI, updating the register to
  470. /// be NewReg.
  471. static VarLoc CreateCopyLoc(const VarLoc &OldVL, const MachineLoc &OldML,
  472. Register NewReg) {
  473. VarLoc VL = OldVL;
  474. for (MachineLoc &ML : VL.Locs)
  475. if (ML == OldML) {
  476. ML.Kind = MachineLocKind::RegisterKind;
  477. ML.Value.RegNo = NewReg;
  478. return VL;
  479. }
  480. llvm_unreachable("Should have found OldML in new VarLoc.");
  481. }
  482. /// Take the variable described by DBG_VALUE* MI, and create a VarLoc
  483. /// locating it in the specified spill location.
  484. static VarLoc CreateSpillLoc(const VarLoc &OldVL, const MachineLoc &OldML,
  485. unsigned SpillBase, StackOffset SpillOffset) {
  486. VarLoc VL = OldVL;
  487. for (MachineLoc &ML : VL.Locs)
  488. if (ML == OldML) {
  489. ML.Kind = MachineLocKind::SpillLocKind;
  490. ML.Value.SpillLocation = {SpillBase, SpillOffset};
  491. return VL;
  492. }
  493. llvm_unreachable("Should have found OldML in new VarLoc.");
  494. }
  495. /// Create a DBG_VALUE representing this VarLoc in the given function.
  496. /// Copies variable-specific information such as DILocalVariable and
  497. /// inlining information from the original DBG_VALUE instruction, which may
  498. /// have been several transfers ago.
  499. MachineInstr *BuildDbgValue(MachineFunction &MF) const {
  500. assert(!isEntryBackupLoc() &&
  501. "Tried to produce DBG_VALUE for backup VarLoc");
  502. const DebugLoc &DbgLoc = MI.getDebugLoc();
  503. bool Indirect = MI.isIndirectDebugValue();
  504. const auto &IID = MI.getDesc();
  505. const DILocalVariable *Var = MI.getDebugVariable();
  506. NumInserted++;
  507. const DIExpression *DIExpr = Expr;
  508. SmallVector<MachineOperand, 8> MOs;
  509. for (unsigned I = 0, E = Locs.size(); I < E; ++I) {
  510. MachineLocKind LocKind = Locs[I].Kind;
  511. MachineLocValue Loc = Locs[I].Value;
  512. const MachineOperand &Orig = MI.getDebugOperand(OrigLocMap[I]);
  513. switch (LocKind) {
  514. case MachineLocKind::RegisterKind:
  515. // An entry value is a register location -- but with an updated
  516. // expression. The register location of such a DBG_VALUE is always the
  517. // one from the entry DBG_VALUE; it does not matter if the entry value
  518. // was copied into another register due to some optimizations.
  519. // Non-entry value register locations are like the source
  520. // DBG_VALUE, but with the register number from this VarLoc.
  521. MOs.push_back(MachineOperand::CreateReg(
  522. EVKind == EntryValueLocKind::EntryValueKind ? Orig.getReg()
  523. : Register(Loc.RegNo),
  524. false));
  525. break;
  526. case MachineLocKind::SpillLocKind: {
  527. // Spills are indirect DBG_VALUEs, with a base register and offset.
  528. // Use the original DBG_VALUE's expression to build the spill location
  529. // on top of. FIXME: spill locations created before this pass runs
  530. // are not recognized, and not handled here.
  531. unsigned Base = Loc.SpillLocation.SpillBase;
  532. auto *TRI = MF.getSubtarget().getRegisterInfo();
  533. if (MI.isNonListDebugValue()) {
  534. auto Deref = Indirect ? DIExpression::DerefAfter : 0;
  535. DIExpr = TRI->prependOffsetExpression(
  536. DIExpr, DIExpression::ApplyOffset | Deref,
  537. Loc.SpillLocation.SpillOffset);
  538. Indirect = true;
  539. } else {
  540. SmallVector<uint64_t, 4> Ops;
  541. TRI->getOffsetOpcodes(Loc.SpillLocation.SpillOffset, Ops);
  542. Ops.push_back(dwarf::DW_OP_deref);
  543. DIExpr = DIExpression::appendOpsToArg(DIExpr, Ops, I);
  544. }
  545. MOs.push_back(MachineOperand::CreateReg(Base, false));
  546. break;
  547. }
  548. case MachineLocKind::ImmediateKind: {
  549. MOs.push_back(Orig);
  550. break;
  551. }
  552. case MachineLocKind::WasmLocKind: {
  553. MOs.push_back(Orig);
  554. break;
  555. }
  556. case MachineLocKind::InvalidKind:
  557. llvm_unreachable("Tried to produce DBG_VALUE for invalid VarLoc");
  558. }
  559. }
  560. return BuildMI(MF, DbgLoc, IID, Indirect, MOs, Var, DIExpr);
  561. }
  562. /// Is the Loc field a constant or constant object?
  563. bool isConstant(MachineLocKind Kind) const {
  564. return Kind == MachineLocKind::ImmediateKind;
  565. }
  566. /// Check if the Loc field is an entry backup location.
  567. bool isEntryBackupLoc() const {
  568. return EVKind == EntryValueLocKind::EntryValueBackupKind ||
  569. EVKind == EntryValueLocKind::EntryValueCopyBackupKind;
  570. }
  571. /// If this variable is described by register \p Reg holding the entry
  572. /// value, return true.
  573. bool isEntryValueBackupReg(Register Reg) const {
  574. return EVKind == EntryValueLocKind::EntryValueBackupKind && usesReg(Reg);
  575. }
  576. /// If this variable is described by register \p Reg holding a copy of the
  577. /// entry value, return true.
  578. bool isEntryValueCopyBackupReg(Register Reg) const {
  579. return EVKind == EntryValueLocKind::EntryValueCopyBackupKind &&
  580. usesReg(Reg);
  581. }
  582. /// If this variable is described in whole or part by \p Reg, return true.
  583. bool usesReg(Register Reg) const {
  584. MachineLoc RegML;
  585. RegML.Kind = MachineLocKind::RegisterKind;
  586. RegML.Value.RegNo = Reg;
  587. return is_contained(Locs, RegML);
  588. }
  589. /// Return the index in Locs of the register location that uses \p Reg.
  590. unsigned getRegIdx(Register Reg) const {
  591. for (unsigned Idx = 0; Idx < Locs.size(); ++Idx)
  592. if (Locs[Idx].Kind == MachineLocKind::RegisterKind &&
  593. Register{static_cast<unsigned>(Locs[Idx].Value.RegNo)} == Reg)
  594. return Idx;
  595. llvm_unreachable("Could not find given Reg in Locs");
  596. }
  597. /// If this variable is described in whole or part by 1 or more registers,
  598. /// add each of them to \p Regs and return true.
  599. bool getDescribingRegs(SmallVectorImpl<uint32_t> &Regs) const {
  600. bool AnyRegs = false;
  601. for (const auto &Loc : Locs)
  602. if (Loc.Kind == MachineLocKind::RegisterKind) {
  603. Regs.push_back(Loc.Value.RegNo);
  604. AnyRegs = true;
  605. }
  606. return AnyRegs;
  607. }
  608. bool containsSpillLocs() const {
  609. return any_of(Locs, [](VarLoc::MachineLoc ML) {
  610. return ML.Kind == VarLoc::MachineLocKind::SpillLocKind;
  611. });
  612. }
  613. /// If this variable is described in whole or part by \p SpillLocation,
  614. /// return true.
  615. bool usesSpillLoc(SpillLoc SpillLocation) const {
  616. MachineLoc SpillML;
  617. SpillML.Kind = MachineLocKind::SpillLocKind;
  618. SpillML.Value.SpillLocation = SpillLocation;
  619. return is_contained(Locs, SpillML);
  620. }
  621. /// If this variable is described in whole or part by \p SpillLocation,
  622. /// return the index of that location in Locs.
  623. unsigned getSpillLocIdx(SpillLoc SpillLocation) const {
  624. for (unsigned Idx = 0; Idx < Locs.size(); ++Idx)
  625. if (Locs[Idx].Kind == MachineLocKind::SpillLocKind &&
  626. Locs[Idx].Value.SpillLocation == SpillLocation)
  627. return Idx;
  628. llvm_unreachable("Could not find given SpillLoc in Locs");
  629. }
  630. bool containsWasmLocs() const {
  631. return any_of(Locs, [](VarLoc::MachineLoc ML) {
  632. return ML.Kind == VarLoc::MachineLocKind::WasmLocKind;
  633. });
  634. }
  635. /// If this variable is described in whole or part by \p WasmLocation,
  636. /// return true.
  637. bool usesWasmLoc(WasmLoc WasmLocation) const {
  638. MachineLoc WasmML;
  639. WasmML.Kind = MachineLocKind::WasmLocKind;
  640. WasmML.Value.WasmLocation = WasmLocation;
  641. return is_contained(Locs, WasmML);
  642. }
  643. /// Determine whether the lexical scope of this value's debug location
  644. /// dominates MBB.
  645. bool dominates(LexicalScopes &LS, MachineBasicBlock &MBB) const {
  646. return LS.dominates(MI.getDebugLoc().get(), &MBB);
  647. }
  648. #if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  649. // TRI and TII can be null.
  650. void dump(const TargetRegisterInfo *TRI, const TargetInstrInfo *TII,
  651. raw_ostream &Out = dbgs()) const {
  652. Out << "VarLoc(";
  653. for (const MachineLoc &MLoc : Locs) {
  654. if (Locs.begin() != &MLoc)
  655. Out << ", ";
  656. switch (MLoc.Kind) {
  657. case MachineLocKind::RegisterKind:
  658. Out << printReg(MLoc.Value.RegNo, TRI);
  659. break;
  660. case MachineLocKind::SpillLocKind:
  661. Out << printReg(MLoc.Value.SpillLocation.SpillBase, TRI);
  662. Out << "[" << MLoc.Value.SpillLocation.SpillOffset.getFixed() << " + "
  663. << MLoc.Value.SpillLocation.SpillOffset.getScalable()
  664. << "x vscale"
  665. << "]";
  666. break;
  667. case MachineLocKind::ImmediateKind:
  668. Out << MLoc.Value.Immediate;
  669. break;
  670. case MachineLocKind::WasmLocKind: {
  671. if (TII) {
  672. auto Indices = TII->getSerializableTargetIndices();
  673. auto Found =
  674. find_if(Indices, [&](const std::pair<int, const char *> &I) {
  675. return I.first == MLoc.Value.WasmLocation.Index;
  676. });
  677. assert(Found != Indices.end());
  678. Out << Found->second;
  679. if (MLoc.Value.WasmLocation.Offset > 0)
  680. Out << " + " << MLoc.Value.WasmLocation.Offset;
  681. } else {
  682. Out << "WasmLoc";
  683. }
  684. break;
  685. }
  686. case MachineLocKind::InvalidKind:
  687. llvm_unreachable("Invalid VarLoc in dump method");
  688. }
  689. }
  690. Out << ", \"" << Var.getVariable()->getName() << "\", " << *Expr << ", ";
  691. if (Var.getInlinedAt())
  692. Out << "!" << Var.getInlinedAt()->getMetadataID() << ")\n";
  693. else
  694. Out << "(null))";
  695. if (isEntryBackupLoc())
  696. Out << " (backup loc)\n";
  697. else
  698. Out << "\n";
  699. }
  700. #endif
  701. bool operator==(const VarLoc &Other) const {
  702. return std::tie(EVKind, Var, Expr, Locs) ==
  703. std::tie(Other.EVKind, Other.Var, Other.Expr, Other.Locs);
  704. }
  705. /// This operator guarantees that VarLocs are sorted by Variable first.
  706. bool operator<(const VarLoc &Other) const {
  707. return std::tie(Var, EVKind, Locs, Expr) <
  708. std::tie(Other.Var, Other.EVKind, Other.Locs, Other.Expr);
  709. }
  710. };
  711. #ifndef NDEBUG
  712. using VarVec = SmallVector<VarLoc, 32>;
  713. #endif
  714. /// VarLocMap is used for two things:
  715. /// 1) Assigning LocIndices to a VarLoc. The LocIndices can be used to
  716. /// virtually insert a VarLoc into a VarLocSet.
  717. /// 2) Given a LocIndex, look up the unique associated VarLoc.
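///
/// A minimal usage sketch (the variable names below are hypothetical and the
/// snippet is illustrative, not code from this pass):
///
/// \code
///   VarLocMap Map;
///   VarLocSet Live(Alloc);                // CoalescingBitVector of raw IDs
///   LocIndices Ids = Map.insert(VL);      // one LocIndex per location bucket
///   for (LocIndex Id : Ids)
///     Live.set(Id.getAsRawInteger());     // mark the VarLoc as open
///   const VarLoc &Same = Map[Ids.back()]; // back() is the universal-location
///                                         // entry; any of the indices maps
///                                         // back to the same VarLoc
/// \endcode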
  718. class VarLocMap {
  719. /// Map a VarLoc to an index within the vector reserved for its location
  720. /// within Loc2Vars.
  721. std::map<VarLoc, LocIndices> Var2Indices;
  722. /// Map a location to a vector which holds VarLocs which live in that
  723. /// location.
  724. SmallDenseMap<LocIndex::u32_location_t, std::vector<VarLoc>> Loc2Vars;
  725. public:
  726. /// Retrieve LocIndices for \p VL.
  727. LocIndices insert(const VarLoc &VL) {
  728. LocIndices &Indices = Var2Indices[VL];
  729. // If Indices is not empty, VL is already in the map.
  730. if (!Indices.empty())
  731. return Indices;
  732. SmallVector<LocIndex::u32_location_t, 4> Locations;
  733. // LocIndices are determined by EVKind and MLs; each Register has a
  734. // unique location, while all SpillLocs use a single bucket, and any EV
  735. // VarLocs use only the Backup bucket or none at all (except the
  736. // compulsory entry at the universal location index). LocIndices will
  737. // always have an index at the universal location index as the last index.
  738. if (VL.EVKind == VarLoc::EntryValueLocKind::NonEntryValueKind) {
  739. VL.getDescribingRegs(Locations);
  740. assert(all_of(Locations,
  741. [](auto RegNo) {
  742. return RegNo < LocIndex::kFirstInvalidRegLocation;
  743. }) &&
  744. "Physreg out of range?");
  745. if (VL.containsSpillLocs())
  746. Locations.push_back(LocIndex::kSpillLocation);
  747. if (VL.containsWasmLocs())
  748. Locations.push_back(LocIndex::kWasmLocation);
  749. } else if (VL.EVKind != VarLoc::EntryValueLocKind::EntryValueKind) {
  750. LocIndex::u32_location_t Loc = LocIndex::kEntryValueBackupLocation;
  751. Locations.push_back(Loc);
  752. }
  753. Locations.push_back(LocIndex::kUniversalLocation);
  754. for (LocIndex::u32_location_t Location : Locations) {
  755. auto &Vars = Loc2Vars[Location];
  756. Indices.push_back(
  757. {Location, static_cast<LocIndex::u32_index_t>(Vars.size())});
  758. Vars.push_back(VL);
  759. }
  760. return Indices;
  761. }
  762. LocIndices getAllIndices(const VarLoc &VL) const {
  763. auto IndIt = Var2Indices.find(VL);
  764. assert(IndIt != Var2Indices.end() && "VarLoc not tracked");
  765. return IndIt->second;
  766. }
  767. /// Retrieve the unique VarLoc associated with \p ID.
  768. const VarLoc &operator[](LocIndex ID) const {
  769. auto LocIt = Loc2Vars.find(ID.Location);
  770. assert(LocIt != Loc2Vars.end() && "Location not tracked");
  771. return LocIt->second[ID.Index];
  772. }
  773. };
  774. using VarLocInMBB =
  775. SmallDenseMap<const MachineBasicBlock *, std::unique_ptr<VarLocSet>>;
  776. struct TransferDebugPair {
  777. MachineInstr *TransferInst; ///< Instruction where this transfer occurs.
  778. LocIndex LocationID; ///< Location number for the transfer dest.
  779. };
  780. using TransferMap = SmallVector<TransferDebugPair, 4>;
  781. // Types for recording Entry Var Locations emitted by a single MachineInstr,
  782. // as well as recording the MachineInstr which last defined a register.
  783. using InstToEntryLocMap = std::multimap<const MachineInstr *, LocIndex>;
  784. using RegDefToInstMap = DenseMap<Register, MachineInstr *>;
  785. // Types for recording sets of variable fragments that overlap. For a given
  786. // local variable, we record all other fragments of that variable that could
  787. // overlap it, to reduce search time.
  788. using FragmentOfVar =
  789. std::pair<const DILocalVariable *, DIExpression::FragmentInfo>;
  790. using OverlapMap =
  791. DenseMap<FragmentOfVar, SmallVector<DIExpression::FragmentInfo, 1>>;
  792. // Helper while building OverlapMap, a map of all fragments seen for a given
  793. // DILocalVariable.
  794. using VarToFragments =
  795. DenseMap<const DILocalVariable *, SmallSet<FragmentInfo, 4>>;
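// Illustrative example (hypothetical fragments, not data from this pass): if
// variable "v" is described once with a fragment covering bits [0, 32) and
// once with a fragment covering bits [16, 64), the two overlap in [16, 32).
// OverlapMap then maps {v, [0,32)} to a vector containing [16,64) and vice
// versa, so that when a DBG_VALUE re-specifies one fragment,
// OpenRangesSet::erase can also terminate the open range of the other.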
  796. /// Collects all VarLocs from \p CollectFrom. Each unique VarLoc is added
  797. /// to \p Collected once, in order of insertion into \p VarLocIDs.
  798. static void collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected,
  799. const VarLocSet &CollectFrom,
  800. const VarLocMap &VarLocIDs);
  801. /// Get the registers which are used by VarLocs of kind RegisterKind tracked
  802. /// by \p CollectFrom.
  803. void getUsedRegs(const VarLocSet &CollectFrom,
  804. SmallVectorImpl<Register> &UsedRegs) const;
  805. /// This holds the working set of currently open ranges. For fast
  806. /// access, this is done both as a set of VarLocIDs, and a map of
  807. /// DebugVariable to recent VarLocID. Note that a DBG_VALUE ends all
  808. /// previous open ranges for the same variable. In addition, we keep
  809. /// two different maps (Vars/EntryValuesBackupVars), so erase/insert
  810. /// methods act differently depending on whether a VarLoc is a primary
  811. /// location or a backup one. If the VarLoc is a backup location, we
  812. /// erase/insert from the EntryValuesBackupVars map; otherwise we perform
  813. /// the operation on Vars.
  814. class OpenRangesSet {
  815. VarLocSet::Allocator &Alloc;
  816. VarLocSet VarLocs;
  817. // Map the DebugVariable to recent primary location ID.
  818. SmallDenseMap<DebugVariable, LocIndices, 8> Vars;
  819. // Map the DebugVariable to recent backup location ID.
  820. SmallDenseMap<DebugVariable, LocIndices, 8> EntryValuesBackupVars;
  821. OverlapMap &OverlappingFragments;
  822. public:
  823. OpenRangesSet(VarLocSet::Allocator &Alloc, OverlapMap &_OLapMap)
  824. : Alloc(Alloc), VarLocs(Alloc), OverlappingFragments(_OLapMap) {}
  825. const VarLocSet &getVarLocs() const { return VarLocs; }
  826. // Fetches all VarLocs in \p VarLocIDs and inserts them into \p Collected.
  827. // This method is needed to get every VarLoc once, as each VarLoc may have
  828. // multiple indices in a VarLocMap (corresponding to each applicable
  829. // location), but all VarLocs appear exactly once at the universal location
  830. // index.
  831. void getUniqueVarLocs(SmallVectorImpl<VarLoc> &Collected,
  832. const VarLocMap &VarLocIDs) const {
  833. collectAllVarLocs(Collected, VarLocs, VarLocIDs);
  834. }
  835. /// Terminate all open ranges for VL.Var by removing it from the set.
  836. void erase(const VarLoc &VL);
  837. /// Terminate all open ranges listed as indices in \c KillSet with
  838. /// \c Location by removing them from the set.
  839. void erase(const VarLocsInRange &KillSet, const VarLocMap &VarLocIDs,
  840. LocIndex::u32_location_t Location);
  841. /// Insert a new range into the set.
  842. void insert(LocIndices VarLocIDs, const VarLoc &VL);
  843. /// Insert a set of ranges.
  844. void insertFromLocSet(const VarLocSet &ToLoad, const VarLocMap &Map);
  845. std::optional<LocIndices> getEntryValueBackup(DebugVariable Var);
  846. /// Empty the set.
  847. void clear() {
  848. VarLocs.clear();
  849. Vars.clear();
  850. EntryValuesBackupVars.clear();
  851. }
  852. /// Return whether the set is empty or not.
  853. bool empty() const {
  854. assert(Vars.empty() == EntryValuesBackupVars.empty() &&
  855. Vars.empty() == VarLocs.empty() &&
  856. "open ranges are inconsistent");
  857. return VarLocs.empty();
  858. }
  859. /// Get an empty range of VarLoc IDs.
  860. auto getEmptyVarLocRange() const {
  861. return iterator_range<VarLocSet::const_iterator>(getVarLocs().end(),
  862. getVarLocs().end());
  863. }
  864. /// Get all set IDs for VarLocs with MLs of kind RegisterKind in \p Reg.
  865. auto getRegisterVarLocs(Register Reg) const {
  866. return LocIndex::indexRangeForLocation(getVarLocs(), Reg);
  867. }
  868. /// Get all set IDs for VarLocs with MLs of kind SpillLocKind.
  869. auto getSpillVarLocs() const {
  870. return LocIndex::indexRangeForLocation(getVarLocs(),
  871. LocIndex::kSpillLocation);
  872. }
  873. /// Get all set IDs for VarLocs of EVKind EntryValueBackupKind or
  874. /// EntryValueCopyBackupKind.
  875. auto getEntryValueBackupVarLocs() const {
  876. return LocIndex::indexRangeForLocation(
  877. getVarLocs(), LocIndex::kEntryValueBackupLocation);
  878. }
  879. /// Get all set IDs for VarLocs with MLs of kind WasmLocKind.
  880. auto getWasmVarLocs() const {
  881. return LocIndex::indexRangeForLocation(getVarLocs(),
  882. LocIndex::kWasmLocation);
  883. }
  884. };
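// Illustrative sketch of how the accessors above are typically consumed (the
// loop below is an example pattern, not a verbatim excerpt from this pass):
// the ranges yield raw 64-bit IDs that convert back to LocIndex values, which
// in turn look up VarLocs in the VarLocMap.
//
//   for (uint64_t RawID : OpenRanges.getRegisterVarLocs(Reg)) {
//     LocIndex Idx = LocIndex::fromRawInteger(RawID);
//     const VarLoc &VL = VarLocIDs[Idx];
//     // ... VL is (at least partly) located in Reg and may need killing ...
//   }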
  885. /// Collect all VarLoc IDs from \p CollectFrom for VarLocs with MLs of kind
  886. /// RegisterKind which are located in any reg in \p Regs. The IDs for each
  887. /// VarLoc correspond to entries in the universal location bucket, which every
  888. /// VarLoc has exactly 1 entry for. Insert collected IDs into \p Collected.
  889. static void collectIDsForRegs(VarLocsInRange &Collected,
  890. const DefinedRegsSet &Regs,
  891. const VarLocSet &CollectFrom,
  892. const VarLocMap &VarLocIDs);
  893. VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB, VarLocInMBB &Locs) {
  894. std::unique_ptr<VarLocSet> &VLS = Locs[MBB];
  895. if (!VLS)
  896. VLS = std::make_unique<VarLocSet>(Alloc);
  897. return *VLS;
  898. }
  899. const VarLocSet &getVarLocsInMBB(const MachineBasicBlock *MBB,
  900. const VarLocInMBB &Locs) const {
  901. auto It = Locs.find(MBB);
  902. assert(It != Locs.end() && "MBB not in map");
  903. return *It->second;
  904. }
  905. /// Tests whether this instruction is a spill to a stack location.
  906. bool isSpillInstruction(const MachineInstr &MI, MachineFunction *MF);
  907. /// Decide if @MI is a spill instruction and return true if it is. We use 2
  908. /// criteria to make this decision:
  909. /// - Is this instruction a store to a spill slot?
  910. /// - Is there a register operand that is both used and killed?
  911. /// TODO: Store optimization can fold spills into other stores (including
  912. /// other spills). We do not handle this yet (more than one memory operand).
  913. bool isLocationSpill(const MachineInstr &MI, MachineFunction *MF,
  914. Register &Reg);
  915. /// Returns true if the given machine instruction is a debug value which we
  916. /// can emit entry values for.
  917. ///
  918. /// Currently, we generate debug entry values only for parameters that are
  919. /// unmodified throughout the function and located in a register.
  920. bool isEntryValueCandidate(const MachineInstr &MI,
  921. const DefinedRegsSet &Regs) const;
  922. /// If a given instruction is identified as a restore from a spill slot, return
  923. /// the spill location it restores from and set \p Reg to the restored register.
  924. std::optional<VarLoc::SpillLoc> isRestoreInstruction(const MachineInstr &MI,
  925. MachineFunction *MF,
  926. Register &Reg);
  927. /// Given a spill instruction, extract the register and offset used to
  928. /// address the spill location in a target independent way.
  929. VarLoc::SpillLoc extractSpillBaseRegAndOffset(const MachineInstr &MI);
  930. void insertTransferDebugPair(MachineInstr &MI, OpenRangesSet &OpenRanges,
  931. TransferMap &Transfers, VarLocMap &VarLocIDs,
  932. LocIndex OldVarID, TransferKind Kind,
  933. const VarLoc::MachineLoc &OldLoc,
  934. Register NewReg = Register());
  935. void transferDebugValue(const MachineInstr &MI, OpenRangesSet &OpenRanges,
  936. VarLocMap &VarLocIDs,
  937. InstToEntryLocMap &EntryValTransfers,
  938. RegDefToInstMap &RegSetInstrs);
  939. void transferSpillOrRestoreInst(MachineInstr &MI, OpenRangesSet &OpenRanges,
  940. VarLocMap &VarLocIDs, TransferMap &Transfers);
  941. void cleanupEntryValueTransfers(const MachineInstr *MI,
  942. OpenRangesSet &OpenRanges,
  943. VarLocMap &VarLocIDs, const VarLoc &EntryVL,
  944. InstToEntryLocMap &EntryValTransfers);
  945. void removeEntryValue(const MachineInstr &MI, OpenRangesSet &OpenRanges,
  946. VarLocMap &VarLocIDs, const VarLoc &EntryVL,
  947. InstToEntryLocMap &EntryValTransfers,
  948. RegDefToInstMap &RegSetInstrs);
  949. void emitEntryValues(MachineInstr &MI, OpenRangesSet &OpenRanges,
  950. VarLocMap &VarLocIDs,
  951. InstToEntryLocMap &EntryValTransfers,
  952. VarLocsInRange &KillSet);
  953. void recordEntryValue(const MachineInstr &MI,
  954. const DefinedRegsSet &DefinedRegs,
  955. OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs);
  956. void transferRegisterCopy(MachineInstr &MI, OpenRangesSet &OpenRanges,
  957. VarLocMap &VarLocIDs, TransferMap &Transfers);
  958. void transferRegisterDef(MachineInstr &MI, OpenRangesSet &OpenRanges,
  959. VarLocMap &VarLocIDs,
  960. InstToEntryLocMap &EntryValTransfers,
  961. RegDefToInstMap &RegSetInstrs);
  962. void transferWasmDef(MachineInstr &MI, OpenRangesSet &OpenRanges,
  963. VarLocMap &VarLocIDs);
  964. bool transferTerminator(MachineBasicBlock *MBB, OpenRangesSet &OpenRanges,
  965. VarLocInMBB &OutLocs, const VarLocMap &VarLocIDs);
  966. void process(MachineInstr &MI, OpenRangesSet &OpenRanges,
  967. VarLocMap &VarLocIDs, TransferMap &Transfers,
  968. InstToEntryLocMap &EntryValTransfers,
  969. RegDefToInstMap &RegSetInstrs);
  970. void accumulateFragmentMap(MachineInstr &MI, VarToFragments &SeenFragments,
  971. OverlapMap &OLapMap);
  972. bool join(MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
  973. const VarLocMap &VarLocIDs,
  974. SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
  975. SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks);
  976. /// Create DBG_VALUE insts for inlocs that have been propagated but
  977. /// had their instruction creation deferred.
  978. void flushPendingLocs(VarLocInMBB &PendingInLocs, VarLocMap &VarLocIDs);
  979. bool ExtendRanges(MachineFunction &MF, MachineDominatorTree *DomTree,
  980. TargetPassConfig *TPC, unsigned InputBBLimit,
  981. unsigned InputDbgValLimit) override;
  982. public:
  983. /// Default construct and initialize the pass.
  984. VarLocBasedLDV();
  985. ~VarLocBasedLDV();
  986. /// Print to ostream with a message.
  987. void printVarLocInMBB(const MachineFunction &MF, const VarLocInMBB &V,
  988. const VarLocMap &VarLocIDs, const char *msg,
  989. raw_ostream &Out) const;
  990. };
  991. } // end anonymous namespace
  992. //===----------------------------------------------------------------------===//
  993. // Implementation
  994. //===----------------------------------------------------------------------===//
  995. VarLocBasedLDV::VarLocBasedLDV() = default;
  996. VarLocBasedLDV::~VarLocBasedLDV() = default;
  997. /// Erase a variable from the set of open ranges, and additionally erase any
  998. /// fragments that may overlap it. If the VarLoc is a backup location, erase
  999. /// the variable from the EntryValuesBackupVars set, indicating we should stop
  1000. /// tracking its backup entry location. Otherwise, if the VarLoc is primary
  1001. /// location, erase the variable from the Vars set.
  1002. void VarLocBasedLDV::OpenRangesSet::erase(const VarLoc &VL) {
  1003. // Erasure helper.
  1004. auto DoErase = [VL, this](DebugVariable VarToErase) {
  1005. auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
  1006. auto It = EraseFrom->find(VarToErase);
  1007. if (It != EraseFrom->end()) {
  1008. LocIndices IDs = It->second;
  1009. for (LocIndex ID : IDs)
  1010. VarLocs.reset(ID.getAsRawInteger());
  1011. EraseFrom->erase(It);
  1012. }
  1013. };
  1014. DebugVariable Var = VL.Var;
  1015. // Erase the variable/fragment that ends here.
  1016. DoErase(Var);
  1017. // Extract the fragment. Interpret an empty fragment as one that covers all
  1018. // possible bits.
  1019. FragmentInfo ThisFragment = Var.getFragmentOrDefault();
  1020. // There may be fragments that overlap the designated fragment. Look them up
  1021. // in the pre-computed overlap map, and erase them too.
  1022. auto MapIt = OverlappingFragments.find({Var.getVariable(), ThisFragment});
  1023. if (MapIt != OverlappingFragments.end()) {
  1024. for (auto Fragment : MapIt->second) {
  1025. VarLocBasedLDV::OptFragmentInfo FragmentHolder;
  1026. if (!DebugVariable::isDefaultFragment(Fragment))
  1027. FragmentHolder = VarLocBasedLDV::OptFragmentInfo(Fragment);
  1028. DoErase({Var.getVariable(), FragmentHolder, Var.getInlinedAt()});
  1029. }
  1030. }
  1031. }
  1032. void VarLocBasedLDV::OpenRangesSet::erase(const VarLocsInRange &KillSet,
  1033. const VarLocMap &VarLocIDs,
  1034. LocIndex::u32_location_t Location) {
  1035. VarLocSet RemoveSet(Alloc);
  1036. for (LocIndex::u32_index_t ID : KillSet) {
  1037. const VarLoc &VL = VarLocIDs[LocIndex(Location, ID)];
  1038. auto *EraseFrom = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
  1039. EraseFrom->erase(VL.Var);
  1040. LocIndices VLI = VarLocIDs.getAllIndices(VL);
  1041. for (LocIndex ID : VLI)
  1042. RemoveSet.set(ID.getAsRawInteger());
  1043. }
  1044. VarLocs.intersectWithComplement(RemoveSet);
  1045. }
  1046. void VarLocBasedLDV::OpenRangesSet::insertFromLocSet(const VarLocSet &ToLoad,
  1047. const VarLocMap &Map) {
  1048. VarLocsInRange UniqueVarLocIDs;
  1049. DefinedRegsSet Regs;
  1050. Regs.insert(LocIndex::kUniversalLocation);
  1051. collectIDsForRegs(UniqueVarLocIDs, Regs, ToLoad, Map);
  1052. for (uint64_t ID : UniqueVarLocIDs) {
  1053. LocIndex Idx = LocIndex::fromRawInteger(ID);
  1054. const VarLoc &VarL = Map[Idx];
  1055. const LocIndices Indices = Map.getAllIndices(VarL);
  1056. insert(Indices, VarL);
  1057. }
  1058. }
  1059. void VarLocBasedLDV::OpenRangesSet::insert(LocIndices VarLocIDs,
  1060. const VarLoc &VL) {
  1061. auto *InsertInto = VL.isEntryBackupLoc() ? &EntryValuesBackupVars : &Vars;
  1062. for (LocIndex ID : VarLocIDs)
  1063. VarLocs.set(ID.getAsRawInteger());
  1064. InsertInto->insert({VL.Var, VarLocIDs});
  1065. }
  1066. /// Return the Loc ID of an entry value backup location, if it exists for the
  1067. /// variable.
  1068. std::optional<LocIndices>
  1069. VarLocBasedLDV::OpenRangesSet::getEntryValueBackup(DebugVariable Var) {
  1070. auto It = EntryValuesBackupVars.find(Var);
  1071. if (It != EntryValuesBackupVars.end())
  1072. return It->second;
  1073. return std::nullopt;
  1074. }
  1075. void VarLocBasedLDV::collectIDsForRegs(VarLocsInRange &Collected,
  1076. const DefinedRegsSet &Regs,
  1077. const VarLocSet &CollectFrom,
  1078. const VarLocMap &VarLocIDs) {
  1079. assert(!Regs.empty() && "Nothing to collect");
  1080. SmallVector<Register, 32> SortedRegs;
  1081. append_range(SortedRegs, Regs);
  1082. array_pod_sort(SortedRegs.begin(), SortedRegs.end());
  1083. auto It = CollectFrom.find(LocIndex::rawIndexForReg(SortedRegs.front()));
  1084. auto End = CollectFrom.end();
  1085. for (Register Reg : SortedRegs) {
  1086. // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains
  1087. // all possible VarLoc IDs for VarLocs with MLs of kind RegisterKind which
  1088. // live in Reg.
  1089. uint64_t FirstIndexForReg = LocIndex::rawIndexForReg(Reg);
  1090. uint64_t FirstInvalidIndex = LocIndex::rawIndexForReg(Reg + 1);
  1091. It.advanceToLowerBound(FirstIndexForReg);
  1092. // Iterate through that half-open interval and collect all the set IDs.
  1093. for (; It != End && *It < FirstInvalidIndex; ++It) {
  1094. LocIndex ItIdx = LocIndex::fromRawInteger(*It);
  1095. const VarLoc &VL = VarLocIDs[ItIdx];
  1096. LocIndices LI = VarLocIDs.getAllIndices(VL);
  1097. // For now, the back index is always the universal location index.
  1098. assert(LI.back().Location == LocIndex::kUniversalLocation &&
  1099. "Unexpected order of LocIndices for VarLoc; was it inserted into "
  1100. "the VarLocMap correctly?");
  1101. Collected.insert(LI.back().Index);
  1102. }
  1103. if (It == End)
  1104. return;
  1105. }
  1106. }
  1107. void VarLocBasedLDV::getUsedRegs(const VarLocSet &CollectFrom,
  1108. SmallVectorImpl<Register> &UsedRegs) const {
  1109. // All register-based VarLocs are assigned indices greater than or equal to
  1110. // FirstRegIndex.
  1111. uint64_t FirstRegIndex =
  1112. LocIndex::rawIndexForReg(LocIndex::kFirstRegLocation);
  1113. uint64_t FirstInvalidIndex =
  1114. LocIndex::rawIndexForReg(LocIndex::kFirstInvalidRegLocation);
  1115. for (auto It = CollectFrom.find(FirstRegIndex),
  1116. End = CollectFrom.find(FirstInvalidIndex);
  1117. It != End;) {
  1118. // We found a VarLoc ID for a VarLoc that lives in a register. Figure out
  1119. // which register and add it to UsedRegs.
  1120. uint32_t FoundReg = LocIndex::fromRawInteger(*It).Location;
  1121. assert((UsedRegs.empty() || FoundReg != UsedRegs.back()) &&
  1122. "Duplicate used reg");
  1123. UsedRegs.push_back(FoundReg);
  1124. // Skip to the next /set/ register. Note that this finds a lower bound, so
  1125. // even if there aren't any VarLocs living in `FoundReg+1`, we're still
  1126. // guaranteed to move on to the next register (or to end()).
  1127. uint64_t NextRegIndex = LocIndex::rawIndexForReg(FoundReg + 1);
  1128. It.advanceToLowerBound(NextRegIndex);
  1129. }
  1130. }
  1131. //===----------------------------------------------------------------------===//
  1132. // Debug Range Extension Implementation
  1133. //===----------------------------------------------------------------------===//
  1134. #ifndef NDEBUG
  1135. void VarLocBasedLDV::printVarLocInMBB(const MachineFunction &MF,
  1136. const VarLocInMBB &V,
  1137. const VarLocMap &VarLocIDs,
  1138. const char *msg,
  1139. raw_ostream &Out) const {
  1140. Out << '\n' << msg << '\n';
  1141. for (const MachineBasicBlock &BB : MF) {
  1142. if (!V.count(&BB))
  1143. continue;
  1144. const VarLocSet &L = getVarLocsInMBB(&BB, V);
  1145. if (L.empty())
  1146. continue;
  1147. SmallVector<VarLoc, 32> VarLocs;
  1148. collectAllVarLocs(VarLocs, L, VarLocIDs);
  1149. Out << "MBB: " << BB.getNumber() << ":\n";
  1150. for (const VarLoc &VL : VarLocs) {
  1151. Out << " Var: " << VL.Var.getVariable()->getName();
  1152. Out << " MI: ";
  1153. VL.dump(TRI, TII, Out);
  1154. }
  1155. }
  1156. Out << "\n";
  1157. }
  1158. #endif
  1159. VarLocBasedLDV::VarLoc::SpillLoc
  1160. VarLocBasedLDV::extractSpillBaseRegAndOffset(const MachineInstr &MI) {
  1161. assert(MI.hasOneMemOperand() &&
  1162. "Spill instruction does not have exactly one memory operand?");
  1163. auto MMOI = MI.memoperands_begin();
  1164. const PseudoSourceValue *PVal = (*MMOI)->getPseudoValue();
  1165. assert(PVal->kind() == PseudoSourceValue::FixedStack &&
  1166. "Inconsistent memory operand in spill instruction");
  1167. int FI = cast<FixedStackPseudoSourceValue>(PVal)->getFrameIndex();
  1168. const MachineBasicBlock *MBB = MI.getParent();
  1169. Register Reg;
  1170. StackOffset Offset = TFI->getFrameIndexReference(*MBB->getParent(), FI, Reg);
  1171. return {Reg, Offset};
  1172. }
  1173. /// Do cleanup of \p EntryValTransfers created by \p TRInst, by removing the
  1174. /// Transfer, which uses the to-be-deleted \p EntryVL.
  1175. void VarLocBasedLDV::cleanupEntryValueTransfers(
  1176. const MachineInstr *TRInst, OpenRangesSet &OpenRanges, VarLocMap &VarLocIDs,
  1177. const VarLoc &EntryVL, InstToEntryLocMap &EntryValTransfers) {
  1178. if (EntryValTransfers.empty() || TRInst == nullptr)
  1179. return;
  1180. auto TransRange = EntryValTransfers.equal_range(TRInst);
  1181. for (auto TDPair : llvm::make_range(TransRange.first, TransRange.second)) {
  1182. const VarLoc &EmittedEV = VarLocIDs[TDPair.second];
  1183. if (std::tie(EntryVL.Var, EntryVL.Locs[0].Value.RegNo, EntryVL.Expr) ==
  1184. std::tie(EmittedEV.Var, EmittedEV.Locs[0].Value.RegNo,
  1185. EmittedEV.Expr)) {
  1186. OpenRanges.erase(EmittedEV);
  1187. EntryValTransfers.erase(TRInst);
  1188. break;
  1189. }
  1190. }
  1191. }
  1192. /// Try to salvage the debug entry value if we encounter a new debug value
  1193. /// describing the same parameter, otherwise stop tracking the value. Return
  1194. /// true if we should stop tracking the entry value and do the cleanup of
  1195. /// emitted Entry Value Transfers, otherwise return false.
  1196. void VarLocBasedLDV::removeEntryValue(const MachineInstr &MI,
  1197. OpenRangesSet &OpenRanges,
  1198. VarLocMap &VarLocIDs,
  1199. const VarLoc &EntryVL,
  1200. InstToEntryLocMap &EntryValTransfers,
  1201. RegDefToInstMap &RegSetInstrs) {
  1202. // Skip the DBG_VALUE which is the debug entry value itself.
  1203. if (&MI == &EntryVL.MI)
  1204. return;
  1205. // If the parameter's location is not register location, we can not track
  1206. // the entry value any more. It doesn't have the TransferInst which defines
  1207. // register, so no Entry Value Transfers have been emitted already.
  1208. if (!MI.getDebugOperand(0).isReg())
  1209. return;
  1210. // Try to get non-debug instruction responsible for the DBG_VALUE.
  1211. const MachineInstr *TransferInst = nullptr;
  1212. Register Reg = MI.getDebugOperand(0).getReg();
  1213. if (Reg.isValid() && RegSetInstrs.find(Reg) != RegSetInstrs.end())
  1214. TransferInst = RegSetInstrs.find(Reg)->second;
  1215. // Case of the parameter's DBG_VALUE at the start of entry MBB.
  1216. if (!TransferInst && !LastNonDbgMI && MI.getParent()->isEntryBlock())
  1217. return;
  1218. // If the debug expression from the DBG_VALUE is not empty, we can assume the
  1219. // parameter's value has changed indicating that we should stop tracking its
  1220. // entry value as well.
  1221. if (MI.getDebugExpression()->getNumElements() == 0 && TransferInst) {
  1222. // If the DBG_VALUE comes from a copy instruction that copies the entry
  1223. // value, it means the parameter's value has not changed and we should be
  1224. // able to use its entry value.
  1225. // TODO: Try to keep tracking of an entry value if we encounter a propagated
  1226. // DBG_VALUE describing the copy of the entry value. (Propagated entry value
  1227. // does not indicate the parameter modification.)
  1228. auto DestSrc = TII->isCopyInstr(*TransferInst);
  1229. if (DestSrc) {
  1230. const MachineOperand *SrcRegOp, *DestRegOp;
  1231. SrcRegOp = DestSrc->Source;
  1232. DestRegOp = DestSrc->Destination;
  1233. if (Reg == DestRegOp->getReg()) {
  1234. for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
  1235. const VarLoc &VL = VarLocIDs[LocIndex::fromRawInteger(ID)];
  1236. if (VL.isEntryValueCopyBackupReg(Reg) &&
  1237. // Entry Values should not be variadic.
  1238. VL.MI.getDebugOperand(0).getReg() == SrcRegOp->getReg())
  1239. return;
  1240. }
  1241. }
  1242. }
  1243. }
  1244. LLVM_DEBUG(dbgs() << "Deleting a DBG entry value because of: ";
  1245. MI.print(dbgs(), /*IsStandalone*/ false,
  1246. /*SkipOpers*/ false, /*SkipDebugLoc*/ false,
  1247. /*AddNewLine*/ true, TII));
  1248. cleanupEntryValueTransfers(TransferInst, OpenRanges, VarLocIDs, EntryVL,
  1249. EntryValTransfers);
  1250. OpenRanges.erase(EntryVL);
  1251. }
  1252. /// End all previous ranges related to @MI and start a new range from @MI
  1253. /// if it is a DBG_VALUE instr.
  1254. void VarLocBasedLDV::transferDebugValue(const MachineInstr &MI,
  1255. OpenRangesSet &OpenRanges,
  1256. VarLocMap &VarLocIDs,
  1257. InstToEntryLocMap &EntryValTransfers,
  1258. RegDefToInstMap &RegSetInstrs) {
  1259. if (!MI.isDebugValue())
  1260. return;
  1261. const DILocalVariable *Var = MI.getDebugVariable();
  1262. const DIExpression *Expr = MI.getDebugExpression();
  1263. const DILocation *DebugLoc = MI.getDebugLoc();
  1264. const DILocation *InlinedAt = DebugLoc->getInlinedAt();
  1265. assert(Var->isValidLocationForIntrinsic(DebugLoc) &&
  1266. "Expected inlined-at fields to agree");
  1267. DebugVariable V(Var, Expr, InlinedAt);
  1268. // Check if this DBG_VALUE indicates a parameter's value changing.
  1269. // If that is the case, we should stop tracking its entry value.
  1270. auto EntryValBackupID = OpenRanges.getEntryValueBackup(V);
  1271. if (Var->isParameter() && EntryValBackupID) {
  1272. const VarLoc &EntryVL = VarLocIDs[EntryValBackupID->back()];
  1273. removeEntryValue(MI, OpenRanges, VarLocIDs, EntryVL, EntryValTransfers,
  1274. RegSetInstrs);
  1275. }
  1276. if (all_of(MI.debug_operands(), [](const MachineOperand &MO) {
  1277. return (MO.isReg() && MO.getReg()) || MO.isImm() || MO.isFPImm() ||
  1278. MO.isCImm() || MO.isTargetIndex();
  1279. })) {
  1280. // Use normal VarLoc constructor for registers and immediates.
  1281. VarLoc VL(MI);
  1282. // End all previous ranges of VL.Var.
  1283. OpenRanges.erase(VL);
  1284. LocIndices IDs = VarLocIDs.insert(VL);
  1285. // Add the VarLoc to OpenRanges from this DBG_VALUE.
  1286. OpenRanges.insert(IDs, VL);
  1287. } else if (MI.memoperands().size() > 0) {
  1288. llvm_unreachable("DBG_VALUE with mem operand encountered after regalloc?");
  1289. } else {
  1290. // This must be an undefined location. If it has an open range, erase it.
  1291. assert(MI.isUndefDebugValue() &&
  1292. "Unexpected non-undef DBG_VALUE encountered");
  1293. VarLoc VL(MI);
  1294. OpenRanges.erase(VL);
  1295. }
  1296. }
  1297. // This should be removed later, doesn't fit the new design.
  1298. void VarLocBasedLDV::collectAllVarLocs(SmallVectorImpl<VarLoc> &Collected,
  1299. const VarLocSet &CollectFrom,
  1300. const VarLocMap &VarLocIDs) {
  1301. // The half-open interval [FirstIndexForReg, FirstInvalidIndex) contains all
  1302. // possible VarLoc IDs for VarLocs with MLs of kind RegisterKind which live
  1303. // in Reg.
  1304. uint64_t FirstIndex = LocIndex::rawIndexForReg(LocIndex::kUniversalLocation);
  1305. uint64_t FirstInvalidIndex =
  1306. LocIndex::rawIndexForReg(LocIndex::kUniversalLocation + 1);
  1307. // Iterate through that half-open interval and collect all the set IDs.
  1308. for (auto It = CollectFrom.find(FirstIndex), End = CollectFrom.end();
  1309. It != End && *It < FirstInvalidIndex; ++It) {
  1310. LocIndex RegIdx = LocIndex::fromRawInteger(*It);
  1311. Collected.push_back(VarLocIDs[RegIdx]);
  1312. }
  1313. }
  1314. /// Turn the entry value backup locations into primary locations.
  1315. void VarLocBasedLDV::emitEntryValues(MachineInstr &MI,
  1316. OpenRangesSet &OpenRanges,
  1317. VarLocMap &VarLocIDs,
  1318. InstToEntryLocMap &EntryValTransfers,
  1319. VarLocsInRange &KillSet) {
  1320. // Do not insert entry value locations after a terminator.
  1321. if (MI.isTerminator())
  1322. return;
  1323. for (uint32_t ID : KillSet) {
  1324. // The KillSet IDs are indices for the universal location bucket.
  1325. LocIndex Idx = LocIndex(LocIndex::kUniversalLocation, ID);
  1326. const VarLoc &VL = VarLocIDs[Idx];
  1327. if (!VL.Var.getVariable()->isParameter())
  1328. continue;
  1329. auto DebugVar = VL.Var;
  1330. std::optional<LocIndices> EntryValBackupIDs =
  1331. OpenRanges.getEntryValueBackup(DebugVar);
  1332. // If the parameter has the entry value backup, it means we should
  1333. // be able to use its entry value.
  1334. if (!EntryValBackupIDs)
  1335. continue;
  1336. const VarLoc &EntryVL = VarLocIDs[EntryValBackupIDs->back()];
  1337. VarLoc EntryLoc = VarLoc::CreateEntryLoc(EntryVL.MI, EntryVL.Expr,
  1338. EntryVL.Locs[0].Value.RegNo);
  1339. LocIndices EntryValueIDs = VarLocIDs.insert(EntryLoc);
  1340. assert(EntryValueIDs.size() == 1 &&
  1341. "EntryValue loc should not be variadic");
  1342. EntryValTransfers.insert({&MI, EntryValueIDs.back()});
  1343. OpenRanges.insert(EntryValueIDs, EntryLoc);
  1344. }
  1345. }
  1346. /// Create new TransferDebugPair and insert it in \p Transfers. The VarLoc
  1347. /// with \p OldVarID should be deleted form \p OpenRanges and replaced with
  1348. /// new VarLoc. If \p NewReg is different than default zero value then the
  1349. /// new location will be register location created by the copy like instruction,
  1350. /// otherwise it is variable's location on the stack.
  1351. void VarLocBasedLDV::insertTransferDebugPair(
  1352. MachineInstr &MI, OpenRangesSet &OpenRanges, TransferMap &Transfers,
  1353. VarLocMap &VarLocIDs, LocIndex OldVarID, TransferKind Kind,
  1354. const VarLoc::MachineLoc &OldLoc, Register NewReg) {
  1355. const VarLoc &OldVarLoc = VarLocIDs[OldVarID];
  1356. auto ProcessVarLoc = [&MI, &OpenRanges, &Transfers, &VarLocIDs](VarLoc &VL) {
  1357. LocIndices LocIds = VarLocIDs.insert(VL);
  1358. // Close this variable's previous location range.
  1359. OpenRanges.erase(VL);
  1360. // Record the new location as an open range, and a postponed transfer
  1361. // inserting a DBG_VALUE for this location.
  1362. OpenRanges.insert(LocIds, VL);
  1363. assert(!MI.isTerminator() && "Cannot insert DBG_VALUE after terminator");
  1364. TransferDebugPair MIP = {&MI, LocIds.back()};
  1365. Transfers.push_back(MIP);
  1366. };
  1367. // End all previous ranges of VL.Var.
  1368. OpenRanges.erase(VarLocIDs[OldVarID]);
  1369. switch (Kind) {
  1370. case TransferKind::TransferCopy: {
  1371. assert(NewReg &&
  1372. "No register supplied when handling a copy of a debug value");
  1373. // Create a DBG_VALUE instruction to describe the Var in its new
  1374. // register location.
  1375. VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
  1376. ProcessVarLoc(VL);
  1377. LLVM_DEBUG({
  1378. dbgs() << "Creating VarLoc for register copy:";
  1379. VL.dump(TRI, TII);
  1380. });
  1381. return;
  1382. }
  1383. case TransferKind::TransferSpill: {
  1384. // Create a DBG_VALUE instruction to describe the Var in its spilled
  1385. // location.
  1386. VarLoc::SpillLoc SpillLocation = extractSpillBaseRegAndOffset(MI);
  1387. VarLoc VL = VarLoc::CreateSpillLoc(
  1388. OldVarLoc, OldLoc, SpillLocation.SpillBase, SpillLocation.SpillOffset);
  1389. ProcessVarLoc(VL);
  1390. LLVM_DEBUG({
  1391. dbgs() << "Creating VarLoc for spill:";
  1392. VL.dump(TRI, TII);
  1393. });
  1394. return;
  1395. }
  1396. case TransferKind::TransferRestore: {
  1397. assert(NewReg &&
  1398. "No register supplied when handling a restore of a debug value");
  1399. // DebugInstr refers to the pre-spill location, therefore we can reuse
  1400. // its expression.
  1401. VarLoc VL = VarLoc::CreateCopyLoc(OldVarLoc, OldLoc, NewReg);
  1402. ProcessVarLoc(VL);
  1403. LLVM_DEBUG({
  1404. dbgs() << "Creating VarLoc for restore:";
  1405. VL.dump(TRI, TII);
  1406. });
  1407. return;
  1408. }
  1409. }
  1410. llvm_unreachable("Invalid transfer kind");
  1411. }
  1412. /// A definition of a register may mark the end of a range.
  1413. void VarLocBasedLDV::transferRegisterDef(MachineInstr &MI,
  1414. OpenRangesSet &OpenRanges,
  1415. VarLocMap &VarLocIDs,
  1416. InstToEntryLocMap &EntryValTransfers,
  1417. RegDefToInstMap &RegSetInstrs) {
  1418. // Meta Instructions do not affect the debug liveness of any register they
  1419. // define.
  1420. if (MI.isMetaInstruction())
  1421. return;
  1422. MachineFunction *MF = MI.getMF();
  1423. const TargetLowering *TLI = MF->getSubtarget().getTargetLowering();
  1424. Register SP = TLI->getStackPointerRegisterToSaveRestore();
  1425. // Find the regs killed by MI, and find regmasks of preserved regs.
  1426. DefinedRegsSet DeadRegs;
  1427. SmallVector<const uint32_t *, 4> RegMasks;
  1428. for (const MachineOperand &MO : MI.operands()) {
  1429. // Determine whether the operand is a register def.
  1430. if (MO.isReg() && MO.isDef() && MO.getReg() && MO.getReg().isPhysical() &&
  1431. !(MI.isCall() && MO.getReg() == SP)) {
  1432. // Remove ranges of all aliased registers.
  1433. for (MCRegAliasIterator RAI(MO.getReg(), TRI, true); RAI.isValid(); ++RAI)
  1434. // FIXME: Can we break out of this loop early if no insertion occurs?
  1435. DeadRegs.insert(*RAI);
  1436. RegSetInstrs.erase(MO.getReg());
  1437. RegSetInstrs.insert({MO.getReg(), &MI});
  1438. } else if (MO.isRegMask()) {
  1439. RegMasks.push_back(MO.getRegMask());
  1440. }
  1441. }
  1442. // Erase VarLocs which reside in one of the dead registers. For performance
  1443. // reasons, it's critical to not iterate over the full set of open VarLocs.
  1444. // Iterate over the set of dying/used regs instead.
  1445. if (!RegMasks.empty()) {
  1446. SmallVector<Register, 32> UsedRegs;
  1447. getUsedRegs(OpenRanges.getVarLocs(), UsedRegs);
  1448. for (Register Reg : UsedRegs) {
  1449. // Remove ranges of all clobbered registers. Register masks don't usually
  1450. // list SP as preserved. Assume that call instructions never clobber SP,
  1451. // because some backends (e.g., AArch64) never list SP in the regmask.
  1452. // While the debug info may be off for an instruction or two around
  1453. // callee-cleanup calls, transferring the DEBUG_VALUE across the call is
  1454. // still a better user experience.
  1455. if (Reg == SP)
  1456. continue;
  1457. bool AnyRegMaskKillsReg =
  1458. any_of(RegMasks, [Reg](const uint32_t *RegMask) {
  1459. return MachineOperand::clobbersPhysReg(RegMask, Reg);
  1460. });
  1461. if (AnyRegMaskKillsReg)
  1462. DeadRegs.insert(Reg);
  1463. if (AnyRegMaskKillsReg) {
  1464. RegSetInstrs.erase(Reg);
  1465. RegSetInstrs.insert({Reg, &MI});
  1466. }
  1467. }
  1468. }
  1469. if (DeadRegs.empty())
  1470. return;
  1471. VarLocsInRange KillSet;
  1472. collectIDsForRegs(KillSet, DeadRegs, OpenRanges.getVarLocs(), VarLocIDs);
  1473. OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kUniversalLocation);
  1474. if (TPC) {
  1475. auto &TM = TPC->getTM<TargetMachine>();
  1476. if (TM.Options.ShouldEmitDebugEntryValues())
  1477. emitEntryValues(MI, OpenRanges, VarLocIDs, EntryValTransfers, KillSet);
  1478. }
  1479. }
  1480. void VarLocBasedLDV::transferWasmDef(MachineInstr &MI,
  1481. OpenRangesSet &OpenRanges,
  1482. VarLocMap &VarLocIDs) {
  1483. // If this is not a Wasm local.set or local.tee, which sets local values,
  1484. // return.
  1485. int Index;
  1486. int64_t Offset;
  1487. if (!TII->isExplicitTargetIndexDef(MI, Index, Offset))
  1488. return;
  1489. // Find the target indices killed by MI, and delete those variable locations
  1490. // from the open range.
  1491. VarLocsInRange KillSet;
  1492. VarLoc::WasmLoc Loc{Index, Offset};
  1493. for (uint64_t ID : OpenRanges.getWasmVarLocs()) {
  1494. LocIndex Idx = LocIndex::fromRawInteger(ID);
  1495. const VarLoc &VL = VarLocIDs[Idx];
  1496. assert(VL.containsWasmLocs() && "Broken VarLocSet?");
  1497. if (VL.usesWasmLoc(Loc))
  1498. KillSet.insert(ID);
  1499. }
  1500. OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kWasmLocation);
  1501. }
  1502. bool VarLocBasedLDV::isSpillInstruction(const MachineInstr &MI,
  1503. MachineFunction *MF) {
  1504. // TODO: Handle multiple stores folded into one.
  1505. if (!MI.hasOneMemOperand())
  1506. return false;
  1507. if (!MI.getSpillSize(TII) && !MI.getFoldedSpillSize(TII))
  1508. return false; // This is not a spill instruction, since no valid size was
  1509. // returned from either function.
  1510. return true;
  1511. }
  1512. bool VarLocBasedLDV::isLocationSpill(const MachineInstr &MI,
  1513. MachineFunction *MF, Register &Reg) {
  1514. if (!isSpillInstruction(MI, MF))
  1515. return false;
  1516. auto isKilledReg = [&](const MachineOperand MO, Register &Reg) {
  1517. if (!MO.isReg() || !MO.isUse()) {
  1518. Reg = 0;
  1519. return false;
  1520. }
  1521. Reg = MO.getReg();
  1522. return MO.isKill();
  1523. };
  1524. for (const MachineOperand &MO : MI.operands()) {
  1525. // In a spill instruction generated by the InlineSpiller the spilled
  1526. // register has its kill flag set.
  1527. if (isKilledReg(MO, Reg))
  1528. return true;
  1529. if (Reg != 0) {
  1530. // Check whether next instruction kills the spilled register.
  1531. // FIXME: Current solution does not cover search for killed register in
  1532. // bundles and instructions further down the chain.
  1533. auto NextI = std::next(MI.getIterator());
  1534. // Skip next instruction that points to basic block end iterator.
  1535. if (MI.getParent()->end() == NextI)
  1536. continue;
  1537. Register RegNext;
  1538. for (const MachineOperand &MONext : NextI->operands()) {
  1539. // Return true if we came across the register from the
  1540. // previous spill instruction that is killed in NextI.
  1541. if (isKilledReg(MONext, RegNext) && RegNext == Reg)
  1542. return true;
  1543. }
  1544. }
  1545. }
  1546. // Return false if we didn't find spilled register.
  1547. return false;
  1548. }
  1549. std::optional<VarLocBasedLDV::VarLoc::SpillLoc>
  1550. VarLocBasedLDV::isRestoreInstruction(const MachineInstr &MI,
  1551. MachineFunction *MF, Register &Reg) {
  1552. if (!MI.hasOneMemOperand())
  1553. return std::nullopt;
  1554. // FIXME: Handle folded restore instructions with more than one memory
  1555. // operand.
  1556. if (MI.getRestoreSize(TII)) {
  1557. Reg = MI.getOperand(0).getReg();
  1558. return extractSpillBaseRegAndOffset(MI);
  1559. }
  1560. return std::nullopt;
  1561. }
  1562. /// A spilled register may indicate that we have to end the current range of
  1563. /// a variable and create a new one for the spill location.
  1564. /// A restored register may indicate the reverse situation.
  1565. /// We don't want to insert any instructions in process(), so we just create
  1566. /// the DBG_VALUE without inserting it and keep track of it in \p Transfers.
  1567. /// It will be inserted into the BB when we're done iterating over the
  1568. /// instructions.
  1569. void VarLocBasedLDV::transferSpillOrRestoreInst(MachineInstr &MI,
  1570. OpenRangesSet &OpenRanges,
  1571. VarLocMap &VarLocIDs,
  1572. TransferMap &Transfers) {
  1573. MachineFunction *MF = MI.getMF();
  1574. TransferKind TKind;
  1575. Register Reg;
  1576. std::optional<VarLoc::SpillLoc> Loc;
  1577. LLVM_DEBUG(dbgs() << "Examining instruction: "; MI.dump(););
  1578. // First, if there are any DBG_VALUEs pointing at a spill slot that is
  1579. // written to, then close the variable location. The value in memory
  1580. // will have changed.
  1581. VarLocsInRange KillSet;
  1582. if (isSpillInstruction(MI, MF)) {
  1583. Loc = extractSpillBaseRegAndOffset(MI);
  1584. for (uint64_t ID : OpenRanges.getSpillVarLocs()) {
  1585. LocIndex Idx = LocIndex::fromRawInteger(ID);
  1586. const VarLoc &VL = VarLocIDs[Idx];
  1587. assert(VL.containsSpillLocs() && "Broken VarLocSet?");
  1588. if (VL.usesSpillLoc(*Loc)) {
  1589. // This location is overwritten by the current instruction -- terminate
  1590. // the open range, and insert an explicit DBG_VALUE $noreg.
  1591. //
  1592. // Doing this at a later stage would require re-interpreting all
  1593. // DBG_VALUes and DIExpressions to identify whether they point at
  1594. // memory, and then analysing all memory writes to see if they
  1595. // overwrite that memory, which is expensive.
  1596. //
  1597. // At this stage, we already know which DBG_VALUEs are for spills and
  1598. // where they are located; it's best to fix handle overwrites now.
  1599. KillSet.insert(ID);
  1600. unsigned SpillLocIdx = VL.getSpillLocIdx(*Loc);
  1601. VarLoc::MachineLoc OldLoc = VL.Locs[SpillLocIdx];
  1602. VarLoc UndefVL = VarLoc::CreateCopyLoc(VL, OldLoc, 0);
  1603. LocIndices UndefLocIDs = VarLocIDs.insert(UndefVL);
  1604. Transfers.push_back({&MI, UndefLocIDs.back()});
  1605. }
  1606. }
  1607. OpenRanges.erase(KillSet, VarLocIDs, LocIndex::kSpillLocation);
  1608. }
  1609. // Try to recognise spill and restore instructions that may create a new
  1610. // variable location.
  1611. if (isLocationSpill(MI, MF, Reg)) {
  1612. TKind = TransferKind::TransferSpill;
  1613. LLVM_DEBUG(dbgs() << "Recognized as spill: "; MI.dump(););
  1614. LLVM_DEBUG(dbgs() << "Register: " << Reg << " " << printReg(Reg, TRI)
  1615. << "\n");
  1616. } else {
  1617. if (!(Loc = isRestoreInstruction(MI, MF, Reg)))
  1618. return;
  1619. TKind = TransferKind::TransferRestore;
  1620. LLVM_DEBUG(dbgs() << "Recognized as restore: "; MI.dump(););
  1621. LLVM_DEBUG(dbgs() << "Register: " << Reg << " " << printReg(Reg, TRI)
  1622. << "\n");
  1623. }
  1624. // Check if the register or spill location is the location of a debug value.
  1625. auto TransferCandidates = OpenRanges.getEmptyVarLocRange();
  1626. if (TKind == TransferKind::TransferSpill)
  1627. TransferCandidates = OpenRanges.getRegisterVarLocs(Reg);
  1628. else if (TKind == TransferKind::TransferRestore)
  1629. TransferCandidates = OpenRanges.getSpillVarLocs();
  1630. for (uint64_t ID : TransferCandidates) {
  1631. LocIndex Idx = LocIndex::fromRawInteger(ID);
  1632. const VarLoc &VL = VarLocIDs[Idx];
  1633. unsigned LocIdx;
  1634. if (TKind == TransferKind::TransferSpill) {
  1635. assert(VL.usesReg(Reg) && "Broken VarLocSet?");
  1636. LLVM_DEBUG(dbgs() << "Spilling Register " << printReg(Reg, TRI) << '('
  1637. << VL.Var.getVariable()->getName() << ")\n");
  1638. LocIdx = VL.getRegIdx(Reg);
  1639. } else {
  1640. assert(TKind == TransferKind::TransferRestore && VL.containsSpillLocs() &&
  1641. "Broken VarLocSet?");
  1642. if (!VL.usesSpillLoc(*Loc))
  1643. // The spill location is not the location of a debug value.
  1644. continue;
  1645. LLVM_DEBUG(dbgs() << "Restoring Register " << printReg(Reg, TRI) << '('
  1646. << VL.Var.getVariable()->getName() << ")\n");
  1647. LocIdx = VL.getSpillLocIdx(*Loc);
  1648. }
  1649. VarLoc::MachineLoc MLoc = VL.Locs[LocIdx];
  1650. insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx, TKind,
  1651. MLoc, Reg);
  1652. // FIXME: A comment should explain why it's correct to return early here,
  1653. // if that is in fact correct.
  1654. return;
  1655. }
  1656. }
  1657. /// If \p MI is a register copy instruction, that copies a previously tracked
  1658. /// value from one register to another register that is callee saved, we
  1659. /// create new DBG_VALUE instruction described with copy destination register.
  1660. void VarLocBasedLDV::transferRegisterCopy(MachineInstr &MI,
  1661. OpenRangesSet &OpenRanges,
  1662. VarLocMap &VarLocIDs,
  1663. TransferMap &Transfers) {
  1664. auto DestSrc = TII->isCopyInstr(MI);
  1665. if (!DestSrc)
  1666. return;
  1667. const MachineOperand *DestRegOp = DestSrc->Destination;
  1668. const MachineOperand *SrcRegOp = DestSrc->Source;
  1669. if (!DestRegOp->isDef())
  1670. return;
  1671. auto isCalleeSavedReg = [&](Register Reg) {
  1672. for (MCRegAliasIterator RAI(Reg, TRI, true); RAI.isValid(); ++RAI)
  1673. if (CalleeSavedRegs.test(*RAI))
  1674. return true;
  1675. return false;
  1676. };
  1677. Register SrcReg = SrcRegOp->getReg();
  1678. Register DestReg = DestRegOp->getReg();
  1679. // We want to recognize instructions where destination register is callee
  1680. // saved register. If register that could be clobbered by the call is
  1681. // included, there would be a great chance that it is going to be clobbered
  1682. // soon. It is more likely that previous register location, which is callee
  1683. // saved, is going to stay unclobbered longer, even if it is killed.
  1684. if (!isCalleeSavedReg(DestReg))
  1685. return;
  1686. // Remember an entry value movement. If we encounter a new debug value of
  1687. // a parameter describing only a moving of the value around, rather then
  1688. // modifying it, we are still able to use the entry value if needed.
  1689. if (isRegOtherThanSPAndFP(*DestRegOp, MI, TRI)) {
  1690. for (uint64_t ID : OpenRanges.getEntryValueBackupVarLocs()) {
  1691. LocIndex Idx = LocIndex::fromRawInteger(ID);
  1692. const VarLoc &VL = VarLocIDs[Idx];
  1693. if (VL.isEntryValueBackupReg(SrcReg)) {
  1694. LLVM_DEBUG(dbgs() << "Copy of the entry value: "; MI.dump(););
  1695. VarLoc EntryValLocCopyBackup =
  1696. VarLoc::CreateEntryCopyBackupLoc(VL.MI, VL.Expr, DestReg);
  1697. // Stop tracking the original entry value.
  1698. OpenRanges.erase(VL);
  1699. // Start tracking the entry value copy.
  1700. LocIndices EntryValCopyLocIDs = VarLocIDs.insert(EntryValLocCopyBackup);
  1701. OpenRanges.insert(EntryValCopyLocIDs, EntryValLocCopyBackup);
  1702. break;
  1703. }
  1704. }
  1705. }
  1706. if (!SrcRegOp->isKill())
  1707. return;
  1708. for (uint64_t ID : OpenRanges.getRegisterVarLocs(SrcReg)) {
  1709. LocIndex Idx = LocIndex::fromRawInteger(ID);
  1710. assert(VarLocIDs[Idx].usesReg(SrcReg) && "Broken VarLocSet?");
  1711. VarLoc::MachineLocValue Loc;
  1712. Loc.RegNo = SrcReg;
  1713. VarLoc::MachineLoc MLoc{VarLoc::MachineLocKind::RegisterKind, Loc};
  1714. insertTransferDebugPair(MI, OpenRanges, Transfers, VarLocIDs, Idx,
  1715. TransferKind::TransferCopy, MLoc, DestReg);
  1716. // FIXME: A comment should explain why it's correct to return early here,
  1717. // if that is in fact correct.
  1718. return;
  1719. }
  1720. }
  1721. /// Terminate all open ranges at the end of the current basic block.
  1722. bool VarLocBasedLDV::transferTerminator(MachineBasicBlock *CurMBB,
  1723. OpenRangesSet &OpenRanges,
  1724. VarLocInMBB &OutLocs,
  1725. const VarLocMap &VarLocIDs) {
  1726. bool Changed = false;
  1727. LLVM_DEBUG({
  1728. VarVec VarLocs;
  1729. OpenRanges.getUniqueVarLocs(VarLocs, VarLocIDs);
  1730. for (VarLoc &VL : VarLocs) {
  1731. // Copy OpenRanges to OutLocs, if not already present.
  1732. dbgs() << "Add to OutLocs in MBB #" << CurMBB->getNumber() << ": ";
  1733. VL.dump(TRI, TII);
  1734. }
  1735. });
  1736. VarLocSet &VLS = getVarLocsInMBB(CurMBB, OutLocs);
  1737. Changed = VLS != OpenRanges.getVarLocs();
  1738. // New OutLocs set may be different due to spill, restore or register
  1739. // copy instruction processing.
  1740. if (Changed)
  1741. VLS = OpenRanges.getVarLocs();
  1742. OpenRanges.clear();
  1743. return Changed;
  1744. }
  1745. /// Accumulate a mapping between each DILocalVariable fragment and other
  1746. /// fragments of that DILocalVariable which overlap. This reduces work during
  1747. /// the data-flow stage from "Find any overlapping fragments" to "Check if the
  1748. /// known-to-overlap fragments are present".
  1749. /// \param MI A previously unprocessed DEBUG_VALUE instruction to analyze for
  1750. /// fragment usage.
  1751. /// \param SeenFragments Map from DILocalVariable to all fragments of that
  1752. /// Variable which are known to exist.
  1753. /// \param OverlappingFragments The overlap map being constructed, from one
  1754. /// Var/Fragment pair to a vector of fragments known to overlap.
  1755. void VarLocBasedLDV::accumulateFragmentMap(MachineInstr &MI,
  1756. VarToFragments &SeenFragments,
  1757. OverlapMap &OverlappingFragments) {
  1758. DebugVariable MIVar(MI.getDebugVariable(), MI.getDebugExpression(),
  1759. MI.getDebugLoc()->getInlinedAt());
  1760. FragmentInfo ThisFragment = MIVar.getFragmentOrDefault();
  1761. // If this is the first sighting of this variable, then we are guaranteed
  1762. // there are currently no overlapping fragments either. Initialize the set
  1763. // of seen fragments, record no overlaps for the current one, and return.
  1764. auto SeenIt = SeenFragments.find(MIVar.getVariable());
  1765. if (SeenIt == SeenFragments.end()) {
  1766. SmallSet<FragmentInfo, 4> OneFragment;
  1767. OneFragment.insert(ThisFragment);
  1768. SeenFragments.insert({MIVar.getVariable(), OneFragment});
  1769. OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
  1770. return;
  1771. }
  1772. // If this particular Variable/Fragment pair already exists in the overlap
  1773. // map, it has already been accounted for.
  1774. auto IsInOLapMap =
  1775. OverlappingFragments.insert({{MIVar.getVariable(), ThisFragment}, {}});
  1776. if (!IsInOLapMap.second)
  1777. return;
  1778. auto &ThisFragmentsOverlaps = IsInOLapMap.first->second;
  1779. auto &AllSeenFragments = SeenIt->second;
  1780. // Otherwise, examine all other seen fragments for this variable, with "this"
  1781. // fragment being a previously unseen fragment. Record any pair of
  1782. // overlapping fragments.
  1783. for (const auto &ASeenFragment : AllSeenFragments) {
  1784. // Does this previously seen fragment overlap?
  1785. if (DIExpression::fragmentsOverlap(ThisFragment, ASeenFragment)) {
  1786. // Yes: Mark the current fragment as being overlapped.
  1787. ThisFragmentsOverlaps.push_back(ASeenFragment);
  1788. // Mark the previously seen fragment as being overlapped by the current
  1789. // one.
  1790. auto ASeenFragmentsOverlaps =
  1791. OverlappingFragments.find({MIVar.getVariable(), ASeenFragment});
  1792. assert(ASeenFragmentsOverlaps != OverlappingFragments.end() &&
  1793. "Previously seen var fragment has no vector of overlaps");
  1794. ASeenFragmentsOverlaps->second.push_back(ThisFragment);
  1795. }
  1796. }
  1797. AllSeenFragments.insert(ThisFragment);
  1798. }
  1799. /// This routine creates OpenRanges.
  1800. void VarLocBasedLDV::process(MachineInstr &MI, OpenRangesSet &OpenRanges,
  1801. VarLocMap &VarLocIDs, TransferMap &Transfers,
  1802. InstToEntryLocMap &EntryValTransfers,
  1803. RegDefToInstMap &RegSetInstrs) {
  1804. if (!MI.isDebugInstr())
  1805. LastNonDbgMI = &MI;
  1806. transferDebugValue(MI, OpenRanges, VarLocIDs, EntryValTransfers,
  1807. RegSetInstrs);
  1808. transferRegisterDef(MI, OpenRanges, VarLocIDs, EntryValTransfers,
  1809. RegSetInstrs);
  1810. transferWasmDef(MI, OpenRanges, VarLocIDs);
  1811. transferRegisterCopy(MI, OpenRanges, VarLocIDs, Transfers);
  1812. transferSpillOrRestoreInst(MI, OpenRanges, VarLocIDs, Transfers);
  1813. }
  1814. /// This routine joins the analysis results of all incoming edges in @MBB by
  1815. /// inserting a new DBG_VALUE instruction at the start of the @MBB - if the same
  1816. /// source variable in all the predecessors of @MBB reside in the same location.
  1817. bool VarLocBasedLDV::join(
  1818. MachineBasicBlock &MBB, VarLocInMBB &OutLocs, VarLocInMBB &InLocs,
  1819. const VarLocMap &VarLocIDs,
  1820. SmallPtrSet<const MachineBasicBlock *, 16> &Visited,
  1821. SmallPtrSetImpl<const MachineBasicBlock *> &ArtificialBlocks) {
  1822. LLVM_DEBUG(dbgs() << "join MBB: " << MBB.getNumber() << "\n");
  1823. VarLocSet InLocsT(Alloc); // Temporary incoming locations.
  1824. // For all predecessors of this MBB, find the set of VarLocs that
  1825. // can be joined.
  1826. int NumVisited = 0;
  1827. for (auto *p : MBB.predecessors()) {
  1828. // Ignore backedges if we have not visited the predecessor yet. As the
  1829. // predecessor hasn't yet had locations propagated into it, most locations
  1830. // will not yet be valid, so treat them as all being uninitialized and
  1831. // potentially valid. If a location guessed to be correct here is
  1832. // invalidated later, we will remove it when we revisit this block.
  1833. if (!Visited.count(p)) {
  1834. LLVM_DEBUG(dbgs() << " ignoring unvisited pred MBB: " << p->getNumber()
  1835. << "\n");
  1836. continue;
  1837. }
  1838. auto OL = OutLocs.find(p);
  1839. // Join is null in case of empty OutLocs from any of the pred.
  1840. if (OL == OutLocs.end())
  1841. return false;
  1842. // Just copy over the Out locs to incoming locs for the first visited
  1843. // predecessor, and for all other predecessors join the Out locs.
  1844. VarLocSet &OutLocVLS = *OL->second;
  1845. if (!NumVisited)
  1846. InLocsT = OutLocVLS;
  1847. else
  1848. InLocsT &= OutLocVLS;
  1849. LLVM_DEBUG({
  1850. if (!InLocsT.empty()) {
  1851. VarVec VarLocs;
  1852. collectAllVarLocs(VarLocs, InLocsT, VarLocIDs);
  1853. for (const VarLoc &VL : VarLocs)
  1854. dbgs() << " gathered candidate incoming var: "
  1855. << VL.Var.getVariable()->getName() << "\n";
  1856. }
  1857. });
  1858. NumVisited++;
  1859. }
  1860. // Filter out DBG_VALUES that are out of scope.
  1861. VarLocSet KillSet(Alloc);
  1862. bool IsArtificial = ArtificialBlocks.count(&MBB);
  1863. if (!IsArtificial) {
  1864. for (uint64_t ID : InLocsT) {
  1865. LocIndex Idx = LocIndex::fromRawInteger(ID);
  1866. if (!VarLocIDs[Idx].dominates(LS, MBB)) {
  1867. KillSet.set(ID);
  1868. LLVM_DEBUG({
  1869. auto Name = VarLocIDs[Idx].Var.getVariable()->getName();
  1870. dbgs() << " killing " << Name << ", it doesn't dominate MBB\n";
  1871. });
  1872. }
  1873. }
  1874. }
  1875. InLocsT.intersectWithComplement(KillSet);
  1876. // As we are processing blocks in reverse post-order we
  1877. // should have processed at least one predecessor, unless it
  1878. // is the entry block which has no predecessor.
  1879. assert((NumVisited || MBB.pred_empty()) &&
  1880. "Should have processed at least one predecessor");
  1881. VarLocSet &ILS = getVarLocsInMBB(&MBB, InLocs);
  1882. bool Changed = false;
  1883. if (ILS != InLocsT) {
  1884. ILS = InLocsT;
  1885. Changed = true;
  1886. }
  1887. return Changed;
  1888. }
  1889. void VarLocBasedLDV::flushPendingLocs(VarLocInMBB &PendingInLocs,
  1890. VarLocMap &VarLocIDs) {
  1891. // PendingInLocs records all locations propagated into blocks, which have
  1892. // not had DBG_VALUE insts created. Go through and create those insts now.
  1893. for (auto &Iter : PendingInLocs) {
  1894. // Map is keyed on a constant pointer, unwrap it so we can insert insts.
  1895. auto &MBB = const_cast<MachineBasicBlock &>(*Iter.first);
  1896. VarLocSet &Pending = *Iter.second;
  1897. SmallVector<VarLoc, 32> VarLocs;
  1898. collectAllVarLocs(VarLocs, Pending, VarLocIDs);
  1899. for (VarLoc DiffIt : VarLocs) {
  1900. // The ID location is live-in to MBB -- work out what kind of machine
  1901. // location it is and create a DBG_VALUE.
  1902. if (DiffIt.isEntryBackupLoc())
  1903. continue;
  1904. MachineInstr *MI = DiffIt.BuildDbgValue(*MBB.getParent());
  1905. MBB.insert(MBB.instr_begin(), MI);
  1906. (void)MI;
  1907. LLVM_DEBUG(dbgs() << "Inserted: "; MI->dump(););
  1908. }
  1909. }
  1910. }
  1911. bool VarLocBasedLDV::isEntryValueCandidate(
  1912. const MachineInstr &MI, const DefinedRegsSet &DefinedRegs) const {
  1913. assert(MI.isDebugValue() && "This must be DBG_VALUE.");
  1914. // TODO: Add support for local variables that are expressed in terms of
  1915. // parameters entry values.
  1916. // TODO: Add support for modified arguments that can be expressed
  1917. // by using its entry value.
  1918. auto *DIVar = MI.getDebugVariable();
  1919. if (!DIVar->isParameter())
  1920. return false;
  1921. // Do not consider parameters that belong to an inlined function.
  1922. if (MI.getDebugLoc()->getInlinedAt())
  1923. return false;
  1924. // Only consider parameters that are described using registers. Parameters
  1925. // that are passed on the stack are not yet supported, so ignore debug
  1926. // values that are described by the frame or stack pointer.
  1927. if (!isRegOtherThanSPAndFP(MI.getDebugOperand(0), MI, TRI))
  1928. return false;
  1929. // If a parameter's value has been propagated from the caller, then the
  1930. // parameter's DBG_VALUE may be described using a register defined by some
  1931. // instruction in the entry block, in which case we shouldn't create an
  1932. // entry value.
  1933. if (DefinedRegs.count(MI.getDebugOperand(0).getReg()))
  1934. return false;
  1935. // TODO: Add support for parameters that have a pre-existing debug expressions
  1936. // (e.g. fragments).
  1937. if (MI.getDebugExpression()->getNumElements() > 0)
  1938. return false;
  1939. return true;
  1940. }
  1941. /// Collect all register defines (including aliases) for the given instruction.
  1942. static void collectRegDefs(const MachineInstr &MI, DefinedRegsSet &Regs,
  1943. const TargetRegisterInfo *TRI) {
  1944. for (const MachineOperand &MO : MI.operands()) {
  1945. if (MO.isReg() && MO.isDef() && MO.getReg() && MO.getReg().isPhysical()) {
  1946. Regs.insert(MO.getReg());
  1947. for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
  1948. Regs.insert(*AI);
  1949. }
  1950. }
  1951. }
  1952. /// This routine records the entry values of function parameters. The values
  1953. /// could be used as backup values. If we loose the track of some unmodified
  1954. /// parameters, the backup values will be used as a primary locations.
  1955. void VarLocBasedLDV::recordEntryValue(const MachineInstr &MI,
  1956. const DefinedRegsSet &DefinedRegs,
  1957. OpenRangesSet &OpenRanges,
  1958. VarLocMap &VarLocIDs) {
  1959. if (TPC) {
  1960. auto &TM = TPC->getTM<TargetMachine>();
  1961. if (!TM.Options.ShouldEmitDebugEntryValues())
  1962. return;
  1963. }
  1964. DebugVariable V(MI.getDebugVariable(), MI.getDebugExpression(),
  1965. MI.getDebugLoc()->getInlinedAt());
  1966. if (!isEntryValueCandidate(MI, DefinedRegs) ||
  1967. OpenRanges.getEntryValueBackup(V))
  1968. return;
  1969. LLVM_DEBUG(dbgs() << "Creating the backup entry location: "; MI.dump(););
  1970. // Create the entry value and use it as a backup location until it is
  1971. // valid. It is valid until a parameter is not changed.
  1972. DIExpression *NewExpr =
  1973. DIExpression::prepend(MI.getDebugExpression(), DIExpression::EntryValue);
  1974. VarLoc EntryValLocAsBackup = VarLoc::CreateEntryBackupLoc(MI, NewExpr);
  1975. LocIndices EntryValLocIDs = VarLocIDs.insert(EntryValLocAsBackup);
  1976. OpenRanges.insert(EntryValLocIDs, EntryValLocAsBackup);
  1977. }
  1978. /// Calculate the liveness information for the given machine function and
  1979. /// extend ranges across basic blocks.
  1980. bool VarLocBasedLDV::ExtendRanges(MachineFunction &MF,
  1981. MachineDominatorTree *DomTree,
  1982. TargetPassConfig *TPC, unsigned InputBBLimit,
  1983. unsigned InputDbgValLimit) {
  1984. (void)DomTree;
  1985. LLVM_DEBUG(dbgs() << "\nDebug Range Extension: " << MF.getName() << "\n");
  1986. if (!MF.getFunction().getSubprogram())
  1987. // VarLocBaseLDV will already have removed all DBG_VALUEs.
  1988. return false;
  1989. // Skip functions from NoDebug compilation units.
  1990. if (MF.getFunction().getSubprogram()->getUnit()->getEmissionKind() ==
  1991. DICompileUnit::NoDebug)
  1992. return false;
  1993. TRI = MF.getSubtarget().getRegisterInfo();
  1994. TII = MF.getSubtarget().getInstrInfo();
  1995. TFI = MF.getSubtarget().getFrameLowering();
  1996. TFI->getCalleeSaves(MF, CalleeSavedRegs);
  1997. this->TPC = TPC;
  1998. LS.initialize(MF);
  1999. bool Changed = false;
  2000. bool OLChanged = false;
  2001. bool MBBJoined = false;
  2002. VarLocMap VarLocIDs; // Map VarLoc<>unique ID for use in bitvectors.
  2003. OverlapMap OverlapFragments; // Map of overlapping variable fragments.
  2004. OpenRangesSet OpenRanges(Alloc, OverlapFragments);
  2005. // Ranges that are open until end of bb.
  2006. VarLocInMBB OutLocs; // Ranges that exist beyond bb.
  2007. VarLocInMBB InLocs; // Ranges that are incoming after joining.
  2008. TransferMap Transfers; // DBG_VALUEs associated with transfers (such as
  2009. // spills, copies and restores).
  2010. // Map responsible MI to attached Transfer emitted from Backup Entry Value.
  2011. InstToEntryLocMap EntryValTransfers;
  2012. // Map a Register to the last MI which clobbered it.
  2013. RegDefToInstMap RegSetInstrs;
  2014. VarToFragments SeenFragments;
  2015. // Blocks which are artificial, i.e. blocks which exclusively contain
  2016. // instructions without locations, or with line 0 locations.
  2017. SmallPtrSet<const MachineBasicBlock *, 16> ArtificialBlocks;
  2018. DenseMap<unsigned int, MachineBasicBlock *> OrderToBB;
  2019. DenseMap<MachineBasicBlock *, unsigned int> BBToOrder;
  2020. std::priority_queue<unsigned int, std::vector<unsigned int>,
  2021. std::greater<unsigned int>>
  2022. Worklist;
  2023. std::priority_queue<unsigned int, std::vector<unsigned int>,
  2024. std::greater<unsigned int>>
  2025. Pending;
  2026. // Set of register defines that are seen when traversing the entry block
  2027. // looking for debug entry value candidates.
  2028. DefinedRegsSet DefinedRegs;
  2029. // Only in the case of entry MBB collect DBG_VALUEs representing
  2030. // function parameters in order to generate debug entry values for them.
  2031. MachineBasicBlock &First_MBB = *(MF.begin());
  2032. for (auto &MI : First_MBB) {
  2033. collectRegDefs(MI, DefinedRegs, TRI);
  2034. if (MI.isDebugValue())
  2035. recordEntryValue(MI, DefinedRegs, OpenRanges, VarLocIDs);
  2036. }
  2037. // Initialize per-block structures and scan for fragment overlaps.
  2038. for (auto &MBB : MF)
  2039. for (auto &MI : MBB)
  2040. if (MI.isDebugValue())
  2041. accumulateFragmentMap(MI, SeenFragments, OverlapFragments);
  2042. auto hasNonArtificialLocation = [](const MachineInstr &MI) -> bool {
  2043. if (const DebugLoc &DL = MI.getDebugLoc())
  2044. return DL.getLine() != 0;
  2045. return false;
  2046. };
  2047. for (auto &MBB : MF)
  2048. if (none_of(MBB.instrs(), hasNonArtificialLocation))
  2049. ArtificialBlocks.insert(&MBB);
  2050. LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
  2051. "OutLocs after initialization", dbgs()));
  2052. ReversePostOrderTraversal<MachineFunction *> RPOT(&MF);
  2053. unsigned int RPONumber = 0;
  2054. for (MachineBasicBlock *MBB : RPOT) {
  2055. OrderToBB[RPONumber] = MBB;
  2056. BBToOrder[MBB] = RPONumber;
  2057. Worklist.push(RPONumber);
  2058. ++RPONumber;
  2059. }
  2060. if (RPONumber > InputBBLimit) {
  2061. unsigned NumInputDbgValues = 0;
  2062. for (auto &MBB : MF)
  2063. for (auto &MI : MBB)
  2064. if (MI.isDebugValue())
  2065. ++NumInputDbgValues;
  2066. if (NumInputDbgValues > InputDbgValLimit) {
  2067. LLVM_DEBUG(dbgs() << "Disabling VarLocBasedLDV: " << MF.getName()
  2068. << " has " << RPONumber << " basic blocks and "
  2069. << NumInputDbgValues
  2070. << " input DBG_VALUEs, exceeding limits.\n");
  2071. return false;
  2072. }
  2073. }
  2074. // This is a standard "union of predecessor outs" dataflow problem.
  2075. // To solve it, we perform join() and process() using the two worklist method
  2076. // until the ranges converge.
  2077. // Ranges have converged when both worklists are empty.
  2078. SmallPtrSet<const MachineBasicBlock *, 16> Visited;
  2079. while (!Worklist.empty() || !Pending.empty()) {
  2080. // We track what is on the pending worklist to avoid inserting the same
  2081. // thing twice. We could avoid this with a custom priority queue, but this
  2082. // is probably not worth it.
  2083. SmallPtrSet<MachineBasicBlock *, 16> OnPending;
  2084. LLVM_DEBUG(dbgs() << "Processing Worklist\n");
  2085. while (!Worklist.empty()) {
  2086. MachineBasicBlock *MBB = OrderToBB[Worklist.top()];
  2087. Worklist.pop();
  2088. MBBJoined = join(*MBB, OutLocs, InLocs, VarLocIDs, Visited,
  2089. ArtificialBlocks);
  2090. MBBJoined |= Visited.insert(MBB).second;
  2091. if (MBBJoined) {
  2092. MBBJoined = false;
  2093. Changed = true;
  2094. // Now that we have started to extend ranges across BBs we need to
  2095. // examine spill, copy and restore instructions to see whether they
  2096. // operate with registers that correspond to user variables.
  2097. // First load any pending inlocs.
  2098. OpenRanges.insertFromLocSet(getVarLocsInMBB(MBB, InLocs), VarLocIDs);
  2099. LastNonDbgMI = nullptr;
  2100. RegSetInstrs.clear();
  2101. for (auto &MI : *MBB)
  2102. process(MI, OpenRanges, VarLocIDs, Transfers, EntryValTransfers,
  2103. RegSetInstrs);
  2104. OLChanged |= transferTerminator(MBB, OpenRanges, OutLocs, VarLocIDs);
  2105. LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs,
  2106. "OutLocs after propagating", dbgs()));
  2107. LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs,
  2108. "InLocs after propagating", dbgs()));
  2109. if (OLChanged) {
  2110. OLChanged = false;
  2111. for (auto *s : MBB->successors())
  2112. if (OnPending.insert(s).second) {
  2113. Pending.push(BBToOrder[s]);
  2114. }
  2115. }
  2116. }
  2117. }
  2118. Worklist.swap(Pending);
  2119. // At this point, pending must be empty, since it was just the empty
  2120. // worklist
  2121. assert(Pending.empty() && "Pending should be empty");
  2122. }
  2123. // Add any DBG_VALUE instructions created by location transfers.
  2124. for (auto &TR : Transfers) {
  2125. assert(!TR.TransferInst->isTerminator() &&
  2126. "Cannot insert DBG_VALUE after terminator");
  2127. MachineBasicBlock *MBB = TR.TransferInst->getParent();
  2128. const VarLoc &VL = VarLocIDs[TR.LocationID];
  2129. MachineInstr *MI = VL.BuildDbgValue(MF);
  2130. MBB->insertAfterBundle(TR.TransferInst->getIterator(), MI);
  2131. }
  2132. Transfers.clear();
  2133. // Add DBG_VALUEs created using Backup Entry Value location.
  2134. for (auto &TR : EntryValTransfers) {
  2135. MachineInstr *TRInst = const_cast<MachineInstr *>(TR.first);
  2136. assert(!TRInst->isTerminator() &&
  2137. "Cannot insert DBG_VALUE after terminator");
  2138. MachineBasicBlock *MBB = TRInst->getParent();
  2139. const VarLoc &VL = VarLocIDs[TR.second];
  2140. MachineInstr *MI = VL.BuildDbgValue(MF);
  2141. MBB->insertAfterBundle(TRInst->getIterator(), MI);
  2142. }
  2143. EntryValTransfers.clear();
  2144. // Deferred inlocs will not have had any DBG_VALUE insts created; do
  2145. // that now.
  2146. flushPendingLocs(InLocs, VarLocIDs);
  2147. LLVM_DEBUG(printVarLocInMBB(MF, OutLocs, VarLocIDs, "Final OutLocs", dbgs()));
  2148. LLVM_DEBUG(printVarLocInMBB(MF, InLocs, VarLocIDs, "Final InLocs", dbgs()));
  2149. return Changed;
  2150. }
  2151. LDVImpl *
  2152. llvm::makeVarLocBasedLiveDebugValues()
  2153. {
  2154. return new VarLocBasedLDV();
  2155. }