//===-- ProfiledBinary.h - Binary decoder -----------------------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#ifndef LLVM_TOOLS_LLVM_PROFGEN_PROFILEDBINARY_H
#define LLVM_TOOLS_LLVM_PROFGEN_PROFILEDBINARY_H

#include "CallContext.h"
#include "ErrorHandling.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/StringSet.h"
#include "llvm/DebugInfo/DWARF/DWARFContext.h"
#include "llvm/DebugInfo/Symbolize/Symbolize.h"
#include "llvm/MC/MCAsmInfo.h"
#include "llvm/MC/MCContext.h"
#include "llvm/MC/MCDisassembler/MCDisassembler.h"
#include "llvm/MC/MCInst.h"
#include "llvm/MC/MCInstPrinter.h"
#include "llvm/MC/MCInstrAnalysis.h"
#include "llvm/MC/MCInstrInfo.h"
#include "llvm/MC/MCObjectFileInfo.h"
#include "llvm/MC/MCPseudoProbe.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/MC/MCSubtargetInfo.h"
#include "llvm/MC/MCTargetOptions.h"
#include "llvm/Object/ELFObjectFile.h"
#include "llvm/ProfileData/SampleProf.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Path.h"
#include "llvm/Transforms/IPO/SampleContextTracker.h"
#include <list>
#include <map>
#include <set>
#include <sstream>
#include <string>
#include <unordered_map>
#include <unordered_set>
#include <vector>

namespace llvm {
extern cl::opt<bool> EnableCSPreInliner;
extern cl::opt<bool> UseContextCostForPreInliner;
} // namespace llvm

using namespace llvm;
using namespace sampleprof;
using namespace llvm::object;

namespace llvm {
namespace sampleprof {

class ProfiledBinary;
class MissingFrameInferrer;

struct InstructionPointer {
  const ProfiledBinary *Binary;
  // Address of the executable segment of the binary.
  uint64_t Address;
  // Index to the sorted code address array of the binary.
  uint64_t Index = 0;
  InstructionPointer(const ProfiledBinary *Binary, uint64_t Address,
                     bool RoundToNext = false);
  bool advance();
  bool backward();
  void update(uint64_t Addr);
};

// The special frame addresses.
enum SpecialFrameAddr {
  // Dummy root of the frame trie.
  DummyRoot = 0,
  // Represents all addresses outside of the current binary. This is also used
  // to indicate that the call stack should be truncated, since this isn't a
  // real call context the compiler will see.
  ExternalAddr = 1,
};

using RangesTy = std::vector<std::pair<uint64_t, uint64_t>>;

struct BinaryFunction {
  StringRef FuncName;
  // End of range is an exclusive bound.
  RangesTy Ranges;

  uint64_t getFuncSize() {
    uint64_t Sum = 0;
    for (auto &R : Ranges) {
      Sum += R.second - R.first;
    }
    return Sum;
  }
};

// Info about a function range. A function can be split into multiple
// non-contiguous ranges; each range corresponds to one FuncRange.
struct FuncRange {
  uint64_t StartAddress;
  // EndAddress is an exclusive bound.
  uint64_t EndAddress;
  // Function the range belongs to.
  BinaryFunction *Func;
  // Whether the start address is the real entry of the function.
  bool IsFuncEntry = false;

  StringRef getFuncName() { return Func->FuncName; }
};

// Prolog and epilog address tracker, used to filter out broken stack samples.
// Currently we use a heuristic size (two instructions) to infer the prolog and
// epilog based on the start address and the return address (see the
// illustrative example after this struct). In the future we will switch to a
// Dwarf CFI based tracker.
struct PrologEpilogTracker {
  // A set of prolog and epilog addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> PrologEpilogSet;
  ProfiledBinary *Binary;
  PrologEpilogTracker(ProfiledBinary *Bin) : Binary(Bin){};

  // Take the first two addresses of a function as its prolog.
  void
  inferPrologAddresses(std::map<uint64_t, FuncRange> &FuncStartAddressMap) {
    for (auto I : FuncStartAddressMap) {
      PrologEpilogSet.insert(I.first);
      InstructionPointer IP(Binary, I.first);
      if (!IP.advance())
        break;
      PrologEpilogSet.insert(IP.Address);
    }
  }

  // Take a return address and the address just before it as the epilog.
  void inferEpilogAddresses(std::unordered_set<uint64_t> &RetAddrs) {
    for (auto Addr : RetAddrs) {
      PrologEpilogSet.insert(Addr);
      InstructionPointer IP(Binary, Addr);
      if (!IP.backward())
        break;
      PrologEpilogSet.insert(IP.Address);
    }
  }
};
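
// Illustrative example of the heuristic above (all addresses and instruction
// sizes below are made up): for a function entry at 0x1000 whose first
// instruction is 4 bytes long, inferPrologAddresses() records {0x1000, 0x1004}
// as prolog addresses; for a return instruction at 0x10f8 preceded by a 2-byte
// instruction, inferEpilogAddresses() records {0x10f6, 0x10f8} as epilog
// addresses. addressInPrologEpilog() on ProfiledBinary (declared below) then
// reports true for these addresses, so the virtual unwinder can treat samples
// landing on them as potentially broken.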

// Tracks function byte size under different contexts (the outlined version as
// well as various inlined versions). It also provides query support to get a
// function's size with the best matching context, which is used to help the
// pre-inliner use accurate post-optimization sizes to make decisions.
// TODO: If an inlinee is completely optimized away, ideally we should have
// zero for its context size; currently we would miss such a context since it
// doesn't have instructions. To fix this, we need to mark all inlinees with an
// entry probe but without instructions as having zero size.
class BinarySizeContextTracker {
public:
  // Add an instruction with the given size to a context.
  void addInstructionForContext(const SampleContextFrameVector &Context,
                                uint32_t InstrSize);

  // Get the function size for a specific context. When there is no exact match
  // for the given context, try to retrieve the size of the function from the
  // closest matching context.
  uint32_t getFuncSizeForContext(const ContextTrieNode *Context);

  // For inlinees that are fully optimized away, we can establish zero size
  // using their remaining probes.
  void trackInlineesOptimizedAway(MCPseudoProbeDecoder &ProbeDecoder);

  using ProbeFrameStack = SmallVector<std::pair<StringRef, uint32_t>>;
  void trackInlineesOptimizedAway(MCPseudoProbeDecoder &ProbeDecoder,
                                  MCDecodedPseudoProbeInlineTree &ProbeNode,
                                  ProbeFrameStack &Context);

  void dump() { RootContext.dumpTree(); }

private:
  // Root node for the context trie; note that this is a reverse context trie,
  // with the callee as parent and the caller as child. This way we can
  // traverse from the root to find the best/longest matching context if an
  // exact match does not exist. It gives us the best possible estimate for a
  // function's post-inline, post-optimization byte size (see the sketch after
  // this class).
  ContextTrieNode RootContext;
};
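
// Illustrative sketch of how the reverse trie above is meant to be used (the
// function names are hypothetical): the size of `baz` inlined through the
// context "main -> foo -> baz" is accumulated along the path baz -> foo ->
// main, i.e. callee first. A query for the context "bar -> foo -> baz" then
// walks baz -> foo, cannot descend into `bar`, and returns the size recorded
// at the deepest node it did match, giving the pre-inliner the closest
// available post-optimization estimate instead of no answer at all.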

using AddressRange = std::pair<uint64_t, uint64_t>;

class ProfiledBinary {
  // Absolute path of the executable binary.
  std::string Path;
  // Path of the debug info binary.
  std::string DebugBinaryPath;
  // Path given to the symbolizer; it should point to the binary that carries
  // debug info.
  StringRef SymbolizerPath;
  // The target triple.
  Triple TheTriple;
  // The runtime base address that the first executable segment is loaded at.
  uint64_t BaseAddress = 0;
  // The runtime base address that the first loadable segment is loaded at.
  uint64_t FirstLoadableAddress = 0;
  // The preferred load address of each executable segment.
  std::vector<uint64_t> PreferredTextSegmentAddresses;
  // The file offset of each executable segment.
  std::vector<uint64_t> TextSegmentOffsets;

  // The MC components used for disassembly.
  std::unique_ptr<const MCRegisterInfo> MRI;
  std::unique_ptr<const MCAsmInfo> AsmInfo;
  std::unique_ptr<const MCSubtargetInfo> STI;
  std::unique_ptr<const MCInstrInfo> MII;
  std::unique_ptr<MCDisassembler> DisAsm;
  std::unique_ptr<const MCInstrAnalysis> MIA;
  std::unique_ptr<MCInstPrinter> IPrinter;

  // A list of text sections sorted by start RVA and size. Used to check
  // if a given RVA is a valid code address.
  std::set<std::pair<uint64_t, uint64_t>> TextSections;
  // A map from function name to BinaryFunction info.
  std::unordered_map<std::string, BinaryFunction> BinaryFunctions;
  // A list of binary functions that have samples.
  std::unordered_set<const BinaryFunction *> ProfiledFunctions;
  // GUID to ELF symbol start address map.
  DenseMap<uint64_t, uint64_t> SymbolStartAddrs;
  // Start address to ELF symbol GUID map.
  std::unordered_multimap<uint64_t, uint64_t> StartAddrToSymMap;
  // An ordered map from a function's start address to its function range info.
  // Currently, to determine whether an ELF offset is the start of a real
  // function, we leverage the function range info from DWARF.
  std::map<uint64_t, FuncRange> StartAddrToFuncRangeMap;
  // Address to context location map. Used to expand the context.
  std::unordered_map<uint64_t, SampleContextFrameVector> AddressToLocStackMap;
  // Address to instruction size map. Also used for quick address lookups.
  std::unordered_map<uint64_t, uint64_t> AddressToInstSizeMap;
  // An array of addresses of all instructions sorted in increasing order. The
  // sorting is needed to quickly advance to the next forward/backward
  // instruction.
  std::vector<uint64_t> CodeAddressVec;
  // A set of call instruction addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> CallAddressSet;
  // A set of return instruction addresses. Used by virtual unwinding.
  std::unordered_set<uint64_t> RetAddressSet;
  // An ordered set of unconditional branch instruction addresses.
  std::set<uint64_t> UncondBranchAddrSet;
  // A set of branch instruction addresses.
  std::unordered_set<uint64_t> BranchAddressSet;

  // Estimate and track function prolog and epilog ranges.
  PrologEpilogTracker ProEpilogTracker;

  // Infer missing frames due to compiler optimizations such as tail call
  // elimination.
  std::unique_ptr<MissingFrameInferrer> MissingContextInferrer;

  // Track function sizes under different contexts.
  BinarySizeContextTracker FuncSizeTracker;

  // The symbolizer used to get the inline context for an instruction.
  std::unique_ptr<symbolize::LLVMSymbolizer> Symbolizer;

  // String table owning function name strings created from the symbolizer.
  std::unordered_set<std::string> NameStrings;

  // A collection of functions to print disassembly for.
  StringSet<> DisassembleFunctionSet;

  // Pseudo probe decoder.
  MCPseudoProbeDecoder ProbeDecoder;

  // Function name to probe frame map for top-level outlined functions.
  StringMap<MCDecodedPseudoProbeInlineTree *> TopLevelProbeFrameMap;

  bool UsePseudoProbes = false;

  bool UseFSDiscriminator = false;

  // Whether we need to symbolize all instructions to get function context
  // size.
  bool TrackFuncContextSize = false;

  // Indicates whether the base loading address is parsed from the mmap event
  // or whether the preferred address is used instead.
  bool IsLoadedByMMap = false;

  // Used to avoid redundant warnings.
  bool MissingMMapWarned = false;

  void setPreferredTextSegmentAddresses(const ELFObjectFileBase *O);

  template <class ELFT>
  void setPreferredTextSegmentAddresses(const ELFFile<ELFT> &Obj,
                                        StringRef FileName);

  void checkPseudoProbe(const ELFObjectFileBase *Obj);

  void decodePseudoProbe(const ELFObjectFileBase *Obj);

  void
  checkUseFSDiscriminator(const ELFObjectFileBase *Obj,
                          std::map<SectionRef, SectionSymbolsTy> &AllSymbols);

  // Set up the disassembler and related components.
  void setUpDisassembler(const ELFObjectFileBase *Obj);
  void setupSymbolizer();

  // Load debug info of subprograms from the DWARF section.
  void loadSymbolsFromDWARF(ObjectFile &Obj);

  // Load debug info from a DWARF unit.
  void loadSymbolsFromDWARFUnit(DWARFUnit &CompilationUnit);

  // Create the ELF symbol to start address mapping.
  void populateElfSymbolAddressList(const ELFObjectFileBase *O);

  // A function may be split into multiple non-contiguous address ranges. We
  // use this to set whether the start of a function range is the real entry of
  // the function, and to set it to false for non-function labels.
  void setIsFuncEntry(FuncRange *FRange, StringRef RangeSymName);

  // Warn if no entry range exists in the function.
  void warnNoFuncEntry();

  /// Disassemble the text section and build various address maps.
  void disassemble(const ELFObjectFileBase *O);

  /// Helper function to disassemble the symbol and extract info for unwinding.
  bool dissassembleSymbol(std::size_t SI, ArrayRef<uint8_t> Bytes,
                          SectionSymbolsTy &Symbols, const SectionRef &Section);

  /// Symbolize a given instruction pointer and return a full call context.
  SampleContextFrameVector symbolize(const InstructionPointer &IP,
                                     bool UseCanonicalFnName = false,
                                     bool UseProbeDiscriminator = false);

  /// Decode the interesting parts of the binary and build internal data
  /// structures. At a high level, the parts of interest are:
  ///   1. Text sections, including the main code section and the PLT
  ///      entries that will be used to handle cross-module call transitions.
  ///   2. The .debug_line section, used by Dwarf-based profile generation.
  ///   3. Pseudo probe related sections, used by probe-based profile
  ///      generation.
  void load();

public:
  ProfiledBinary(const StringRef ExeBinPath, const StringRef DebugBinPath);
  ~ProfiledBinary();

  void decodePseudoProbe();

  StringRef getPath() const { return Path; }
  StringRef getName() const { return llvm::sys::path::filename(Path); }

  uint64_t getBaseAddress() const { return BaseAddress; }
  void setBaseAddress(uint64_t Address) { BaseAddress = Address; }

  // Canonicalize to use the preferred load address as the base address.
  uint64_t canonicalizeVirtualAddress(uint64_t Address) {
    return Address - BaseAddress + getPreferredBaseAddress();
  }
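
  // Worked example (numbers are made up): if the preferred (link-time) text
  // segment address is 0x400000 but the loader mapped it at runtime base
  // 0x7f0000400000, a sampled address of 0x7f0000401234 canonicalizes to
  //   0x7f0000401234 - 0x7f0000400000 + 0x400000 = 0x401234,
  // which matches the address space the internal maps (e.g.
  // AddressToInstSizeMap) are built in from disassembly.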

  // Return the preferred load address for the first executable segment.
  uint64_t getPreferredBaseAddress() const {
    return PreferredTextSegmentAddresses[0];
  }
  // Return the preferred load address for the first loadable segment.
  uint64_t getFirstLoadableAddress() const { return FirstLoadableAddress; }
  // Return the file offset for the first executable segment.
  uint64_t getTextSegmentOffset() const { return TextSegmentOffsets[0]; }
  const std::vector<uint64_t> &getPreferredTextSegmentAddresses() const {
    return PreferredTextSegmentAddresses;
  }
  const std::vector<uint64_t> &getTextSegmentOffsets() const {
    return TextSegmentOffsets;
  }

  uint64_t getInstSize(uint64_t Address) const {
    auto I = AddressToInstSizeMap.find(Address);
    if (I == AddressToInstSizeMap.end())
      return 0;
    return I->second;
  }

  bool addressIsCode(uint64_t Address) const {
    return AddressToInstSizeMap.find(Address) != AddressToInstSizeMap.end();
  }
  bool addressIsCall(uint64_t Address) const {
    return CallAddressSet.count(Address);
  }
  bool addressIsReturn(uint64_t Address) const {
    return RetAddressSet.count(Address);
  }
  bool addressInPrologEpilog(uint64_t Address) const {
    return ProEpilogTracker.PrologEpilogSet.count(Address);
  }

  bool addressIsTransfer(uint64_t Address) {
    return BranchAddressSet.count(Address) || RetAddressSet.count(Address) ||
           CallAddressSet.count(Address);
  }

  bool rangeCrossUncondBranch(uint64_t Start, uint64_t End) {
    if (Start >= End)
      return false;
    auto R = UncondBranchAddrSet.lower_bound(Start);
    return R != UncondBranchAddrSet.end() && *R < End;
  }

  uint64_t getAddressforIndex(uint64_t Index) const {
    return CodeAddressVec[Index];
  }

  size_t getCodeAddrVecSize() const { return CodeAddressVec.size(); }

  bool usePseudoProbes() const { return UsePseudoProbes; }
  bool useFSDiscriminator() const { return UseFSDiscriminator; }

  // Get the index in CodeAddressVec for the given address. Since we might get
  // an address that is not a code address, the lower-bound operation rounds it
  // up to the next valid code address.
  uint32_t getIndexForAddr(uint64_t Address) const {
    auto Low = llvm::lower_bound(CodeAddressVec, Address);
    return Low - CodeAddressVec.begin();
  }

  // Given a frame address (the return address recorded for a calling frame),
  // return the address of the call instruction just before it, or 0 if the
  // preceding instruction is not a call.
  uint64_t getCallAddrFromFrameAddr(uint64_t FrameAddr) const {
    if (FrameAddr == ExternalAddr)
      return ExternalAddr;
    auto I = getIndexForAddr(FrameAddr);
    FrameAddr = I ? getAddressforIndex(I - 1) : 0;
    if (FrameAddr && addressIsCall(FrameAddr))
      return FrameAddr;
    return 0;
  }
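
  // Worked example (made-up addresses): assume a 5-byte call instruction at
  // 0x2000, so the return address seen in a frame is 0x2005. For FrameAddr ==
  // 0x2005, getIndexForAddr() locates 0x2005 in CodeAddressVec, stepping back
  // one slot yields 0x2000, and since 0x2000 is in CallAddressSet it is
  // returned; if the preceding instruction were not a call, 0 would be
  // returned instead.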

  FuncRange *findFuncRangeForStartAddr(uint64_t Address) {
    auto I = StartAddrToFuncRangeMap.find(Address);
    if (I == StartAddrToFuncRangeMap.end())
      return nullptr;
    return &I->second;
  }

  // Binary search for the function range that includes the input address.
  FuncRange *findFuncRange(uint64_t Address) {
    auto I = StartAddrToFuncRangeMap.upper_bound(Address);
    if (I == StartAddrToFuncRangeMap.begin())
      return nullptr;
    I--;
    if (Address >= I->second.EndAddress)
      return nullptr;
    return &I->second;
  }

  // Get all ranges of one function.
  RangesTy getRanges(uint64_t Address) {
    auto *FRange = findFuncRange(Address);
    // Ignore ranges that fall into the PLT section or a system library.
    if (!FRange)
      return RangesTy();
    return FRange->Func->Ranges;
  }

  const std::unordered_map<std::string, BinaryFunction> &
  getAllBinaryFunctions() {
    return BinaryFunctions;
  }

  std::unordered_set<const BinaryFunction *> &getProfiledFunctions() {
    return ProfiledFunctions;
  }

  void setProfiledFunctions(std::unordered_set<const BinaryFunction *> &Funcs) {
    ProfiledFunctions = Funcs;
  }

  BinaryFunction *getBinaryFunction(StringRef FName) {
    auto I = BinaryFunctions.find(FName.str());
    if (I == BinaryFunctions.end())
      return nullptr;
    return &I->second;
  }

  uint32_t getFuncSizeForContext(const ContextTrieNode *ContextNode) {
    return FuncSizeTracker.getFuncSizeForContext(ContextNode);
  }

  void inferMissingFrames(const SmallVectorImpl<uint64_t> &Context,
                          SmallVectorImpl<uint64_t> &NewContext);

  // Load the symbols from the debug table and populate them into the symbol
  // list.
  void populateSymbolListFromDWARF(ProfileSymbolList &SymbolList);

  SampleContextFrameVector
  getFrameLocationStack(uint64_t Address, bool UseProbeDiscriminator = false) {
    InstructionPointer IP(this, Address);
    return symbolize(IP, true, UseProbeDiscriminator);
  }

  const SampleContextFrameVector &
  getCachedFrameLocationStack(uint64_t Address,
                              bool UseProbeDiscriminator = false) {
    auto I = AddressToLocStackMap.emplace(Address, SampleContextFrameVector());
    if (I.second) {
      I.first->second = getFrameLocationStack(Address, UseProbeDiscriminator);
    }
    return I.first->second;
  }

  std::optional<SampleContextFrame> getInlineLeafFrameLoc(uint64_t Address) {
    const auto &Stack = getCachedFrameLocationStack(Address);
    if (Stack.empty())
      return {};
    return Stack.back();
  }

  void flushSymbolizer() { Symbolizer.reset(); }

  MissingFrameInferrer *getMissingContextInferrer() {
    return MissingContextInferrer.get();
  }

  // Compare the inline contexts of two addresses.
  bool inlineContextEqual(uint64_t Add1, uint64_t Add2);

  // Get the full context of the current stack with inline context filled in.
  // It will search the disassembly info stored in AddressToLocStackMap. This
  // is used as the key of the function sample map.
  SampleContextFrameVector
  getExpandedContext(const SmallVectorImpl<uint64_t> &Stack,
                     bool &WasLeafInlined);

  // Go through the instructions in the given range and record their sizes for
  // the inline context.
  void computeInlinedContextSizeForRange(uint64_t StartAddress,
                                         uint64_t EndAddress);

  void computeInlinedContextSizeForFunc(const BinaryFunction *Func);

  const MCDecodedPseudoProbe *getCallProbeForAddr(uint64_t Address) const {
    return ProbeDecoder.getCallProbeForAddr(Address);
  }

  void getInlineContextForProbe(const MCDecodedPseudoProbe *Probe,
                                SampleContextFrameVector &InlineContextStack,
                                bool IncludeLeaf = false) const {
    SmallVector<MCPseduoProbeFrameLocation, 16> ProbeInlineContext;
    ProbeDecoder.getInlineContextForProbe(Probe, ProbeInlineContext,
                                          IncludeLeaf);
    for (uint32_t I = 0; I < ProbeInlineContext.size(); I++) {
      auto &Callsite = ProbeInlineContext[I];
      // Clear the current context for an unknown probe.
      if (Callsite.second == 0 && I != ProbeInlineContext.size() - 1) {
        InlineContextStack.clear();
        continue;
      }
      InlineContextStack.emplace_back(Callsite.first,
                                      LineLocation(Callsite.second, 0));
    }
  }
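
  // Illustrative example for the loop above (hypothetical frames): if the
  // decoder returns the inline context {foo:2, unknown:0, bar:3}, the zero
  // line location on the middle, non-leaf frame marks an unknown callsite, so
  // the frames accumulated so far ({foo:2}) are discarded and only {bar:3} is
  // emitted, truncating the context at the point it is no longer reliable.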

  const AddressProbesMap &getAddress2ProbesMap() const {
    return ProbeDecoder.getAddress2ProbesMap();
  }

  const MCPseudoProbeFuncDesc *getFuncDescForGUID(uint64_t GUID) {
    return ProbeDecoder.getFuncDescForGUID(GUID);
  }

  const MCPseudoProbeFuncDesc *
  getInlinerDescForProbe(const MCDecodedPseudoProbe *Probe) {
    return ProbeDecoder.getInlinerDescForProbe(Probe);
  }

  bool getTrackFuncContextSize() { return TrackFuncContextSize; }

  bool getIsLoadedByMMap() { return IsLoadedByMMap; }
  void setIsLoadedByMMap(bool Value) { IsLoadedByMMap = Value; }

  bool getMissingMMapWarned() { return MissingMMapWarned; }
  void setMissingMMapWarned(bool Value) { MissingMMapWarned = Value; }
};

} // end namespace sampleprof
} // end namespace llvm

#endif