//===- MemProfiler.cpp - memory allocation and access profiler ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of MemProfiler. Memory accesses are instrumented
// to increment the access count held in a shadow memory location, or
// alternatively to call into the runtime. Memory intrinsic calls (memmove,
// memcpy, memset) are changed to call the memory profiling runtime version
// instead.
//
//===----------------------------------------------------------------------===//
#include "llvm/Transforms/Instrumentation/MemProfiler.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/ProfileData/InstrProf.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"

using namespace llvm;

#define DEBUG_TYPE "memprof"
constexpr int LLVM_MEM_PROFILER_VERSION = 1;

// Size of memory mapped to a single shadow location.
constexpr uint64_t DefaultShadowGranularity = 64;

// Scale from granularity down to shadow size.
constexpr uint64_t DefaultShadowScale = 3;

constexpr char MemProfModuleCtorName[] = "memprof.module_ctor";
constexpr uint64_t MemProfCtorAndDtorPriority = 1;
// On Emscripten, the system needs more than one priority for constructors.
constexpr uint64_t MemProfEmscriptenCtorAndDtorPriority = 50;
constexpr char MemProfInitName[] = "__memprof_init";
constexpr char MemProfVersionCheckNamePrefix[] =
    "__memprof_version_mismatch_check_v";

constexpr char MemProfShadowMemoryDynamicAddress[] =
    "__memprof_shadow_memory_dynamic_address";

constexpr char MemProfFilenameVar[] = "__memprof_profile_filename";
// Command-line flags.

static cl::opt<bool> ClInsertVersionCheck(
    "memprof-guard-against-version-mismatch",
    cl::desc("Guard against compiler/runtime version mismatch."), cl::Hidden,
    cl::init(true));

// This flag may need to be replaced with -f[no-]memprof-reads.
static cl::opt<bool> ClInstrumentReads("memprof-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClInstrumentWrites("memprof-instrument-writes",
                       cl::desc("instrument write instructions"), cl::Hidden,
                       cl::init(true));

static cl::opt<bool> ClInstrumentAtomics(
    "memprof-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool> ClUseCalls(
    "memprof-use-callbacks",
    cl::desc("Use callbacks instead of inline instrumentation sequences."),
    cl::Hidden, cl::init(false));

static cl::opt<std::string>
    ClMemoryAccessCallbackPrefix("memprof-memory-access-callback-prefix",
                                 cl::desc("Prefix for memory access callbacks"),
                                 cl::Hidden, cl::init("__memprof_"));
// These flags allow changing the shadow mapping.
// The shadow mapping looks like
//    Shadow = ((Mem & mask) >> scale) + offset
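// As a rough worked example (assuming the default granularity of 64 bytes and
// scale of 3, so mask == ~0x3f): an access to Mem == 0x10000044 maps to
//    Shadow = ((0x10000044 & ~0x3f) >> 3) + offset
//           = (0x10000040 >> 3) + offset
//           = 0x2000008 + offset
// i.e. every 64-byte granule of memory shares one 8-byte shadow counter.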
static cl::opt<int> ClMappingScale("memprof-mapping-scale",
                                   cl::desc("scale of memprof shadow mapping"),
                                   cl::Hidden, cl::init(DefaultShadowScale));

static cl::opt<int>
    ClMappingGranularity("memprof-mapping-granularity",
                         cl::desc("granularity of memprof shadow mapping"),
                         cl::Hidden, cl::init(DefaultShadowGranularity));

static cl::opt<bool> ClStack("memprof-instrument-stack",
                             cl::desc("Instrument scalar stack variables"),
                             cl::Hidden, cl::init(false));

// Debug flags.

static cl::opt<int> ClDebug("memprof-debug", cl::desc("debug"), cl::Hidden,
                            cl::init(0));

static cl::opt<std::string> ClDebugFunc("memprof-debug-func", cl::Hidden,
                                        cl::desc("Debug func"));

static cl::opt<int> ClDebugMin("memprof-debug-min", cl::desc("Debug min inst"),
                               cl::Hidden, cl::init(-1));

static cl::opt<int> ClDebugMax("memprof-debug-max", cl::desc("Debug max inst"),
                               cl::Hidden, cl::init(-1));
STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
STATISTIC(NumSkippedStackReads, "Number of non-instrumented stack reads");
STATISTIC(NumSkippedStackWrites, "Number of non-instrumented stack writes");
namespace {

/// This struct defines the shadow mapping using the rule:
///   shadow = ((mem & mask) >> Scale) + DynamicShadowOffset.
struct ShadowMapping {
  ShadowMapping() {
    Scale = ClMappingScale;
    Granularity = ClMappingGranularity;
    Mask = ~(Granularity - 1);
  }

  int Scale;
  int Granularity;
  uint64_t Mask; // Computed as ~(Granularity-1)
};

static uint64_t getCtorAndDtorPriority(Triple &TargetTriple) {
  return TargetTriple.isOSEmscripten() ? MemProfEmscriptenCtorAndDtorPriority
                                       : MemProfCtorAndDtorPriority;
}

struct InterestingMemoryAccess {
  Value *Addr = nullptr;
  bool IsWrite;
  Type *AccessTy;
  uint64_t TypeSize;
  Value *MaybeMask = nullptr;
};
/// Instrument the code in module to profile memory accesses.
class MemProfiler {
public:
  MemProfiler(Module &M) {
    C = &(M.getContext());
    LongSize = M.getDataLayout().getPointerSizeInBits();
    IntptrTy = Type::getIntNTy(*C, LongSize);
  }

  /// If it is an interesting memory access, populate information
  /// about the access and return an InterestingMemoryAccess struct.
  /// Otherwise return std::nullopt.
  std::optional<InterestingMemoryAccess>
  isInterestingMemoryAccess(Instruction *I) const;

  void instrumentMop(Instruction *I, const DataLayout &DL,
                     InterestingMemoryAccess &Access);
  void instrumentAddress(Instruction *OrigIns, Instruction *InsertBefore,
                         Value *Addr, uint32_t TypeSize, bool IsWrite);
  void instrumentMaskedLoadOrStore(const DataLayout &DL, Value *Mask,
                                   Instruction *I, Value *Addr, Type *AccessTy,
                                   bool IsWrite);
  void instrumentMemIntrinsic(MemIntrinsic *MI);
  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
  bool instrumentFunction(Function &F);
  bool maybeInsertMemProfInitAtFunctionEntry(Function &F);
  bool insertDynamicShadowAtFunctionEntry(Function &F);

private:
  void initializeCallbacks(Module &M);

  LLVMContext *C;
  int LongSize;
  Type *IntptrTy;
  ShadowMapping Mapping;

  // These arrays are indexed by AccessIsWrite.
  FunctionCallee MemProfMemoryAccessCallback[2];
  FunctionCallee MemProfMemoryAccessCallbackSized[2];

  FunctionCallee MemProfMemmove, MemProfMemcpy, MemProfMemset;

  Value *DynamicShadowOffset = nullptr;
};
class ModuleMemProfiler {
public:
  ModuleMemProfiler(Module &M) { TargetTriple = Triple(M.getTargetTriple()); }

  bool instrumentModule(Module &);

private:
  Triple TargetTriple;
  ShadowMapping Mapping;
  Function *MemProfCtorFunction = nullptr;
};

} // end anonymous namespace
MemProfilerPass::MemProfilerPass() = default;

PreservedAnalyses MemProfilerPass::run(Function &F,
                                       AnalysisManager<Function> &AM) {
  Module &M = *F.getParent();
  MemProfiler Profiler(M);
  if (Profiler.instrumentFunction(F))
    return PreservedAnalyses::none();
  return PreservedAnalyses::all();
}

ModuleMemProfilerPass::ModuleMemProfilerPass() = default;

PreservedAnalyses ModuleMemProfilerPass::run(Module &M,
                                             AnalysisManager<Module> &AM) {
  ModuleMemProfiler Profiler(M);
  if (Profiler.instrumentModule(M))
    return PreservedAnalyses::none();
  return PreservedAnalyses::all();
}
Value *MemProfiler::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
  // (Shadow & mask) >> scale
  Shadow = IRB.CreateAnd(Shadow, Mapping.Mask);
  Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
  // (Shadow >> scale) + offset
  assert(DynamicShadowOffset);
  return IRB.CreateAdd(Shadow, DynamicShadowOffset);
}
// Instrument memset/memmove/memcpy.
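// For illustration (a sketch, not emitted verbatim): with the default callback
// prefix, a transfer such as
//    llvm.memcpy(dst, src, n)
// is redirected to the profiling runtime as
//    __memprof_memcpy(dst, src, n)
// and similarly for memmove/memset, so the runtime can account for the bytes
// touched by the intrinsic.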
void MemProfiler::instrumentMemIntrinsic(MemIntrinsic *MI) {
  IRBuilder<> IRB(MI);
  if (isa<MemTransferInst>(MI)) {
    IRB.CreateCall(
        isa<MemMoveInst>(MI) ? MemProfMemmove : MemProfMemcpy,
        {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
         IRB.CreatePointerCast(MI->getOperand(1), IRB.getInt8PtrTy()),
         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  } else if (isa<MemSetInst>(MI)) {
    IRB.CreateCall(
        MemProfMemset,
        {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
         IRB.CreateIntCast(MI->getOperand(1), IRB.getInt32Ty(), false),
         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  }
  MI->eraseFromParent();
}
std::optional<InterestingMemoryAccess>
MemProfiler::isInterestingMemoryAccess(Instruction *I) const {
  // Do not instrument the load fetching the dynamic shadow address.
  if (DynamicShadowOffset == I)
    return std::nullopt;

  InterestingMemoryAccess Access;

  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads)
      return std::nullopt;
    Access.IsWrite = false;
    Access.AccessTy = LI->getType();
    Access.Addr = LI->getPointerOperand();
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites)
      return std::nullopt;
    Access.IsWrite = true;
    Access.AccessTy = SI->getValueOperand()->getType();
    Access.Addr = SI->getPointerOperand();
  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics)
      return std::nullopt;
    Access.IsWrite = true;
    Access.AccessTy = RMW->getValOperand()->getType();
    Access.Addr = RMW->getPointerOperand();
  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics)
      return std::nullopt;
    Access.IsWrite = true;
    Access.AccessTy = XCHG->getCompareOperand()->getType();
    Access.Addr = XCHG->getPointerOperand();
  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    auto *F = CI->getCalledFunction();
    if (F && (F->getIntrinsicID() == Intrinsic::masked_load ||
              F->getIntrinsicID() == Intrinsic::masked_store)) {
      unsigned OpOffset = 0;
      if (F->getIntrinsicID() == Intrinsic::masked_store) {
        if (!ClInstrumentWrites)
          return std::nullopt;
        // Masked store has an initial operand for the value.
        OpOffset = 1;
        Access.AccessTy = CI->getArgOperand(0)->getType();
        Access.IsWrite = true;
      } else {
        if (!ClInstrumentReads)
          return std::nullopt;
        Access.AccessTy = CI->getType();
        Access.IsWrite = false;
      }

      auto *BasePtr = CI->getOperand(0 + OpOffset);
      Access.MaybeMask = CI->getOperand(2 + OpOffset);
      Access.Addr = BasePtr;
    }
  }
  if (!Access.Addr)
    return std::nullopt;

  // Do not instrument accesses from different address spaces; we cannot deal
  // with them.
  Type *PtrTy = cast<PointerType>(Access.Addr->getType()->getScalarType());
  if (PtrTy->getPointerAddressSpace() != 0)
    return std::nullopt;

  // Ignore swifterror addresses.
  // swifterror memory addresses are mem2reg promoted by instruction
  // selection. As such they cannot have regular uses like an instrumentation
  // function and it makes no sense to track them as memory.
  if (Access.Addr->isSwiftError())
    return std::nullopt;

  // Peel off GEPs and BitCasts.
  auto *Addr = Access.Addr->stripInBoundsOffsets();

  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Addr)) {
    // Do not instrument PGO counter updates.
    if (GV->hasSection()) {
      StringRef SectionName = GV->getSection();
      // Check if the global is in the PGO counters section.
      auto OF = Triple(I->getModule()->getTargetTriple()).getObjectFormat();
      if (SectionName.endswith(
              getInstrProfSectionName(IPSK_cnts, OF, /*AddSegmentInfo=*/false)))
        return std::nullopt;
    }

    // Do not instrument accesses to LLVM internal variables.
    if (GV->getName().startswith("__llvm"))
      return std::nullopt;
  }

  const DataLayout &DL = I->getModule()->getDataLayout();
  Access.TypeSize = DL.getTypeStoreSizeInBits(Access.AccessTy);
  return Access;
}
void MemProfiler::instrumentMaskedLoadOrStore(const DataLayout &DL, Value *Mask,
                                              Instruction *I, Value *Addr,
                                              Type *AccessTy, bool IsWrite) {
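  // In outline (a sketch of the behavior below, not emitted verbatim): for a
  // constant mask, each known-false lane is skipped; for a variable mask, each
  // lane's counter update is guarded by its own branch, roughly
  //    if (Mask[Idx]) { bump the shadow counter for &Addr[Idx]; }
  // created via SplitBlockAndInsertIfThen.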
  auto *VTy = cast<FixedVectorType>(AccessTy);
  uint64_t ElemTypeSize = DL.getTypeStoreSizeInBits(VTy->getScalarType());
  unsigned Num = VTy->getNumElements();
  auto *Zero = ConstantInt::get(IntptrTy, 0);
  for (unsigned Idx = 0; Idx < Num; ++Idx) {
    Value *InstrumentedAddress = nullptr;
    Instruction *InsertBefore = I;
    if (auto *Vector = dyn_cast<ConstantVector>(Mask)) {
      // dyn_cast as we might get UndefValue
      if (auto *Masked = dyn_cast<ConstantInt>(Vector->getOperand(Idx))) {
        if (Masked->isZero())
          // Mask is constant false, so no instrumentation needed.
          continue;
        // If we have a true or undef value, fall through to instrumentAddress
        // with InsertBefore == I.
      }
    } else {
      IRBuilder<> IRB(I);
      Value *MaskElem = IRB.CreateExtractElement(Mask, Idx);
      Instruction *ThenTerm = SplitBlockAndInsertIfThen(MaskElem, I, false);
      InsertBefore = ThenTerm;
    }

    IRBuilder<> IRB(InsertBefore);
    InstrumentedAddress =
        IRB.CreateGEP(VTy, Addr, {Zero, ConstantInt::get(IntptrTy, Idx)});
    instrumentAddress(I, InsertBefore, InstrumentedAddress, ElemTypeSize,
                      IsWrite);
  }
}
void MemProfiler::instrumentMop(Instruction *I, const DataLayout &DL,
                                InterestingMemoryAccess &Access) {
  // Skip instrumentation of stack accesses unless requested.
  if (!ClStack && isa<AllocaInst>(getUnderlyingObject(Access.Addr))) {
    if (Access.IsWrite)
      ++NumSkippedStackWrites;
    else
      ++NumSkippedStackReads;
    return;
  }

  if (Access.IsWrite)
    NumInstrumentedWrites++;
  else
    NumInstrumentedReads++;

  if (Access.MaybeMask) {
    instrumentMaskedLoadOrStore(DL, Access.MaybeMask, I, Access.Addr,
                                Access.AccessTy, Access.IsWrite);
  } else {
    // Since the access counts will be accumulated across the entire allocation,
    // we only update the shadow access count for the first location and thus
    // don't need to worry about alignment and type size.
    instrumentAddress(I, I, Access.Addr, Access.TypeSize, Access.IsWrite);
  }
}
void MemProfiler::instrumentAddress(Instruction *OrigIns,
                                    Instruction *InsertBefore, Value *Addr,
                                    uint32_t TypeSize, bool IsWrite) {
  IRBuilder<> IRB(InsertBefore);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);

  if (ClUseCalls) {
    IRB.CreateCall(MemProfMemoryAccessCallback[IsWrite], AddrLong);
    return;
  }

  // Create an inline sequence to compute shadow location, and increment the
  // value by one.
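  // Roughly (a sketch of the emitted IR; register names are illustrative):
  //    %a      = ptrtoint i8* %addr to i64
  //    %m      = and i64 %a, Mask
  //    %s      = lshr i64 %m, Scale
  //    %shadow = add i64 %s, %dynamic_shadow_offset
  //    %p      = inttoptr i64 %shadow to i64*
  //    %count  = load i64, i64* %p
  //    %inc    = add i64 %count, 1
  //    store i64 %inc, i64* %p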
  Type *ShadowTy = Type::getInt64Ty(*C);
  Type *ShadowPtrTy = PointerType::get(ShadowTy, 0);

  Value *ShadowPtr = memToShadow(AddrLong, IRB);
  Value *ShadowAddr = IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy);
  Value *ShadowValue = IRB.CreateLoad(ShadowTy, ShadowAddr);
  Value *Inc = ConstantInt::get(Type::getInt64Ty(*C), 1);
  ShadowValue = IRB.CreateAdd(ShadowValue, Inc);
  IRB.CreateStore(ShadowValue, ShadowAddr);
}
// Create the variable for the profile file name.
void createProfileFileNameVar(Module &M) {
  const MDString *MemProfFilename =
      dyn_cast_or_null<MDString>(M.getModuleFlag("MemProfProfileFilename"));
  if (!MemProfFilename)
    return;

  assert(!MemProfFilename->getString().empty() &&
         "Unexpected MemProfProfileFilename metadata with empty string");
  Constant *ProfileNameConst = ConstantDataArray::getString(
      M.getContext(), MemProfFilename->getString(), true);
  GlobalVariable *ProfileNameVar = new GlobalVariable(
      M, ProfileNameConst->getType(), /*isConstant=*/true,
      GlobalValue::WeakAnyLinkage, ProfileNameConst, MemProfFilenameVar);
  Triple TT(M.getTargetTriple());
  if (TT.supportsCOMDAT()) {
    ProfileNameVar->setLinkage(GlobalValue::ExternalLinkage);
    ProfileNameVar->setComdat(M.getOrInsertComdat(MemProfFilenameVar));
  }
}
bool ModuleMemProfiler::instrumentModule(Module &M) {
  // Create a module constructor.
  std::string MemProfVersion = std::to_string(LLVM_MEM_PROFILER_VERSION);
  std::string VersionCheckName =
      ClInsertVersionCheck ? (MemProfVersionCheckNamePrefix + MemProfVersion)
                           : "";
  std::tie(MemProfCtorFunction, std::ignore) =
      createSanitizerCtorAndInitFunctions(M, MemProfModuleCtorName,
                                          MemProfInitName, /*InitArgTypes=*/{},
                                          /*InitArgs=*/{}, VersionCheckName);

  const uint64_t Priority = getCtorAndDtorPriority(TargetTriple);
  appendToGlobalCtors(M, MemProfCtorFunction, Priority);

  createProfileFileNameVar(M);

  return true;
}
void MemProfiler::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);

  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    const std::string TypeStr = AccessIsWrite ? "store" : "load";

    SmallVector<Type *, 3> Args2 = {IntptrTy, IntptrTy};
    SmallVector<Type *, 2> Args1{1, IntptrTy};
    MemProfMemoryAccessCallbackSized[AccessIsWrite] =
        M.getOrInsertFunction(ClMemoryAccessCallbackPrefix + TypeStr + "N",
                              FunctionType::get(IRB.getVoidTy(), Args2, false));

    MemProfMemoryAccessCallback[AccessIsWrite] =
        M.getOrInsertFunction(ClMemoryAccessCallbackPrefix + TypeStr,
                              FunctionType::get(IRB.getVoidTy(), Args1, false));
  }
  MemProfMemmove = M.getOrInsertFunction(
      ClMemoryAccessCallbackPrefix + "memmove", IRB.getInt8PtrTy(),
      IRB.getInt8PtrTy(), IRB.getInt8PtrTy(), IntptrTy);
  MemProfMemcpy = M.getOrInsertFunction(ClMemoryAccessCallbackPrefix + "memcpy",
                                        IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                        IRB.getInt8PtrTy(), IntptrTy);
  MemProfMemset = M.getOrInsertFunction(ClMemoryAccessCallbackPrefix + "memset",
                                        IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                        IRB.getInt32Ty(), IntptrTy);
}
bool MemProfiler::maybeInsertMemProfInitAtFunctionEntry(Function &F) {
  // For each NSObject descendant having a +load method, this method is invoked
  // by the ObjC runtime before any of the static constructors is called.
  // Therefore we need to instrument such methods with a call to __memprof_init
  // at the beginning in order to initialize our runtime before any access to
  // the shadow memory.
  // We cannot just ignore these methods, because they may call other
  // instrumented functions.
  if (F.getName().find(" load]") != std::string::npos) {
    FunctionCallee MemProfInitFunction =
        declareSanitizerInitFunction(*F.getParent(), MemProfInitName, {});
    IRBuilder<> IRB(&F.front(), F.front().begin());
    IRB.CreateCall(MemProfInitFunction, {});
    return true;
  }
  return false;
}
bool MemProfiler::insertDynamicShadowAtFunctionEntry(Function &F) {
  IRBuilder<> IRB(&F.front().front());
  Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
      MemProfShadowMemoryDynamicAddress, IntptrTy);
  if (F.getParent()->getPICLevel() == PICLevel::NotPIC)
    cast<GlobalVariable>(GlobalDynamicAddress)->setDSOLocal(true);
  DynamicShadowOffset = IRB.CreateLoad(IntptrTy, GlobalDynamicAddress);
  return true;
}
bool MemProfiler::instrumentFunction(Function &F) {
  if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage)
    return false;
  if (ClDebugFunc == F.getName())
    return false;
  if (F.getName().startswith("__memprof_"))
    return false;

  bool FunctionModified = false;

  // If needed, insert __memprof_init.
  // This function needs to be called even if the function body is not
  // instrumented.
  if (maybeInsertMemProfInitAtFunctionEntry(F))
    FunctionModified = true;

  LLVM_DEBUG(dbgs() << "MEMPROF instrumenting:\n" << F << "\n");

  initializeCallbacks(*F.getParent());

  SmallVector<Instruction *, 16> ToInstrument;

  // Fill the set of memory operations to instrument.
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (isInterestingMemoryAccess(&Inst) || isa<MemIntrinsic>(Inst))
        ToInstrument.push_back(&Inst);
    }
  }

  if (ToInstrument.empty()) {
    LLVM_DEBUG(dbgs() << "MEMPROF done instrumenting: " << FunctionModified
                      << " " << F << "\n");

    return FunctionModified;
  }
  FunctionModified |= insertDynamicShadowAtFunctionEntry(F);

  int NumInstrumented = 0;
  for (auto *Inst : ToInstrument) {
    if (ClDebugMin < 0 || ClDebugMax < 0 ||
        (NumInstrumented >= ClDebugMin && NumInstrumented <= ClDebugMax)) {
      std::optional<InterestingMemoryAccess> Access =
          isInterestingMemoryAccess(Inst);
      if (Access)
        instrumentMop(Inst, F.getParent()->getDataLayout(), *Access);
      else
        instrumentMemIntrinsic(cast<MemIntrinsic>(Inst));
    }
    NumInstrumented++;
  }

  if (NumInstrumented > 0)
    FunctionModified = true;

  LLVM_DEBUG(dbgs() << "MEMPROF done instrumenting: " << FunctionModified << " "
                    << F << "\n");

  return FunctionModified;
}