//===- MemoryBuiltins.cpp - Identify calls to memory builtins -------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This family of functions identifies calls to builtin functions that allocate
// or free memory.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/TargetFolder.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/Utils/Local.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Operator.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <cstdint>
#include <iterator>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "memory-builtins"

enum AllocType : uint8_t {
  OpNewLike          = 1<<0, // allocates; never returns null
  MallocLike         = 1<<1, // allocates; may return null
  AlignedAllocLike   = 1<<2, // allocates with alignment; may return null
  CallocLike         = 1<<3, // allocates + bzero
  ReallocLike        = 1<<4, // reallocates
  StrDupLike         = 1<<5,
  MallocOrOpNewLike  = MallocLike | OpNewLike,
  MallocOrCallocLike = MallocLike | OpNewLike | CallocLike | AlignedAllocLike,
  AllocLike          = MallocOrCallocLike | StrDupLike,
  AnyAlloc           = AllocLike | ReallocLike
};

struct AllocFnsTy {
  AllocType AllocTy;
  unsigned NumParams;
  // First and Second size parameters (or -1 if unused)
  int FstParam, SndParam;
  // Alignment parameter for aligned_alloc and aligned new
  int AlignParam;
};

// FIXME: certain users need more information. E.g., SimplifyLibCalls needs to
// know which functions are nounwind, noalias, nocapture parameters, etc.
static const std::pair<LibFunc, AllocFnsTy> AllocationFnData[] = {
    {LibFunc_malloc, {MallocLike, 1, 0, -1, -1}},
    {LibFunc_vec_malloc, {MallocLike, 1, 0, -1, -1}},
    {LibFunc_valloc, {MallocLike, 1, 0, -1, -1}},
    {LibFunc_Znwj, {OpNewLike, 1, 0, -1, -1}},                      // new(unsigned int)
    {LibFunc_ZnwjRKSt9nothrow_t, {MallocLike, 2, 0, -1, -1}},       // new(unsigned int, nothrow)
    {LibFunc_ZnwjSt11align_val_t, {OpNewLike, 2, 0, -1, 1}},        // new(unsigned int, align_val_t)
    {LibFunc_ZnwjSt11align_val_tRKSt9nothrow_t, {MallocLike, 3, 0, -1, 1}}, // new(unsigned int, align_val_t, nothrow)
    {LibFunc_Znwm, {OpNewLike, 1, 0, -1, -1}},                      // new(unsigned long)
    {LibFunc_ZnwmRKSt9nothrow_t, {MallocLike, 2, 0, -1, -1}},       // new(unsigned long, nothrow)
    {LibFunc_ZnwmSt11align_val_t, {OpNewLike, 2, 0, -1, 1}},        // new(unsigned long, align_val_t)
    {LibFunc_ZnwmSt11align_val_tRKSt9nothrow_t, {MallocLike, 3, 0, -1, 1}}, // new(unsigned long, align_val_t, nothrow)
    {LibFunc_Znaj, {OpNewLike, 1, 0, -1, -1}},                      // new[](unsigned int)
    {LibFunc_ZnajRKSt9nothrow_t, {MallocLike, 2, 0, -1, -1}},       // new[](unsigned int, nothrow)
    {LibFunc_ZnajSt11align_val_t, {OpNewLike, 2, 0, -1, 1}},        // new[](unsigned int, align_val_t)
    {LibFunc_ZnajSt11align_val_tRKSt9nothrow_t, {MallocLike, 3, 0, -1, 1}}, // new[](unsigned int, align_val_t, nothrow)
    {LibFunc_Znam, {OpNewLike, 1, 0, -1, -1}},                      // new[](unsigned long)
    {LibFunc_ZnamRKSt9nothrow_t, {MallocLike, 2, 0, -1, -1}},       // new[](unsigned long, nothrow)
    {LibFunc_ZnamSt11align_val_t, {OpNewLike, 2, 0, -1, 1}},        // new[](unsigned long, align_val_t)
    {LibFunc_ZnamSt11align_val_tRKSt9nothrow_t, {MallocLike, 3, 0, -1, 1}}, // new[](unsigned long, align_val_t, nothrow)
    {LibFunc_msvc_new_int, {OpNewLike, 1, 0, -1, -1}},              // new(unsigned int)
    {LibFunc_msvc_new_int_nothrow, {MallocLike, 2, 0, -1, -1}},     // new(unsigned int, nothrow)
    {LibFunc_msvc_new_longlong, {OpNewLike, 1, 0, -1, -1}},         // new(unsigned long long)
    {LibFunc_msvc_new_longlong_nothrow, {MallocLike, 2, 0, -1, -1}}, // new(unsigned long long, nothrow)
    {LibFunc_msvc_new_array_int, {OpNewLike, 1, 0, -1, -1}},        // new[](unsigned int)
    {LibFunc_msvc_new_array_int_nothrow, {MallocLike, 2, 0, -1, -1}}, // new[](unsigned int, nothrow)
    {LibFunc_msvc_new_array_longlong, {OpNewLike, 1, 0, -1, -1}},   // new[](unsigned long long)
    {LibFunc_msvc_new_array_longlong_nothrow, {MallocLike, 2, 0, -1, -1}}, // new[](unsigned long long, nothrow)
    {LibFunc_aligned_alloc, {AlignedAllocLike, 2, 1, -1, 0}},
    {LibFunc_memalign, {AlignedAllocLike, 2, 1, -1, 0}},
    {LibFunc_calloc, {CallocLike, 2, 0, 1, -1}},
    {LibFunc_vec_calloc, {CallocLike, 2, 0, 1, -1}},
    {LibFunc_realloc, {ReallocLike, 2, 1, -1, -1}},
    {LibFunc_vec_realloc, {ReallocLike, 2, 1, -1, -1}},
    {LibFunc_reallocf, {ReallocLike, 2, 1, -1, -1}},
    {LibFunc_strdup, {StrDupLike, 1, -1, -1, -1}},
    {LibFunc_strndup, {StrDupLike, 2, 1, -1, -1}},
    {LibFunc___kmpc_alloc_shared, {MallocLike, 1, 0, -1, -1}},
    // TODO: Handle "int posix_memalign(void **, size_t, size_t)"
};
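
// Illustrative reading of one entry (editorial note, not part of the original
// source): the calloc row {CallocLike, 2, 0, 1, -1} says the function takes
// two parameters, the allocation size is the product of parameter 0 and
// parameter 1, and there is no alignment parameter. aligned_alloc's row
// {AlignedAllocLike, 2, 1, -1, 0} instead takes its size from parameter 1 and
// its alignment from parameter 0.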

static const Function *getCalledFunction(const Value *V,
                                         bool &IsNoBuiltin) {
  // Don't care about intrinsics in this case.
  if (isa<IntrinsicInst>(V))
    return nullptr;

  const auto *CB = dyn_cast<CallBase>(V);
  if (!CB)
    return nullptr;

  IsNoBuiltin = CB->isNoBuiltin();

  if (const Function *Callee = CB->getCalledFunction())
    return Callee;
  return nullptr;
}

/// Returns the allocation data for the given value if it's a call to a known
/// allocation function.
static Optional<AllocFnsTy>
getAllocationDataForFunction(const Function *Callee, AllocType AllocTy,
                             const TargetLibraryInfo *TLI) {
  // Make sure that the function is available.
  LibFunc TLIFn;
  if (!TLI || !TLI->getLibFunc(*Callee, TLIFn) || !TLI->has(TLIFn))
    return None;

  const auto *Iter = find_if(
      AllocationFnData, [TLIFn](const std::pair<LibFunc, AllocFnsTy> &P) {
        return P.first == TLIFn;
      });

  if (Iter == std::end(AllocationFnData))
    return None;

  const AllocFnsTy *FnData = &Iter->second;
  if ((FnData->AllocTy & AllocTy) != FnData->AllocTy)
    return None;

  // Check function prototype.
  int FstParam = FnData->FstParam;
  int SndParam = FnData->SndParam;
  FunctionType *FTy = Callee->getFunctionType();

  if (FTy->getReturnType() == Type::getInt8PtrTy(FTy->getContext()) &&
      FTy->getNumParams() == FnData->NumParams &&
      (FstParam < 0 ||
       (FTy->getParamType(FstParam)->isIntegerTy(32) ||
        FTy->getParamType(FstParam)->isIntegerTy(64))) &&
      (SndParam < 0 ||
       FTy->getParamType(SndParam)->isIntegerTy(32) ||
       FTy->getParamType(SndParam)->isIntegerTy(64)))
    return *FnData;
  return None;
}

static Optional<AllocFnsTy> getAllocationData(const Value *V, AllocType AllocTy,
                                              const TargetLibraryInfo *TLI) {
  bool IsNoBuiltinCall;
  if (const Function *Callee = getCalledFunction(V, IsNoBuiltinCall))
    if (!IsNoBuiltinCall)
      return getAllocationDataForFunction(Callee, AllocTy, TLI);
  return None;
}

static Optional<AllocFnsTy>
getAllocationData(const Value *V, AllocType AllocTy,
                  function_ref<const TargetLibraryInfo &(Function &)> GetTLI) {
  bool IsNoBuiltinCall;
  if (const Function *Callee = getCalledFunction(V, IsNoBuiltinCall))
    if (!IsNoBuiltinCall)
      return getAllocationDataForFunction(
          Callee, AllocTy, &GetTLI(const_cast<Function &>(*Callee)));
  return None;
}

static Optional<AllocFnsTy> getAllocationSize(const Value *V,
                                              const TargetLibraryInfo *TLI) {
  bool IsNoBuiltinCall;
  const Function *Callee = getCalledFunction(V, IsNoBuiltinCall);
  if (!Callee)
    return None;

  // Prefer to use existing information over allocsize. This will give us an
  // accurate AllocTy.
  if (!IsNoBuiltinCall)
    if (Optional<AllocFnsTy> Data =
            getAllocationDataForFunction(Callee, AnyAlloc, TLI))
      return Data;

  Attribute Attr = Callee->getFnAttribute(Attribute::AllocSize);
  if (Attr == Attribute())
    return None;

  std::pair<unsigned, Optional<unsigned>> Args = Attr.getAllocSizeArgs();

  AllocFnsTy Result;
  // Because allocsize only tells us how many bytes are allocated, we're not
  // really allowed to assume anything, so we use MallocLike.
  Result.AllocTy = MallocLike;
  Result.NumParams = Callee->getNumOperands();
  Result.FstParam = Args.first;
  Result.SndParam = Args.second.getValueOr(-1);
  // Allocsize has no way to specify an alignment argument
  Result.AlignParam = -1;
  return Result;
}

/// Tests if a value is a call or invoke to a library function that
/// allocates or reallocates memory (either malloc, calloc, realloc, or strdup
/// like).
bool llvm::isAllocationFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, AnyAlloc, TLI).hasValue();
}

bool llvm::isAllocationFn(
    const Value *V, function_ref<const TargetLibraryInfo &(Function &)> GetTLI) {
  return getAllocationData(V, AnyAlloc, GetTLI).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates uninitialized memory (such as malloc).
static bool isMallocLikeFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, MallocOrOpNewLike, TLI).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates uninitialized memory with alignment (such as aligned_alloc).
static bool isAlignedAllocLikeFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, AlignedAllocLike, TLI).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates zero-filled memory (such as calloc).
static bool isCallocLikeFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, CallocLike, TLI).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory similar to malloc or calloc.
bool llvm::isMallocOrCallocLikeFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, MallocOrCallocLike, TLI).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// allocates memory (either malloc, calloc, or strdup like).
bool llvm::isAllocLikeFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, AllocLike, TLI).hasValue();
}

/// Tests if a value is a call or invoke to a library function that
/// reallocates memory (e.g., realloc).
bool llvm::isReallocLikeFn(const Value *V, const TargetLibraryInfo *TLI) {
  return getAllocationData(V, ReallocLike, TLI).hasValue();
}

/// Tests if a function is a library function that reallocates memory
/// (e.g., realloc).
bool llvm::isReallocLikeFn(const Function *F, const TargetLibraryInfo *TLI) {
  return getAllocationDataForFunction(F, ReallocLike, TLI).hasValue();
}

bool llvm::isAllocRemovable(const CallBase *CB, const TargetLibraryInfo *TLI) {
  assert(isAllocationFn(CB, TLI));

  // Note: Removability is highly dependent on the source language. For
  // example, recent C++ requires direct calls to the global allocation
  // [basic.stc.dynamic.allocation] to be observable unless part of a new
  // expression [expr.new paragraph 13].

  // Historically we've treated the C family allocation routines as removable
  return isAllocLikeFn(CB, TLI);
}

Value *llvm::getAllocAlignment(const CallBase *V,
                               const TargetLibraryInfo *TLI) {
  assert(isAllocationFn(V, TLI));

  const Optional<AllocFnsTy> FnData = getAllocationData(V, AnyAlloc, TLI);
  if (!FnData.hasValue() || FnData->AlignParam < 0) {
    return nullptr;
  }
  return V->getOperand(FnData->AlignParam);
}
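
// Illustrative note (editorial, not part of the original source): for a
// hypothetical call `%p = call i8* @aligned_alloc(i64 32, i64 128)`, the table
// above marks AlignParam = 0, so getAllocAlignment returns the first operand
// (the i64 32 alignment value); plain malloc entries use AlignParam = -1 and
// yield nullptr.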

/// When we're compiling N-bit code, and the user uses parameters that are
/// greater than N bits (e.g. uint64_t on a 32-bit build), we can run into
/// trouble with APInt size issues. This function handles resizing + overflow
/// checks for us. Check and zext or trunc \p I depending on IntTyBits and
/// I's value.
static bool CheckedZextOrTrunc(APInt &I, unsigned IntTyBits) {
  // More bits than we can handle. Checking the bit width isn't necessary, but
  // it's faster than checking active bits, and should give `false` in the
  // vast majority of cases.
  if (I.getBitWidth() > IntTyBits && I.getActiveBits() > IntTyBits)
    return false;
  if (I.getBitWidth() != IntTyBits)
    I = I.zextOrTrunc(IntTyBits);
  return true;
}
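
// Quick illustration (editorial, not part of the original source): with
// IntTyBits = 32, a 64-bit APInt holding 16 is safely truncated to 32 bits
// and the function returns true; a 64-bit APInt holding 2^40 has more than
// 32 active bits, so the function returns false and callers treat the size
// as unknown.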

Optional<APInt>
llvm::getAllocSize(const CallBase *CB,
                   const TargetLibraryInfo *TLI,
                   std::function<const Value*(const Value*)> Mapper) {
  // Note: This handles both explicitly listed allocation functions and
  // allocsize. The code structure could stand to be cleaned up a bit.
  Optional<AllocFnsTy> FnData = getAllocationSize(CB, TLI);
  if (!FnData)
    return None;

  // Get the index type for this address space; results and intermediate
  // computations are performed at that width.
  auto &DL = CB->getModule()->getDataLayout();
  const unsigned IntTyBits = DL.getIndexTypeSizeInBits(CB->getType());

  // Handle strdup-like functions separately.
  if (FnData->AllocTy == StrDupLike) {
    APInt Size(IntTyBits, GetStringLength(Mapper(CB->getArgOperand(0))));
    if (!Size)
      return None;

    // Strndup limits strlen.
    if (FnData->FstParam > 0) {
      const ConstantInt *Arg =
          dyn_cast<ConstantInt>(Mapper(CB->getArgOperand(FnData->FstParam)));
      if (!Arg)
        return None;

      APInt MaxSize = Arg->getValue().zextOrSelf(IntTyBits);
      if (Size.ugt(MaxSize))
        Size = MaxSize + 1;
    }
    return Size;
  }

  const ConstantInt *Arg =
      dyn_cast<ConstantInt>(Mapper(CB->getArgOperand(FnData->FstParam)));
  if (!Arg)
    return None;

  APInt Size = Arg->getValue();
  if (!CheckedZextOrTrunc(Size, IntTyBits))
    return None;

  // Size is determined by just 1 parameter.
  if (FnData->SndParam < 0)
    return Size;

  Arg = dyn_cast<ConstantInt>(Mapper(CB->getArgOperand(FnData->SndParam)));
  if (!Arg)
    return None;

  APInt NumElems = Arg->getValue();
  if (!CheckedZextOrTrunc(NumElems, IntTyBits))
    return None;

  bool Overflow;
  Size = Size.umul_ov(NumElems, Overflow);
  if (Overflow)
    return None;

  return Size;
}
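
// Worked example (editorial, not part of the original source): for a
// hypothetical call `%p = call i8* @calloc(i64 10, i64 8)`, FstParam = 0 and
// SndParam = 1, so the result is the overflow-checked product 10 * 8 = 80
// bytes. For `%p = call i8* @malloc(i64 40)` only FstParam is used and the
// result is 40. Any non-constant size argument makes the result None.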

Constant *llvm::getInitialValueOfAllocation(const CallBase *Alloc,
                                            const TargetLibraryInfo *TLI,
                                            Type *Ty) {
  assert(isAllocationFn(Alloc, TLI));

  // malloc and aligned_alloc are uninitialized (undef)
  if (isMallocLikeFn(Alloc, TLI) || isAlignedAllocLikeFn(Alloc, TLI))
    return UndefValue::get(Ty);

  // calloc zero initializes
  if (isCallocLikeFn(Alloc, TLI))
    return Constant::getNullValue(Ty);

  return nullptr;
}

/// isLibFreeFunction - Returns true if the function is a builtin free()
bool llvm::isLibFreeFunction(const Function *F, const LibFunc TLIFn) {
  unsigned ExpectedNumParams;
  if (TLIFn == LibFunc_free ||
      TLIFn == LibFunc_ZdlPv || // operator delete(void*)
      TLIFn == LibFunc_ZdaPv || // operator delete[](void*)
      TLIFn == LibFunc_msvc_delete_ptr32 ||       // operator delete(void*)
      TLIFn == LibFunc_msvc_delete_ptr64 ||       // operator delete(void*)
      TLIFn == LibFunc_msvc_delete_array_ptr32 || // operator delete[](void*)
      TLIFn == LibFunc_msvc_delete_array_ptr64)   // operator delete[](void*)
    ExpectedNumParams = 1;
  else if (TLIFn == LibFunc_ZdlPvj ||               // delete(void*, uint)
           TLIFn == LibFunc_ZdlPvm ||               // delete(void*, ulong)
           TLIFn == LibFunc_ZdlPvRKSt9nothrow_t ||  // delete(void*, nothrow)
           TLIFn == LibFunc_ZdlPvSt11align_val_t || // delete(void*, align_val_t)
           TLIFn == LibFunc_ZdaPvj ||               // delete[](void*, uint)
           TLIFn == LibFunc_ZdaPvm ||               // delete[](void*, ulong)
           TLIFn == LibFunc_ZdaPvRKSt9nothrow_t ||  // delete[](void*, nothrow)
           TLIFn == LibFunc_ZdaPvSt11align_val_t || // delete[](void*, align_val_t)
           TLIFn == LibFunc_msvc_delete_ptr32_int ||            // delete(void*, uint)
           TLIFn == LibFunc_msvc_delete_ptr64_longlong ||       // delete(void*, ulonglong)
           TLIFn == LibFunc_msvc_delete_ptr32_nothrow ||        // delete(void*, nothrow)
           TLIFn == LibFunc_msvc_delete_ptr64_nothrow ||        // delete(void*, nothrow)
           TLIFn == LibFunc_msvc_delete_array_ptr32_int ||      // delete[](void*, uint)
           TLIFn == LibFunc_msvc_delete_array_ptr64_longlong || // delete[](void*, ulonglong)
           TLIFn == LibFunc_msvc_delete_array_ptr32_nothrow ||  // delete[](void*, nothrow)
           TLIFn == LibFunc_msvc_delete_array_ptr64_nothrow ||  // delete[](void*, nothrow)
           TLIFn == LibFunc___kmpc_free_shared) // OpenMP Offloading RTL free
    ExpectedNumParams = 2;
  else if (TLIFn == LibFunc_ZdaPvSt11align_val_tRKSt9nothrow_t || // delete[](void*, align_val_t, nothrow)
           TLIFn == LibFunc_ZdlPvSt11align_val_tRKSt9nothrow_t || // delete(void*, align_val_t, nothrow)
           TLIFn == LibFunc_ZdlPvjSt11align_val_t || // delete(void*, unsigned int, align_val_t)
           TLIFn == LibFunc_ZdlPvmSt11align_val_t || // delete(void*, unsigned long, align_val_t)
           TLIFn == LibFunc_ZdaPvjSt11align_val_t || // delete[](void*, unsigned int, align_val_t)
           TLIFn == LibFunc_ZdaPvmSt11align_val_t)   // delete[](void*, unsigned long, align_val_t)
    ExpectedNumParams = 3;
  else
    return false;

  // Check free prototype.
  // FIXME: workaround for PR5130, this will be obsolete when a nobuiltin
  // attribute will exist.
  FunctionType *FTy = F->getFunctionType();
  if (!FTy->getReturnType()->isVoidTy())
    return false;
  if (FTy->getNumParams() != ExpectedNumParams)
    return false;
  if (FTy->getParamType(0) != Type::getInt8PtrTy(F->getContext()))
    return false;

  return true;
}

/// isFreeCall - Returns non-null if the value is a call to the builtin free()
const CallInst *llvm::isFreeCall(const Value *I, const TargetLibraryInfo *TLI) {
  bool IsNoBuiltinCall;
  const Function *Callee = getCalledFunction(I, IsNoBuiltinCall);
  if (Callee == nullptr || IsNoBuiltinCall)
    return nullptr;

  LibFunc TLIFn;
  if (!TLI || !TLI->getLibFunc(*Callee, TLIFn) || !TLI->has(TLIFn))
    return nullptr;

  return isLibFreeFunction(Callee, TLIFn) ? dyn_cast<CallInst>(I) : nullptr;
}

//===----------------------------------------------------------------------===//
//  Utility functions to compute size of objects.
//

static APInt getSizeWithOverflow(const SizeOffsetType &Data) {
  if (Data.second.isNegative() || Data.first.ult(Data.second))
    return APInt(Data.first.getBitWidth(), 0);
  return Data.first - Data.second;
}
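
// Illustrative note (editorial, not part of the original source):
// SizeOffsetType pairs an object size with the pointer's offset into that
// object. For a size of 16 and an offset of 4 this returns 12 accessible
// bytes; for an offset of 20 (past the end) or a negative offset it clamps
// to 0 rather than wrapping.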

/// Compute the size of the object pointed by Ptr. Returns true and the
/// object size in Size if successful, and false otherwise.
/// If RoundToAlign is true, then Size is rounded up to the alignment of
/// allocas, byval arguments, and global variables.
bool llvm::getObjectSize(const Value *Ptr, uint64_t &Size, const DataLayout &DL,
                         const TargetLibraryInfo *TLI, ObjectSizeOpts Opts) {
  ObjectSizeOffsetVisitor Visitor(DL, TLI, Ptr->getContext(), Opts);
  SizeOffsetType Data = Visitor.compute(const_cast<Value*>(Ptr));
  if (!Visitor.bothKnown(Data))
    return false;

  Size = getSizeWithOverflow(Data).getZExtValue();
  return true;
}
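
// Minimal usage sketch for callers (editorial, illustrative only; assumes
// `Ptr`, `DL`, and `TLI` are already in scope):
//
//   uint64_t ObjSize;
//   ObjectSizeOpts Opts;
//   Opts.RoundToAlign = true; // round to the alloca/global alignment
//   if (getObjectSize(Ptr, ObjSize, DL, &TLI, Opts))
//     ; // ObjSize now holds the statically known accessible size in bytes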

Value *llvm::lowerObjectSizeCall(IntrinsicInst *ObjectSize,
                                 const DataLayout &DL,
                                 const TargetLibraryInfo *TLI,
                                 bool MustSucceed) {
  assert(ObjectSize->getIntrinsicID() == Intrinsic::objectsize &&
         "ObjectSize must be a call to llvm.objectsize!");

  bool MaxVal = cast<ConstantInt>(ObjectSize->getArgOperand(1))->isZero();
  ObjectSizeOpts EvalOptions;
  // Unless we have to fold this to something, try to be as accurate as
  // possible.
  if (MustSucceed)
    EvalOptions.EvalMode =
        MaxVal ? ObjectSizeOpts::Mode::Max : ObjectSizeOpts::Mode::Min;
  else
    EvalOptions.EvalMode = ObjectSizeOpts::Mode::Exact;

  EvalOptions.NullIsUnknownSize =
      cast<ConstantInt>(ObjectSize->getArgOperand(2))->isOne();

  auto *ResultType = cast<IntegerType>(ObjectSize->getType());
  bool StaticOnly = cast<ConstantInt>(ObjectSize->getArgOperand(3))->isZero();
  if (StaticOnly) {
    // FIXME: Does it make sense to just return a failure value if the size
    // won't fit in the output and `!MustSucceed`?
    uint64_t Size;
    if (getObjectSize(ObjectSize->getArgOperand(0), Size, DL, TLI, EvalOptions) &&
        isUIntN(ResultType->getBitWidth(), Size))
      return ConstantInt::get(ResultType, Size);
  } else {
    LLVMContext &Ctx = ObjectSize->getFunction()->getContext();
    ObjectSizeOffsetEvaluator Eval(DL, TLI, Ctx, EvalOptions);
    SizeOffsetEvalType SizeOffsetPair =
        Eval.compute(ObjectSize->getArgOperand(0));

    if (SizeOffsetPair != ObjectSizeOffsetEvaluator::unknown()) {
      IRBuilder<TargetFolder> Builder(Ctx, TargetFolder(DL));
      Builder.SetInsertPoint(ObjectSize);
      // If we're outside the end of the object, then we can always access
      // exactly 0 bytes.
      Value *ResultSize =
          Builder.CreateSub(SizeOffsetPair.first, SizeOffsetPair.second);
      Value *UseZero =
          Builder.CreateICmpULT(SizeOffsetPair.first, SizeOffsetPair.second);
      ResultSize = Builder.CreateZExtOrTrunc(ResultSize, ResultType);
      Value *Ret = Builder.CreateSelect(
          UseZero, ConstantInt::get(ResultType, 0), ResultSize);

      // The non-constant size expression cannot evaluate to -1.
      if (!isa<Constant>(SizeOffsetPair.first) ||
          !isa<Constant>(SizeOffsetPair.second))
        Builder.CreateAssumption(
            Builder.CreateICmpNE(Ret, ConstantInt::get(ResultType, -1)));

      return Ret;
    }
  }

  if (!MustSucceed)
    return nullptr;

  return ConstantInt::get(ResultType, MaxVal ? -1ULL : 0);
}
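
// Illustrative example (editorial, not part of the original source): for a
// hypothetical 40-byte alloca %p, a call such as
//   %n = call i64 @llvm.objectsize.i64.p0i8(i8* %p, i1 false, i1 true, i1 false)
// folds to `i64 40` via the static path above. If the size cannot be
// determined and MustSucceed is true, the fold falls back to -1 (max mode)
// or 0 (min mode).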

STATISTIC(ObjectVisitorArgument,
          "Number of arguments with unsolved size and offset");
STATISTIC(ObjectVisitorLoad,
          "Number of load instructions with unsolved size and offset");

APInt ObjectSizeOffsetVisitor::align(APInt Size, MaybeAlign Alignment) {
  if (Options.RoundToAlign && Alignment)
    return APInt(IntTyBits, alignTo(Size.getZExtValue(), Alignment));
  return Size;
}

ObjectSizeOffsetVisitor::ObjectSizeOffsetVisitor(const DataLayout &DL,
                                                 const TargetLibraryInfo *TLI,
                                                 LLVMContext &Context,
                                                 ObjectSizeOpts Options)
    : DL(DL), TLI(TLI), Options(Options) {
  // Pointer size must be rechecked for each object visited since it could have
  // a different address space.
}

SizeOffsetType ObjectSizeOffsetVisitor::compute(Value *V) {
  unsigned InitialIntTyBits = DL.getIndexTypeSizeInBits(V->getType());

  // Stripping pointer casts can strip address space casts which can change the
  // index type size. The invariant is that we use the value type to determine
  // the index type size and if we stripped address space casts we have to
  // readjust the APInt as we pass it upwards in order for the APInt to match
  // the type the caller passed in.
  APInt Offset(InitialIntTyBits, 0);
  V = V->stripAndAccumulateConstantOffsets(
      DL, Offset, /* AllowNonInbounds */ true, /* AllowInvariantGroup */ true);

  // Later we use the index type size and zero but it will match the type of the
  // value that is passed to computeImpl.
  IntTyBits = DL.getIndexTypeSizeInBits(V->getType());
  Zero = APInt::getZero(IntTyBits);

  bool IndexTypeSizeChanged = InitialIntTyBits != IntTyBits;
  if (!IndexTypeSizeChanged && Offset.isZero())
    return computeImpl(V);

  // We stripped an address space cast that changed the index type size or we
  // accumulated some constant offset (or both). Readjust the bit width to match
  // the argument index type size and apply the offset, as required.
  SizeOffsetType SOT = computeImpl(V);
  if (IndexTypeSizeChanged) {
    if (knownSize(SOT) && !::CheckedZextOrTrunc(SOT.first, InitialIntTyBits))
      SOT.first = APInt();
    if (knownOffset(SOT) && !::CheckedZextOrTrunc(SOT.second, InitialIntTyBits))
      SOT.second = APInt();
  }
  // If the computed offset is "unknown" we cannot add the stripped offset.
  return {SOT.first,
          SOT.second.getBitWidth() > 1 ? SOT.second + Offset : SOT.second};
}

SizeOffsetType ObjectSizeOffsetVisitor::computeImpl(Value *V) {
  if (Instruction *I = dyn_cast<Instruction>(V)) {
    // If we have already seen this instruction, bail out. Cycles can happen in
    // unreachable code after constant propagation.
    if (!SeenInsts.insert(I).second)
      return unknown();

    return visit(*I);
  }
  if (Argument *A = dyn_cast<Argument>(V))
    return visitArgument(*A);
  if (ConstantPointerNull *P = dyn_cast<ConstantPointerNull>(V))
    return visitConstantPointerNull(*P);
  if (GlobalAlias *GA = dyn_cast<GlobalAlias>(V))
    return visitGlobalAlias(*GA);
  if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V))
    return visitGlobalVariable(*GV);
  if (UndefValue *UV = dyn_cast<UndefValue>(V))
    return visitUndefValue(*UV);

  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetVisitor::compute() unhandled value: "
                    << *V << '\n');
  return unknown();
}

bool ObjectSizeOffsetVisitor::CheckedZextOrTrunc(APInt &I) {
  return ::CheckedZextOrTrunc(I, IntTyBits);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitAllocaInst(AllocaInst &I) {
  if (!I.getAllocatedType()->isSized())
    return unknown();

  if (isa<ScalableVectorType>(I.getAllocatedType()))
    return unknown();

  APInt Size(IntTyBits, DL.getTypeAllocSize(I.getAllocatedType()));
  if (!I.isArrayAllocation())
    return std::make_pair(align(Size, I.getAlign()), Zero);

  Value *ArraySize = I.getArraySize();
  if (const ConstantInt *C = dyn_cast<ConstantInt>(ArraySize)) {
    APInt NumElems = C->getValue();
    if (!CheckedZextOrTrunc(NumElems))
      return unknown();

    bool Overflow;
    Size = Size.umul_ov(NumElems, Overflow);
    return Overflow ? unknown()
                    : std::make_pair(align(Size, I.getAlign()), Zero);
  }
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitArgument(Argument &A) {
  Type *MemoryTy = A.getPointeeInMemoryValueType();
  // No interprocedural analysis is done at the moment.
  if (!MemoryTy || !MemoryTy->isSized()) {
    ++ObjectVisitorArgument;
    return unknown();
  }

  APInt Size(IntTyBits, DL.getTypeAllocSize(MemoryTy));
  return std::make_pair(align(Size, A.getParamAlign()), Zero);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitCallBase(CallBase &CB) {
  auto Mapper = [](const Value *V) { return V; };
  if (Optional<APInt> Size = getAllocSize(&CB, TLI, Mapper))
    return std::make_pair(*Size, Zero);
  return unknown();
}

SizeOffsetType
ObjectSizeOffsetVisitor::visitConstantPointerNull(ConstantPointerNull &CPN) {
  // If null is unknown, there's nothing we can do. Additionally, non-zero
  // address spaces can make use of null, so we don't presume to know anything
  // about that.
  //
  // TODO: How should this work with address space casts? We currently just drop
  // them on the floor, but it's unclear what we should do when a NULL from
  // addrspace(1) gets casted to addrspace(0) (or vice-versa).
  if (Options.NullIsUnknownSize || CPN.getType()->getAddressSpace())
    return unknown();
  return std::make_pair(Zero, Zero);
}

SizeOffsetType
ObjectSizeOffsetVisitor::visitExtractElementInst(ExtractElementInst &) {
  return unknown();
}

SizeOffsetType
ObjectSizeOffsetVisitor::visitExtractValueInst(ExtractValueInst &) {
  // Easy cases were already folded by previous passes.
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitGlobalAlias(GlobalAlias &GA) {
  if (GA.isInterposable())
    return unknown();
  return compute(GA.getAliasee());
}

SizeOffsetType ObjectSizeOffsetVisitor::visitGlobalVariable(GlobalVariable &GV) {
  if (!GV.hasDefinitiveInitializer())
    return unknown();

  APInt Size(IntTyBits, DL.getTypeAllocSize(GV.getValueType()));
  return std::make_pair(align(Size, GV.getAlign()), Zero);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitIntToPtrInst(IntToPtrInst &) {
  // clueless
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitLoadInst(LoadInst &) {
  ++ObjectVisitorLoad;
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitPHINode(PHINode &) {
  // too complex to analyze statically.
  return unknown();
}

SizeOffsetType ObjectSizeOffsetVisitor::visitSelectInst(SelectInst &I) {
  SizeOffsetType TrueSide = compute(I.getTrueValue());
  SizeOffsetType FalseSide = compute(I.getFalseValue());
  if (bothKnown(TrueSide) && bothKnown(FalseSide)) {
    if (TrueSide == FalseSide) {
      return TrueSide;
    }

    APInt TrueResult = getSizeWithOverflow(TrueSide);
    APInt FalseResult = getSizeWithOverflow(FalseSide);

    if (TrueResult == FalseResult) {
      return TrueSide;
    }
    if (Options.EvalMode == ObjectSizeOpts::Mode::Min) {
      if (TrueResult.slt(FalseResult))
        return TrueSide;
      return FalseSide;
    }
    if (Options.EvalMode == ObjectSizeOpts::Mode::Max) {
      if (TrueResult.sgt(FalseResult))
        return TrueSide;
      return FalseSide;
    }
  }
  return unknown();
}
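
// Illustrative note (editorial, not part of the original source): for a select
// between two statically known allocas, say 16 and 32 accessible bytes,
// Mode::Min picks the 16-byte side and Mode::Max the 32-byte side; Mode::Exact
// returns unknown() unless both sides agree.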

SizeOffsetType ObjectSizeOffsetVisitor::visitUndefValue(UndefValue &) {
  return std::make_pair(Zero, Zero);
}

SizeOffsetType ObjectSizeOffsetVisitor::visitInstruction(Instruction &I) {
  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetVisitor unknown instruction:" << I
                    << '\n');
  return unknown();
}

ObjectSizeOffsetEvaluator::ObjectSizeOffsetEvaluator(
    const DataLayout &DL, const TargetLibraryInfo *TLI, LLVMContext &Context,
    ObjectSizeOpts EvalOpts)
    : DL(DL), TLI(TLI), Context(Context),
      Builder(Context, TargetFolder(DL),
              IRBuilderCallbackInserter(
                  [&](Instruction *I) { InsertedInstructions.insert(I); })),
      EvalOpts(EvalOpts) {
  // IntTy and Zero must be set for each compute() since the address space may
  // be different for later objects.
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::compute(Value *V) {
  // XXX - Are vectors of pointers possible here?
  IntTy = cast<IntegerType>(DL.getIndexType(V->getType()));
  Zero = ConstantInt::get(IntTy, 0);

  SizeOffsetEvalType Result = compute_(V);

  if (!bothKnown(Result)) {
    // Erase everything that was computed in this iteration from the cache, so
    // that no dangling references are left behind. We could be a bit smarter if
    // we kept a dependency graph. It's probably not worth the complexity.
    for (const Value *SeenVal : SeenVals) {
      CacheMapTy::iterator CacheIt = CacheMap.find(SeenVal);
      // non-computable results can be safely cached
      if (CacheIt != CacheMap.end() && anyKnown(CacheIt->second))
        CacheMap.erase(CacheIt);
    }

    // Erase any instructions we inserted as part of the traversal.
    for (Instruction *I : InsertedInstructions) {
      I->replaceAllUsesWith(UndefValue::get(I->getType()));
      I->eraseFromParent();
    }
  }

  SeenVals.clear();
  InsertedInstructions.clear();
  return Result;
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::compute_(Value *V) {
  ObjectSizeOffsetVisitor Visitor(DL, TLI, Context, EvalOpts);
  SizeOffsetType Const = Visitor.compute(V);
  if (Visitor.bothKnown(Const))
    return std::make_pair(ConstantInt::get(Context, Const.first),
                          ConstantInt::get(Context, Const.second));

  V = V->stripPointerCasts();

  // Check cache.
  CacheMapTy::iterator CacheIt = CacheMap.find(V);
  if (CacheIt != CacheMap.end())
    return CacheIt->second;

  // Always generate code immediately before the instruction being
  // processed, so that the generated code dominates the same BBs.
  BuilderTy::InsertPointGuard Guard(Builder);
  if (Instruction *I = dyn_cast<Instruction>(V))
    Builder.SetInsertPoint(I);

  // Now compute the size and offset.
  SizeOffsetEvalType Result;

  // Record the pointers that were handled in this run, so that they can be
  // cleaned later if something fails. We also use this set to break cycles that
  // can occur in dead code.
  if (!SeenVals.insert(V).second) {
    Result = unknown();
  } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
    Result = visitGEPOperator(*GEP);
  } else if (Instruction *I = dyn_cast<Instruction>(V)) {
    Result = visit(*I);
  } else if (isa<Argument>(V) ||
             (isa<ConstantExpr>(V) &&
              cast<ConstantExpr>(V)->getOpcode() == Instruction::IntToPtr) ||
             isa<GlobalAlias>(V) ||
             isa<GlobalVariable>(V)) {
    // Ignore values where we cannot do more than ObjectSizeVisitor.
    Result = unknown();
  } else {
    LLVM_DEBUG(
        dbgs() << "ObjectSizeOffsetEvaluator::compute() unhandled value: " << *V
               << '\n');
    Result = unknown();
  }

  // Don't reuse CacheIt since it may be invalid at this point.
  CacheMap[V] = Result;
  return Result;
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitAllocaInst(AllocaInst &I) {
  if (!I.getAllocatedType()->isSized())
    return unknown();

  // must be a VLA
  assert(I.isArrayAllocation());

  // If needed, adjust the alloca's operand size to match the pointer size.
  // Subsequent math operations expect the types to match.
  Value *ArraySize = Builder.CreateZExtOrTrunc(
      I.getArraySize(), DL.getIntPtrType(I.getContext()));
  assert(ArraySize->getType() == Zero->getType() &&
         "Expected zero constant to have pointer type");

  Value *Size = ConstantInt::get(ArraySize->getType(),
                                 DL.getTypeAllocSize(I.getAllocatedType()));
  Size = Builder.CreateMul(Size, ArraySize);
  return std::make_pair(Size, Zero);
}
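
// Illustrative note (editorial, not part of the original source): for a
// hypothetical VLA such as `%buf = alloca i32, i64 %n`, the code above emits
// runtime IR roughly equivalent to `%size = mul i64 4, %n` and reports an
// offset of 0. Constant-sized allocas never reach this visitor because the
// static ObjectSizeOffsetVisitor already resolved them in compute_().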

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitCallBase(CallBase &CB) {
  Optional<AllocFnsTy> FnData = getAllocationSize(&CB, TLI);
  if (!FnData)
    return unknown();

  // Handle strdup-like functions separately.
  if (FnData->AllocTy == StrDupLike) {
    // TODO: implement evaluation of strdup/strndup
    return unknown();
  }

  Value *FirstArg = CB.getArgOperand(FnData->FstParam);
  FirstArg = Builder.CreateZExtOrTrunc(FirstArg, IntTy);
  if (FnData->SndParam < 0)
    return std::make_pair(FirstArg, Zero);

  Value *SecondArg = CB.getArgOperand(FnData->SndParam);
  SecondArg = Builder.CreateZExtOrTrunc(SecondArg, IntTy);
  Value *Size = Builder.CreateMul(FirstArg, SecondArg);
  return std::make_pair(Size, Zero);
}

SizeOffsetEvalType
ObjectSizeOffsetEvaluator::visitExtractElementInst(ExtractElementInst &) {
  return unknown();
}

SizeOffsetEvalType
ObjectSizeOffsetEvaluator::visitExtractValueInst(ExtractValueInst &) {
  return unknown();
}

SizeOffsetEvalType
ObjectSizeOffsetEvaluator::visitGEPOperator(GEPOperator &GEP) {
  SizeOffsetEvalType PtrData = compute_(GEP.getPointerOperand());
  if (!bothKnown(PtrData))
    return unknown();

  Value *Offset = EmitGEPOffset(&Builder, DL, &GEP, /*NoAssumptions=*/true);
  Offset = Builder.CreateAdd(PtrData.second, Offset);
  return std::make_pair(PtrData.first, Offset);
}
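
// Illustrative note (editorial, not part of the original source): given a
// hypothetical `%q = getelementptr i8, i8* %p, i64 %i`, the visitor keeps the
// size of the object underlying %p and adds the GEP's byte offset (%i here)
// to the accumulated offset, so a later `size - offset` computation reflects
// the bytes remaining after %q.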

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitIntToPtrInst(IntToPtrInst &) {
  // clueless
  return unknown();
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitLoadInst(LoadInst &) {
  return unknown();
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitPHINode(PHINode &PHI) {
  // Create 2 PHIs: one for size and another for offset.
  PHINode *SizePHI = Builder.CreatePHI(IntTy, PHI.getNumIncomingValues());
  PHINode *OffsetPHI = Builder.CreatePHI(IntTy, PHI.getNumIncomingValues());

  // Insert right away in the cache to handle recursive PHIs.
  CacheMap[&PHI] = std::make_pair(SizePHI, OffsetPHI);

  // Compute offset/size for each PHI incoming pointer.
  for (unsigned i = 0, e = PHI.getNumIncomingValues(); i != e; ++i) {
    Builder.SetInsertPoint(&*PHI.getIncomingBlock(i)->getFirstInsertionPt());
    SizeOffsetEvalType EdgeData = compute_(PHI.getIncomingValue(i));

    if (!bothKnown(EdgeData)) {
      OffsetPHI->replaceAllUsesWith(UndefValue::get(IntTy));
      OffsetPHI->eraseFromParent();
      InsertedInstructions.erase(OffsetPHI);
      SizePHI->replaceAllUsesWith(UndefValue::get(IntTy));
      SizePHI->eraseFromParent();
      InsertedInstructions.erase(SizePHI);
      return unknown();
    }
    SizePHI->addIncoming(EdgeData.first, PHI.getIncomingBlock(i));
    OffsetPHI->addIncoming(EdgeData.second, PHI.getIncomingBlock(i));
  }

  Value *Size = SizePHI, *Offset = OffsetPHI;
  if (Value *Tmp = SizePHI->hasConstantValue()) {
    Size = Tmp;
    SizePHI->replaceAllUsesWith(Size);
    SizePHI->eraseFromParent();
    InsertedInstructions.erase(SizePHI);
  }
  if (Value *Tmp = OffsetPHI->hasConstantValue()) {
    Offset = Tmp;
    OffsetPHI->replaceAllUsesWith(Offset);
    OffsetPHI->eraseFromParent();
    InsertedInstructions.erase(OffsetPHI);
  }
  return std::make_pair(Size, Offset);
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitSelectInst(SelectInst &I) {
  SizeOffsetEvalType TrueSide = compute_(I.getTrueValue());
  SizeOffsetEvalType FalseSide = compute_(I.getFalseValue());

  if (!bothKnown(TrueSide) || !bothKnown(FalseSide))
    return unknown();
  if (TrueSide == FalseSide)
    return TrueSide;

  Value *Size = Builder.CreateSelect(I.getCondition(), TrueSide.first,
                                     FalseSide.first);
  Value *Offset = Builder.CreateSelect(I.getCondition(), TrueSide.second,
                                       FalseSide.second);
  return std::make_pair(Size, Offset);
}

SizeOffsetEvalType ObjectSizeOffsetEvaluator::visitInstruction(Instruction &I) {
  LLVM_DEBUG(dbgs() << "ObjectSizeOffsetEvaluator unknown instruction:" << I
                    << '\n');
  return unknown();
}