HWAddressSanitizer.cpp

//===- HWAddressSanitizer.cpp - detector of memory access bugs ----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
/// \file
/// This file is a part of HWAddressSanitizer, a basic address-correctness
/// checker based on tagged addressing.
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation/HWAddressSanitizer.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/CFG.h"
#include "llvm/Analysis/PostDominators.h"
#include "llvm/Analysis/StackSafetyAnalysis.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/BinaryFormat/ELF.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/PassRegistry.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Instrumentation/AddressSanitizerCommon.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
#include <sstream>

using namespace llvm;

#define DEBUG_TYPE "hwasan"

const char kHwasanModuleCtorName[] = "hwasan.module_ctor";
const char kHwasanNoteName[] = "hwasan.note";
const char kHwasanInitName[] = "__hwasan_init";
const char kHwasanPersonalityThunkName[] = "__hwasan_personality_thunk";

const char kHwasanShadowMemoryDynamicAddress[] =
    "__hwasan_shadow_memory_dynamic_address";

// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

static const size_t kDefaultShadowScale = 4;
static const uint64_t kDynamicShadowSentinel =
    std::numeric_limits<uint64_t>::max();

static const unsigned kShadowBaseAlignment = 32;
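// kShadowBaseAlignment is a log2 value: emitPrologue rounds the TLS-derived
// shadow base up to a 1ULL << kShadowBaseAlignment boundary.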
static cl::opt<std::string>
    ClMemoryAccessCallbackPrefix("hwasan-memory-access-callback-prefix",
                                 cl::desc("Prefix for memory access callbacks"),
                                 cl::Hidden, cl::init("__hwasan_"));

static cl::opt<bool> ClInstrumentWithCalls(
    "hwasan-instrument-with-calls",
    cl::desc("instrument reads and writes with callbacks"), cl::Hidden,
    cl::init(false));

static cl::opt<bool> ClInstrumentReads("hwasan-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClInstrumentWrites("hwasan-instrument-writes",
                       cl::desc("instrument write instructions"), cl::Hidden,
                       cl::init(true));

static cl::opt<bool> ClInstrumentAtomics(
    "hwasan-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool> ClInstrumentByval("hwasan-instrument-byval",
                                       cl::desc("instrument byval arguments"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClRecover("hwasan-recover",
              cl::desc("Enable recovery mode (continue-after-error)."),
              cl::Hidden, cl::init(false));

static cl::opt<bool> ClInstrumentStack("hwasan-instrument-stack",
                                       cl::desc("instrument stack (allocas)"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClUseStackSafety("hwasan-use-stack-safety", cl::Hidden, cl::init(true),
                     cl::desc("Use Stack Safety analysis results"),
                     cl::Optional);

static cl::opt<size_t> ClMaxLifetimes(
    "hwasan-max-lifetimes-for-alloca", cl::init(3), cl::ReallyHidden,
    cl::desc("How many lifetime ends to handle for a single alloca."),
    cl::Optional);

static cl::opt<bool>
    ClUseAfterScope("hwasan-use-after-scope",
                    cl::desc("detect use after scope within function"),
                    cl::Hidden, cl::init(false));

static cl::opt<bool> ClUARRetagToZero(
    "hwasan-uar-retag-to-zero",
    cl::desc("Clear alloca tags before returning from the function to allow "
             "mixing non-instrumented and instrumented function calls. When "
             "set to false, allocas are retagged before returning from the "
             "function to detect use after return."),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClGenerateTagsWithCalls(
    "hwasan-generate-tags-with-calls",
    cl::desc("generate new tags with runtime library calls"), cl::Hidden,
    cl::init(false));

static cl::opt<bool> ClGlobals("hwasan-globals", cl::desc("Instrument globals"),
                               cl::Hidden, cl::init(false), cl::ZeroOrMore);

static cl::opt<int> ClMatchAllTag(
    "hwasan-match-all-tag",
    cl::desc("don't report bad accesses via pointers with this tag"),
    cl::Hidden, cl::init(-1));

static cl::opt<bool>
    ClEnableKhwasan("hwasan-kernel",
                    cl::desc("Enable KernelHWAddressSanitizer instrumentation"),
                    cl::Hidden, cl::init(false));

// These flags allow changing the shadow mapping and control how shadow memory
// is accessed. The shadow mapping looks like:
//   Shadow = (Mem >> scale) + offset
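// For example, with the default scale of 4, each shadow byte covers a
// 16-byte granule: shadow(0x1000) = (0x1000 >> 4) + offset = 0x100 + offset.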
static cl::opt<uint64_t>
    ClMappingOffset("hwasan-mapping-offset",
                    cl::desc("HWASan shadow mapping offset [EXPERIMENTAL]"),
                    cl::Hidden, cl::init(0));

static cl::opt<bool>
    ClWithIfunc("hwasan-with-ifunc",
                cl::desc("Access dynamic shadow through an ifunc global on "
                         "platforms that support this"),
                cl::Hidden, cl::init(false));

static cl::opt<bool> ClWithTls(
    "hwasan-with-tls",
    cl::desc("Access dynamic shadow through a thread-local pointer on "
             "platforms that support this"),
    cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClRecordStackHistory("hwasan-record-stack-history",
                         cl::desc("Record stack frames with tagged allocations "
                                  "in a thread-local ring buffer"),
                         cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClInstrumentMemIntrinsics("hwasan-instrument-mem-intrinsics",
                              cl::desc("instrument memory intrinsics"),
                              cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClInstrumentLandingPads("hwasan-instrument-landing-pads",
                            cl::desc("instrument landing pads"), cl::Hidden,
                            cl::init(false), cl::ZeroOrMore);

static cl::opt<bool> ClUseShortGranules(
    "hwasan-use-short-granules",
    cl::desc("use short granules in allocas and outlined checks"), cl::Hidden,
    cl::init(false), cl::ZeroOrMore);

static cl::opt<bool> ClInstrumentPersonalityFunctions(
    "hwasan-instrument-personality-functions",
    cl::desc("instrument personality functions"), cl::Hidden, cl::init(false),
    cl::ZeroOrMore);

static cl::opt<bool> ClInlineAllChecks("hwasan-inline-all-checks",
                                       cl::desc("inline all checks"),
                                       cl::Hidden, cl::init(false));

// Enabled from clang by "-fsanitize-hwaddress-experimental-aliasing".
static cl::opt<bool> ClUsePageAliases("hwasan-experimental-use-page-aliases",
                                      cl::desc("Use page aliasing in HWASan"),
                                      cl::Hidden, cl::init(false));

namespace {

bool shouldUsePageAliases(const Triple &TargetTriple) {
  return ClUsePageAliases && TargetTriple.getArch() == Triple::x86_64;
}

bool shouldInstrumentStack(const Triple &TargetTriple) {
  return !shouldUsePageAliases(TargetTriple) && ClInstrumentStack;
}

bool shouldInstrumentWithCalls(const Triple &TargetTriple) {
  return ClInstrumentWithCalls || TargetTriple.getArch() == Triple::x86_64;
}

bool mightUseStackSafetyAnalysis(bool DisableOptimization) {
  return ClUseStackSafety.getNumOccurrences() ? ClUseStackSafety
                                              : !DisableOptimization;
}

bool shouldUseStackSafetyAnalysis(const Triple &TargetTriple,
                                  bool DisableOptimization) {
  return shouldInstrumentStack(TargetTriple) &&
         mightUseStackSafetyAnalysis(DisableOptimization);
}

bool shouldDetectUseAfterScope(const Triple &TargetTriple) {
  return ClUseAfterScope && shouldInstrumentStack(TargetTriple);
}

/// An instrumentation pass implementing detection of addressability bugs
/// using tagged pointers.
class HWAddressSanitizer {
private:
  struct AllocaInfo {
    AllocaInst *AI;
    SmallVector<IntrinsicInst *, 2> LifetimeStart;
    SmallVector<IntrinsicInst *, 2> LifetimeEnd;
  };

public:
  HWAddressSanitizer(Module &M, bool CompileKernel, bool Recover,
                     const StackSafetyGlobalInfo *SSI)
      : M(M), SSI(SSI) {
    this->Recover = ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover;
    this->CompileKernel = ClEnableKhwasan.getNumOccurrences() > 0
                              ? ClEnableKhwasan
                              : CompileKernel;
    initializeModule();
  }

  void setSSI(const StackSafetyGlobalInfo *S) { SSI = S; }

  DenseMap<AllocaInst *, AllocaInst *> padInterestingAllocas(
      const MapVector<AllocaInst *, AllocaInfo> &AllocasToInstrument);
  bool sanitizeFunction(Function &F,
                        llvm::function_ref<const DominatorTree &()> GetDT,
                        llvm::function_ref<const PostDominatorTree &()> GetPDT);
  void initializeModule();
  void createHwasanCtorComdat();

  void initializeCallbacks(Module &M);

  Value *getOpaqueNoopCast(IRBuilder<> &IRB, Value *Val);

  Value *getDynamicShadowIfunc(IRBuilder<> &IRB);
  Value *getShadowNonTls(IRBuilder<> &IRB);

  void untagPointerOperand(Instruction *I, Value *Addr);
  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);

  int64_t getAccessInfo(bool IsWrite, unsigned AccessSizeIndex);
  void instrumentMemAccessOutline(Value *Ptr, bool IsWrite,
                                  unsigned AccessSizeIndex,
                                  Instruction *InsertBefore);
  void instrumentMemAccessInline(Value *Ptr, bool IsWrite,
                                 unsigned AccessSizeIndex,
                                 Instruction *InsertBefore);
  bool ignoreMemIntrinsic(MemIntrinsic *MI);
  void instrumentMemIntrinsic(MemIntrinsic *MI);
  bool instrumentMemAccess(InterestingMemoryOperand &O);
  bool ignoreAccess(Instruction *Inst, Value *Ptr);
  void getInterestingMemoryOperands(
      Instruction *I, SmallVectorImpl<InterestingMemoryOperand> &Interesting);

  bool isInterestingAlloca(const AllocaInst &AI);
  void tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag, size_t Size);
  Value *tagPointer(IRBuilder<> &IRB, Type *Ty, Value *PtrLong, Value *Tag);
  Value *untagPointer(IRBuilder<> &IRB, Value *PtrLong);
  static bool isStandardLifetime(const AllocaInfo &AllocaInfo,
                                 const DominatorTree &DT);
  bool instrumentStack(
      bool ShouldDetectUseAfterScope,
      MapVector<AllocaInst *, AllocaInfo> &AllocasToInstrument,
      SmallVector<Instruction *, 4> &UnrecognizedLifetimes,
      DenseMap<AllocaInst *, std::vector<DbgVariableIntrinsic *>> &AllocaDbgMap,
      SmallVectorImpl<Instruction *> &RetVec, Value *StackTag,
      llvm::function_ref<const DominatorTree &()> GetDT,
      llvm::function_ref<const PostDominatorTree &()> GetPDT);
  Value *readRegister(IRBuilder<> &IRB, StringRef Name);
  bool instrumentLandingPads(SmallVectorImpl<Instruction *> &RetVec);
  Value *getNextTagWithCall(IRBuilder<> &IRB);
  Value *getStackBaseTag(IRBuilder<> &IRB);
  Value *getAllocaTag(IRBuilder<> &IRB, Value *StackTag, AllocaInst *AI,
                      unsigned AllocaNo);
  Value *getUARTag(IRBuilder<> &IRB, Value *StackTag);

  Value *getHwasanThreadSlotPtr(IRBuilder<> &IRB, Type *Ty);
  Value *applyTagMask(IRBuilder<> &IRB, Value *OldTag);
  unsigned retagMask(unsigned AllocaNo);

  void emitPrologue(IRBuilder<> &IRB, bool WithFrameRecord);

  void instrumentGlobal(GlobalVariable *GV, uint8_t Tag);
  void instrumentGlobals();

  void instrumentPersonalityFunctions();

private:
  LLVMContext *C;
  Module &M;
  const StackSafetyGlobalInfo *SSI;
  Triple TargetTriple;
  FunctionCallee HWAsanMemmove, HWAsanMemcpy, HWAsanMemset;
  FunctionCallee HWAsanHandleVfork;

  /// This struct defines the shadow mapping using the rule:
  ///   shadow = (mem >> Scale) + Offset.
  /// If InGlobal is true, then
  ///   extern char __hwasan_shadow[];
  ///   shadow = (mem >> Scale) + &__hwasan_shadow
  /// If InTls is true, then
  ///   extern char *__hwasan_tls;
  ///   shadow = (mem >> Scale) + align_up(__hwasan_tls, kShadowBaseAlignment)
  ///
  /// If WithFrameRecord is true, then __hwasan_tls will be used to access the
  /// ring buffer for storing stack allocations on targets that support it.
  struct ShadowMapping {
    int Scale;
    uint64_t Offset;
    bool InGlobal;
    bool InTls;
    bool WithFrameRecord;

    void init(Triple &TargetTriple, bool InstrumentWithCalls);
    uint64_t getObjectAlignment() const { return 1ULL << Scale; }
  };

  ShadowMapping Mapping;

  Type *VoidTy = Type::getVoidTy(M.getContext());
  Type *IntptrTy;
  Type *Int8PtrTy;
  Type *Int8Ty;
  Type *Int32Ty;
  Type *Int64Ty = Type::getInt64Ty(M.getContext());

  bool CompileKernel;
  bool Recover;
  bool OutlinedChecks;
  bool UseShortGranules;
  bool InstrumentLandingPads;
  bool InstrumentWithCalls;
  bool InstrumentStack;
  bool DetectUseAfterScope;
  bool UsePageAliases;

  bool HasMatchAllTag = false;
  uint8_t MatchAllTag = 0;

  unsigned PointerTagShift;
  uint64_t TagMaskByte;

  Function *HwasanCtorFunction;

  FunctionCallee HwasanMemoryAccessCallback[2][kNumberOfAccessSizes];
  FunctionCallee HwasanMemoryAccessCallbackSized[2];

  FunctionCallee HwasanTagMemoryFunc;
  FunctionCallee HwasanGenerateTagFunc;

  Constant *ShadowGlobal;

  Value *ShadowBase = nullptr;
  Value *StackBaseTag = nullptr;
  GlobalValue *ThreadPtrGlobal = nullptr;
};

class HWAddressSanitizerLegacyPass : public FunctionPass {
public:
  // Pass identification, replacement for typeid.
  static char ID;

  explicit HWAddressSanitizerLegacyPass(bool CompileKernel = false,
                                        bool Recover = false,
                                        bool DisableOptimization = false)
      : FunctionPass(ID), CompileKernel(CompileKernel), Recover(Recover),
        DisableOptimization(DisableOptimization) {
    initializeHWAddressSanitizerLegacyPassPass(
        *PassRegistry::getPassRegistry());
  }

  StringRef getPassName() const override { return "HWAddressSanitizer"; }

  bool doInitialization(Module &M) override {
    HWASan = std::make_unique<HWAddressSanitizer>(M, CompileKernel, Recover,
                                                  /*SSI=*/nullptr);
    return true;
  }

  bool runOnFunction(Function &F) override {
    auto TargetTriple = Triple(F.getParent()->getTargetTriple());
    if (shouldUseStackSafetyAnalysis(TargetTriple, DisableOptimization)) {
      // We cannot call getAnalysis in doInitialization, because that would
      // crash: the required analyses are not initialized yet.
      HWASan->setSSI(
          &getAnalysis<StackSafetyGlobalInfoWrapperPass>().getResult());
    }
    return HWASan->sanitizeFunction(
        F,
        [&]() -> const DominatorTree & {
          return getAnalysis<DominatorTreeWrapperPass>().getDomTree();
        },
        [&]() -> const PostDominatorTree & {
          return getAnalysis<PostDominatorTreeWrapperPass>().getPostDomTree();
        });
  }

  bool doFinalization(Module &M) override {
    HWASan.reset();
    return false;
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    // This is an over-estimation: if we are building for an architecture that
    // doesn't allow stack tagging, we will still load the analysis. That way
    // we don't need to plumb TargetTriple all the way to here.
    if (mightUseStackSafetyAnalysis(DisableOptimization))
      AU.addRequired<StackSafetyGlobalInfoWrapperPass>();
    AU.addRequired<DominatorTreeWrapperPass>();
    AU.addRequired<PostDominatorTreeWrapperPass>();
  }

private:
  std::unique_ptr<HWAddressSanitizer> HWASan;
  bool CompileKernel;
  bool Recover;
  bool DisableOptimization;
};

} // end anonymous namespace
char HWAddressSanitizerLegacyPass::ID = 0;

INITIALIZE_PASS_BEGIN(
    HWAddressSanitizerLegacyPass, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)
INITIALIZE_PASS_DEPENDENCY(StackSafetyGlobalInfoWrapperPass)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_DEPENDENCY(PostDominatorTreeWrapperPass)
INITIALIZE_PASS_END(
    HWAddressSanitizerLegacyPass, "hwasan",
    "HWAddressSanitizer: detect memory bugs using tagged addressing.", false,
    false)

FunctionPass *
llvm::createHWAddressSanitizerLegacyPassPass(bool CompileKernel, bool Recover,
                                             bool DisableOptimization) {
  assert(!CompileKernel || Recover);
  return new HWAddressSanitizerLegacyPass(CompileKernel, Recover,
                                          DisableOptimization);
}

PreservedAnalyses HWAddressSanitizerPass::run(Module &M,
                                              ModuleAnalysisManager &MAM) {
  const StackSafetyGlobalInfo *SSI = nullptr;
  auto TargetTriple = llvm::Triple(M.getTargetTriple());
  if (shouldUseStackSafetyAnalysis(TargetTriple, Options.DisableOptimization))
    SSI = &MAM.getResult<StackSafetyGlobalAnalysis>(M);

  HWAddressSanitizer HWASan(M, Options.CompileKernel, Options.Recover, SSI);
  bool Modified = false;
  auto &FAM = MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  for (Function &F : M) {
    Modified |= HWASan.sanitizeFunction(
        F,
        [&]() -> const DominatorTree & {
          return FAM.getResult<DominatorTreeAnalysis>(F);
        },
        [&]() -> const PostDominatorTree & {
          return FAM.getResult<PostDominatorTreeAnalysis>(F);
        });
  }
  if (Modified)
    return PreservedAnalyses::none();
  return PreservedAnalyses::all();
}

void HWAddressSanitizerPass::printPipeline(
    raw_ostream &OS, function_ref<StringRef(StringRef)> MapClassName2PassName) {
  static_cast<PassInfoMixin<HWAddressSanitizerPass> *>(this)->printPipeline(
      OS, MapClassName2PassName);
  OS << "<";
  if (Options.CompileKernel)
    OS << "kernel;";
  if (Options.Recover)
    OS << "recover";
  OS << ">";
}

void HWAddressSanitizer::createHwasanCtorComdat() {
  std::tie(HwasanCtorFunction, std::ignore) =
      getOrCreateSanitizerCtorAndInitFunctions(
          M, kHwasanModuleCtorName, kHwasanInitName,
          /*InitArgTypes=*/{},
          /*InitArgs=*/{},
          // This callback is invoked when the functions are created the first
          // time. Hook them into the global ctors list in that case:
          [&](Function *Ctor, FunctionCallee) {
            Comdat *CtorComdat = M.getOrInsertComdat(kHwasanModuleCtorName);
            Ctor->setComdat(CtorComdat);
            appendToGlobalCtors(M, Ctor, 0, Ctor);
          });

  // Create a note that contains pointers to the list of global
  // descriptors. Adding a note to the output file will cause the linker to
  // create a PT_NOTE program header pointing to the note that we can use to
  // find the descriptor list starting from the program headers. A function
  // provided by the runtime initializes the shadow memory for the globals by
  // accessing the descriptor list via the note. The dynamic loader needs to
  // call this function whenever a library is loaded.
  //
  // The reason why we use a note for this instead of a more conventional
  // approach of having a global constructor pass a descriptor list pointer to
  // the runtime is because of an order of initialization problem. With
  // constructors we can encounter the following problematic scenario:
  //
  // 1) library A depends on library B and also interposes one of B's symbols
  // 2) B's constructors are called before A's (as required for correctness)
  // 3) during construction, B accesses one of its "own" globals (actually
  //    interposed by A) and triggers a HWASAN failure due to the
  //    initialization for A not having happened yet
  //
  // Even without interposition it is possible to run into similar situations
  // in cases where two libraries mutually depend on each other.
  //
  // We only need one note per binary, so put everything for the note in a
  // comdat. This needs to be a comdat with an .init_array section to prevent
  // newer versions of lld from discarding the note.
  //
  // Create the note even if we aren't instrumenting globals. This ensures
  // that binaries linked from object files with both instrumented and
  // non-instrumented globals will end up with a note, even if a comdat from
  // an object file with non-instrumented globals is selected. The note is
  // harmless if the runtime doesn't support it, since it will just be
  // ignored.
  Comdat *NoteComdat = M.getOrInsertComdat(kHwasanModuleCtorName);

  Type *Int8Arr0Ty = ArrayType::get(Int8Ty, 0);
  auto Start =
      new GlobalVariable(M, Int8Arr0Ty, true, GlobalVariable::ExternalLinkage,
                         nullptr, "__start_hwasan_globals");
  Start->setVisibility(GlobalValue::HiddenVisibility);
  Start->setDSOLocal(true);
  auto Stop =
      new GlobalVariable(M, Int8Arr0Ty, true, GlobalVariable::ExternalLinkage,
                         nullptr, "__stop_hwasan_globals");
  Stop->setVisibility(GlobalValue::HiddenVisibility);
  Stop->setDSOLocal(true);

  // Null-terminated so actually 8 bytes, which are required in order to align
  // the note properly.
  auto *Name = ConstantDataArray::get(*C, "LLVM\0\0\0");

  auto *NoteTy = StructType::get(Int32Ty, Int32Ty, Int32Ty, Name->getType(),
                                 Int32Ty, Int32Ty);
  auto *Note =
      new GlobalVariable(M, NoteTy, /*isConstant=*/true,
                         GlobalValue::PrivateLinkage, nullptr, kHwasanNoteName);
  Note->setSection(".note.hwasan.globals");
  Note->setComdat(NoteComdat);
  Note->setAlignment(Align(4));
  Note->setDSOLocal(true);

  // The pointers in the note need to be relative so that the note ends up
  // being placed in rodata, which is the standard location for notes.
  auto CreateRelPtr = [&](Constant *Ptr) {
    return ConstantExpr::getTrunc(
        ConstantExpr::getSub(ConstantExpr::getPtrToInt(Ptr, Int64Ty),
                             ConstantExpr::getPtrToInt(Note, Int64Ty)),
        Int32Ty);
  };
  Note->setInitializer(ConstantStruct::getAnon(
      {ConstantInt::get(Int32Ty, 8),                           // n_namesz
       ConstantInt::get(Int32Ty, 8),                           // n_descsz
       ConstantInt::get(Int32Ty, ELF::NT_LLVM_HWASAN_GLOBALS), // n_type
       Name, CreateRelPtr(Start), CreateRelPtr(Stop)}));
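  // The resulting note's 8-byte name is "LLVM" followed by four NUL bytes,
  // and its 8-byte descriptor holds the two 32-bit note-relative pointers to
  // the start/stop symbols of the hwasan_globals section.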
  appendToCompilerUsed(M, Note);

  // Create a zero-length global in hwasan_globals so that the linker will
  // always create start and stop symbols.
  auto Dummy = new GlobalVariable(
      M, Int8Arr0Ty, /*isConstantGlobal*/ true, GlobalVariable::PrivateLinkage,
      Constant::getNullValue(Int8Arr0Ty), "hwasan.dummy.global");
  Dummy->setSection("hwasan_globals");
  Dummy->setComdat(NoteComdat);
  Dummy->setMetadata(LLVMContext::MD_associated,
                     MDNode::get(*C, ValueAsMetadata::get(Note)));
  appendToCompilerUsed(M, Dummy);
}

/// Module-level initialization.
///
/// Inserts a call to __hwasan_init into the module's constructor list.
void HWAddressSanitizer::initializeModule() {
  LLVM_DEBUG(dbgs() << "Init " << M.getName() << "\n");
  auto &DL = M.getDataLayout();

  TargetTriple = Triple(M.getTargetTriple());

  // x86_64 currently has two modes:
  // - Intel LAM (default)
  // - pointer aliasing (heap only)
  bool IsX86_64 = TargetTriple.getArch() == Triple::x86_64;
  UsePageAliases = shouldUsePageAliases(TargetTriple);
  InstrumentWithCalls = shouldInstrumentWithCalls(TargetTriple);
  InstrumentStack = shouldInstrumentStack(TargetTriple);
  DetectUseAfterScope = shouldDetectUseAfterScope(TargetTriple);
  PointerTagShift = IsX86_64 ? 57 : 56;
  TagMaskByte = IsX86_64 ? 0x3F : 0xFF;
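  // I.e. on AArch64 the tag occupies the whole top byte (bits 56..63), while
  // on x86_64 with Intel LAM the shift of 57 and the 0x3F mask leave only 6
  // usable tag bits (bits 57..62).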
  Mapping.init(TargetTriple, InstrumentWithCalls);

  C = &(M.getContext());
  IRBuilder<> IRB(*C);
  IntptrTy = IRB.getIntPtrTy(DL);
  Int8PtrTy = IRB.getInt8PtrTy();
  Int8Ty = IRB.getInt8Ty();
  Int32Ty = IRB.getInt32Ty();

  HwasanCtorFunction = nullptr;

  // Older versions of Android do not have the required runtime support for
  // short granules, globals, or personality function instrumentation. On
  // other platforms we currently require using the latest version of the
  // runtime.
  bool NewRuntime =
      !TargetTriple.isAndroid() || !TargetTriple.isAndroidVersionLT(30);

  UseShortGranules =
      ClUseShortGranules.getNumOccurrences() ? ClUseShortGranules : NewRuntime;
  OutlinedChecks =
      TargetTriple.isAArch64() && TargetTriple.isOSBinFormatELF() &&
      (ClInlineAllChecks.getNumOccurrences() ? !ClInlineAllChecks : !Recover);

  if (ClMatchAllTag.getNumOccurrences()) {
    if (ClMatchAllTag != -1) {
      HasMatchAllTag = true;
      MatchAllTag = ClMatchAllTag & 0xFF;
    }
  } else if (CompileKernel) {
    HasMatchAllTag = true;
    MatchAllTag = 0xFF;
  }

  // If we don't have personality function support, fall back to landing pads.
  InstrumentLandingPads = ClInstrumentLandingPads.getNumOccurrences()
                              ? ClInstrumentLandingPads
                              : !NewRuntime;

  if (!CompileKernel) {
    createHwasanCtorComdat();
    bool InstrumentGlobals =
        ClGlobals.getNumOccurrences() ? ClGlobals : NewRuntime;

    if (InstrumentGlobals && !UsePageAliases)
      instrumentGlobals();

    bool InstrumentPersonalityFunctions =
        ClInstrumentPersonalityFunctions.getNumOccurrences()
            ? ClInstrumentPersonalityFunctions
            : NewRuntime;
    if (InstrumentPersonalityFunctions)
      instrumentPersonalityFunctions();
  }

  if (!TargetTriple.isAndroid()) {
    Constant *C = M.getOrInsertGlobal("__hwasan_tls", IntptrTy, [&] {
      auto *GV = new GlobalVariable(M, IntptrTy, /*isConstant=*/false,
                                    GlobalValue::ExternalLinkage, nullptr,
                                    "__hwasan_tls", nullptr,
                                    GlobalVariable::InitialExecTLSModel);
      appendToCompilerUsed(M, GV);
      return GV;
    });
    ThreadPtrGlobal = cast<GlobalVariable>(C);
  }
}

void HWAddressSanitizer::initializeCallbacks(Module &M) {
  IRBuilder<> IRB(*C);
  for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
    const std::string TypeStr = AccessIsWrite ? "store" : "load";
    const std::string EndingStr = Recover ? "_noabort" : "";

    HwasanMemoryAccessCallbackSized[AccessIsWrite] = M.getOrInsertFunction(
        ClMemoryAccessCallbackPrefix + TypeStr + "N" + EndingStr,
        FunctionType::get(IRB.getVoidTy(), {IntptrTy, IntptrTy}, false));

    for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
         AccessSizeIndex++) {
      HwasanMemoryAccessCallback[AccessIsWrite][AccessSizeIndex] =
          M.getOrInsertFunction(
              ClMemoryAccessCallbackPrefix + TypeStr +
                  itostr(1ULL << AccessSizeIndex) + EndingStr,
              FunctionType::get(IRB.getVoidTy(), {IntptrTy}, false));
    }
  }
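  // The loop above declares __hwasan_{load,store}{1,2,4,8,16} (with the
  // default prefix) plus the sized __hwasan_{load,store}N variants; in
  // recover mode each name gets a _noabort suffix.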
  HwasanTagMemoryFunc = M.getOrInsertFunction(
      "__hwasan_tag_memory", IRB.getVoidTy(), Int8PtrTy, Int8Ty, IntptrTy);
  HwasanGenerateTagFunc =
      M.getOrInsertFunction("__hwasan_generate_tag", Int8Ty);

  ShadowGlobal = M.getOrInsertGlobal("__hwasan_shadow",
                                     ArrayType::get(IRB.getInt8Ty(), 0));

  const std::string MemIntrinCallbackPrefix =
      CompileKernel ? std::string("") : ClMemoryAccessCallbackPrefix;
  HWAsanMemmove = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memmove",
                                        IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                        IRB.getInt8PtrTy(), IntptrTy);
  HWAsanMemcpy = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memcpy",
                                       IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                       IRB.getInt8PtrTy(), IntptrTy);
  HWAsanMemset = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memset",
                                       IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
                                       IRB.getInt32Ty(), IntptrTy);

  HWAsanHandleVfork =
      M.getOrInsertFunction("__hwasan_handle_vfork", IRB.getVoidTy(), IntptrTy);
}

Value *HWAddressSanitizer::getOpaqueNoopCast(IRBuilder<> &IRB, Value *Val) {
  // An empty inline asm with input reg == output reg.
  // An opaque no-op cast, basically.
  // This prevents code bloat as a result of rematerializing trivial
  // definitions such as constants or global addresses at every load and
  // store.
  InlineAsm *Asm =
      InlineAsm::get(FunctionType::get(Int8PtrTy, {Val->getType()}, false),
                     StringRef(""), StringRef("=r,0"),
                     /*hasSideEffects=*/false);
  return IRB.CreateCall(Asm, {Val}, ".hwasan.shadow");
}

Value *HWAddressSanitizer::getDynamicShadowIfunc(IRBuilder<> &IRB) {
  return getOpaqueNoopCast(IRB, ShadowGlobal);
}

Value *HWAddressSanitizer::getShadowNonTls(IRBuilder<> &IRB) {
  if (Mapping.Offset != kDynamicShadowSentinel)
    return getOpaqueNoopCast(
        IRB, ConstantExpr::getIntToPtr(
                 ConstantInt::get(IntptrTy, Mapping.Offset), Int8PtrTy));

  if (Mapping.InGlobal) {
    return getDynamicShadowIfunc(IRB);
  } else {
    Value *GlobalDynamicAddress =
        IRB.GetInsertBlock()->getParent()->getParent()->getOrInsertGlobal(
            kHwasanShadowMemoryDynamicAddress, Int8PtrTy);
    return IRB.CreateLoad(Int8PtrTy, GlobalDynamicAddress);
  }
}

bool HWAddressSanitizer::ignoreAccess(Instruction *Inst, Value *Ptr) {
  // Do not instrument accesses from different address spaces; we cannot deal
  // with them.
  Type *PtrTy = cast<PointerType>(Ptr->getType()->getScalarType());
  if (PtrTy->getPointerAddressSpace() != 0)
    return true;

  // Ignore swifterror addresses.
  // swifterror memory addresses are mem2reg promoted by instruction
  // selection. As such they cannot have regular uses like an instrumentation
  // function and it makes no sense to track them as memory.
  if (Ptr->isSwiftError())
    return true;

  if (findAllocaForValue(Ptr)) {
    if (!InstrumentStack)
      return true;
    if (SSI && SSI->stackAccessIsSafe(*Inst))
      return true;
  }
  return false;
}

void HWAddressSanitizer::getInterestingMemoryOperands(
    Instruction *I, SmallVectorImpl<InterestingMemoryOperand> &Interesting) {
  // Skip memory accesses inserted by other instrumentation.
  if (I->hasMetadata("nosanitize"))
    return;

  // Do not instrument the load fetching the dynamic shadow address.
  if (ShadowBase == I)
    return;

  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    if (!ClInstrumentReads || ignoreAccess(I, LI->getPointerOperand()))
      return;
    Interesting.emplace_back(I, LI->getPointerOperandIndex(), false,
                             LI->getType(), LI->getAlign());
  } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (!ClInstrumentWrites || ignoreAccess(I, SI->getPointerOperand()))
      return;
    Interesting.emplace_back(I, SI->getPointerOperandIndex(), true,
                             SI->getValueOperand()->getType(), SI->getAlign());
  } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
    if (!ClInstrumentAtomics || ignoreAccess(I, RMW->getPointerOperand()))
      return;
    Interesting.emplace_back(I, RMW->getPointerOperandIndex(), true,
                             RMW->getValOperand()->getType(), None);
  } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (!ClInstrumentAtomics || ignoreAccess(I, XCHG->getPointerOperand()))
      return;
    Interesting.emplace_back(I, XCHG->getPointerOperandIndex(), true,
                             XCHG->getCompareOperand()->getType(), None);
  } else if (auto CI = dyn_cast<CallInst>(I)) {
    for (unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
      if (!ClInstrumentByval || !CI->isByValArgument(ArgNo) ||
          ignoreAccess(I, CI->getArgOperand(ArgNo)))
        continue;
      Type *Ty = CI->getParamByValType(ArgNo);
      Interesting.emplace_back(I, ArgNo, false, Ty, Align(1));
    }
  }
}

static unsigned getPointerOperandIndex(Instruction *I) {
  if (LoadInst *LI = dyn_cast<LoadInst>(I))
    return LI->getPointerOperandIndex();
  if (StoreInst *SI = dyn_cast<StoreInst>(I))
    return SI->getPointerOperandIndex();
  if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I))
    return RMW->getPointerOperandIndex();
  if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I))
    return XCHG->getPointerOperandIndex();
  report_fatal_error("Unexpected instruction");
  return -1;
}
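// For example, a 32-bit access has TypeSize 32: 32 / 8 == 4 and
// countTrailingZeros(4) == 2, which selects the "4"-sized callback
// (e.g. __hwasan_load4).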
static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  size_t Res = countTrailingZeros(TypeSize / 8);
  assert(Res < kNumberOfAccessSizes);
  return Res;
}
void HWAddressSanitizer::untagPointerOperand(Instruction *I, Value *Addr) {
  if (TargetTriple.isAArch64() || TargetTriple.getArch() == Triple::x86_64)
    return;

  IRBuilder<> IRB(I);
  Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  Value *UntaggedPtr =
      IRB.CreateIntToPtr(untagPointer(IRB, AddrLong), Addr->getType());
  I->setOperand(getPointerOperandIndex(I), UntaggedPtr);
}

Value *HWAddressSanitizer::memToShadow(Value *Mem, IRBuilder<> &IRB) {
  // Mem >> Scale
  Value *Shadow = IRB.CreateLShr(Mem, Mapping.Scale);
  if (Mapping.Offset == 0)
    return IRB.CreateIntToPtr(Shadow, Int8PtrTy);
  // (Mem >> Scale) + Offset
  return IRB.CreateGEP(Int8Ty, ShadowBase, Shadow);
}
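// Packs the parameters of a check into one constant. Illustrative example:
// for a recoverable 8-byte read, AccessSizeIndex == log2(8) == 3 and IsWrite
// == 0, so only the Recover and AccessSize fields are non-zero (the bit
// positions are defined by HWASanAccessInfo).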
int64_t HWAddressSanitizer::getAccessInfo(bool IsWrite,
                                          unsigned AccessSizeIndex) {
  return (CompileKernel << HWASanAccessInfo::CompileKernelShift) +
         (HasMatchAllTag << HWASanAccessInfo::HasMatchAllShift) +
         (MatchAllTag << HWASanAccessInfo::MatchAllShift) +
         (Recover << HWASanAccessInfo::RecoverShift) +
         (IsWrite << HWASanAccessInfo::IsWriteShift) +
         (AccessSizeIndex << HWASanAccessInfo::AccessSizeShift);
}
void HWAddressSanitizer::instrumentMemAccessOutline(Value *Ptr, bool IsWrite,
                                                    unsigned AccessSizeIndex,
                                                    Instruction *InsertBefore) {
  assert(!UsePageAliases);
  const int64_t AccessInfo = getAccessInfo(IsWrite, AccessSizeIndex);
  IRBuilder<> IRB(InsertBefore);
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  Ptr = IRB.CreateBitCast(Ptr, Int8PtrTy);
  IRB.CreateCall(Intrinsic::getDeclaration(
                     M, UseShortGranules
                            ? Intrinsic::hwasan_check_memaccess_shortgranules
                            : Intrinsic::hwasan_check_memaccess),
                 {ShadowBase, Ptr, ConstantInt::get(Int32Ty, AccessInfo)});
}

void HWAddressSanitizer::instrumentMemAccessInline(Value *Ptr, bool IsWrite,
                                                   unsigned AccessSizeIndex,
                                                   Instruction *InsertBefore) {
  assert(!UsePageAliases);
  const int64_t AccessInfo = getAccessInfo(IsWrite, AccessSizeIndex);
  IRBuilder<> IRB(InsertBefore);

  Value *PtrLong = IRB.CreatePointerCast(Ptr, IntptrTy);
  Value *PtrTag = IRB.CreateTrunc(IRB.CreateLShr(PtrLong, PointerTagShift),
                                  IRB.getInt8Ty());
  Value *AddrLong = untagPointer(IRB, PtrLong);
  Value *Shadow = memToShadow(AddrLong, IRB);
  Value *MemTag = IRB.CreateLoad(Int8Ty, Shadow);
  Value *TagMismatch = IRB.CreateICmpNE(PtrTag, MemTag);

  if (HasMatchAllTag) {
    Value *TagNotIgnored = IRB.CreateICmpNE(
        PtrTag, ConstantInt::get(PtrTag->getType(), MatchAllTag));
    TagMismatch = IRB.CreateAnd(TagMismatch, TagNotIgnored);
  }

  Instruction *CheckTerm =
      SplitBlockAndInsertIfThen(TagMismatch, InsertBefore, false,
                                MDBuilder(*C).createBranchWeights(1, 100000));

  IRB.SetInsertPoint(CheckTerm);
  Value *OutOfShortGranuleTagRange =
      IRB.CreateICmpUGT(MemTag, ConstantInt::get(Int8Ty, 15));
  Instruction *CheckFailTerm =
      SplitBlockAndInsertIfThen(OutOfShortGranuleTagRange, CheckTerm, !Recover,
                                MDBuilder(*C).createBranchWeights(1, 100000));

  IRB.SetInsertPoint(CheckTerm);
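  // Shadow values 1..15 denote a short granule holding that many valid
  // bytes. The access is in bounds iff the offset of its last byte within
  // the 16-byte granule is below the shadow value: e.g. for a 5-byte object
  // (shadow value 5), a 4-byte load at granule offset 0 passes (0 + 3 < 5),
  // while the same load at offset 2 fails (2 + 3 >= 5).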
  Value *PtrLowBits = IRB.CreateTrunc(IRB.CreateAnd(PtrLong, 15), Int8Ty);
  PtrLowBits = IRB.CreateAdd(
      PtrLowBits, ConstantInt::get(Int8Ty, (1 << AccessSizeIndex) - 1));
  Value *PtrLowBitsOOB = IRB.CreateICmpUGE(PtrLowBits, MemTag);
  SplitBlockAndInsertIfThen(PtrLowBitsOOB, CheckTerm, false,
                            MDBuilder(*C).createBranchWeights(1, 100000),
                            (DomTreeUpdater *)nullptr, nullptr,
                            CheckFailTerm->getParent());

  IRB.SetInsertPoint(CheckTerm);
  Value *InlineTagAddr = IRB.CreateOr(AddrLong, 15);
  InlineTagAddr = IRB.CreateIntToPtr(InlineTagAddr, Int8PtrTy);
  Value *InlineTag = IRB.CreateLoad(Int8Ty, InlineTagAddr);
  Value *InlineTagMismatch = IRB.CreateICmpNE(PtrTag, InlineTag);
  SplitBlockAndInsertIfThen(InlineTagMismatch, CheckTerm, false,
                            MDBuilder(*C).createBranchWeights(1, 100000),
                            (DomTreeUpdater *)nullptr, nullptr,
                            CheckFailTerm->getParent());

  IRB.SetInsertPoint(CheckFailTerm);
  InlineAsm *Asm;
  switch (TargetTriple.getArch()) {
  case Triple::x86_64:
    // The signal handler will find the data address in rdi.
    Asm = InlineAsm::get(
        FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
        "int3\nnopl " +
            itostr(0x40 + (AccessInfo & HWASanAccessInfo::RuntimeMask)) +
            "(%rax)",
        "{rdi}",
        /*hasSideEffects=*/true);
    break;
  case Triple::aarch64:
  case Triple::aarch64_be:
    // The signal handler will find the data address in x0.
    Asm = InlineAsm::get(
        FunctionType::get(IRB.getVoidTy(), {PtrLong->getType()}, false),
        "brk #" + itostr(0x900 + (AccessInfo & HWASanAccessInfo::RuntimeMask)),
        "{x0}",
        /*hasSideEffects=*/true);
    break;
  default:
    report_fatal_error("unsupported architecture");
  }
  IRB.CreateCall(Asm, PtrLong);
  if (Recover)
    cast<BranchInst>(CheckFailTerm)->setSuccessor(0, CheckTerm->getParent());
}

bool HWAddressSanitizer::ignoreMemIntrinsic(MemIntrinsic *MI) {
  if (MemTransferInst *MTI = dyn_cast<MemTransferInst>(MI)) {
    return (!ClInstrumentWrites || ignoreAccess(MTI, MTI->getDest())) &&
           (!ClInstrumentReads || ignoreAccess(MTI, MTI->getSource()));
  }
  if (isa<MemSetInst>(MI))
    return !ClInstrumentWrites || ignoreAccess(MI, MI->getDest());
  return false;
}

void HWAddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
  IRBuilder<> IRB(MI);
  if (isa<MemTransferInst>(MI)) {
    IRB.CreateCall(
        isa<MemMoveInst>(MI) ? HWAsanMemmove : HWAsanMemcpy,
        {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
         IRB.CreatePointerCast(MI->getOperand(1), IRB.getInt8PtrTy()),
         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  } else if (isa<MemSetInst>(MI)) {
    IRB.CreateCall(
        HWAsanMemset,
        {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
         IRB.CreateIntCast(MI->getOperand(1), IRB.getInt32Ty(), false),
         IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  }
  MI->eraseFromParent();
}

bool HWAddressSanitizer::instrumentMemAccess(InterestingMemoryOperand &O) {
  Value *Addr = O.getPtr();

  LLVM_DEBUG(dbgs() << "Instrumenting: " << *O.getInsn() << "\n");

  if (O.MaybeMask)
    return false; // FIXME

  IRBuilder<> IRB(O.getInsn());
  if (isPowerOf2_64(O.TypeSize) &&
      (O.TypeSize / 8 <= (1ULL << (kNumberOfAccessSizes - 1))) &&
      (!O.Alignment || *O.Alignment >= (1ULL << Mapping.Scale) ||
       *O.Alignment >= O.TypeSize / 8)) {
    size_t AccessSizeIndex = TypeSizeToSizeIndex(O.TypeSize);
    if (InstrumentWithCalls) {
      IRB.CreateCall(HwasanMemoryAccessCallback[O.IsWrite][AccessSizeIndex],
                     IRB.CreatePointerCast(Addr, IntptrTy));
    } else if (OutlinedChecks) {
      instrumentMemAccessOutline(Addr, O.IsWrite, AccessSizeIndex, O.getInsn());
    } else {
      instrumentMemAccessInline(Addr, O.IsWrite, AccessSizeIndex, O.getInsn());
    }
  } else {
    IRB.CreateCall(HwasanMemoryAccessCallbackSized[O.IsWrite],
                   {IRB.CreatePointerCast(Addr, IntptrTy),
                    ConstantInt::get(IntptrTy, O.TypeSize / 8)});
  }
  untagPointerOperand(O.getInsn(), Addr);

  return true;
}

static uint64_t getAllocaSizeInBytes(const AllocaInst &AI) {
  uint64_t ArraySize = 1;
  if (AI.isArrayAllocation()) {
    const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
    assert(CI && "non-constant array size");
    ArraySize = CI->getZExtValue();
  }
  Type *Ty = AI.getAllocatedType();
  uint64_t SizeInBytes = AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
  return SizeInBytes * ArraySize;
}

void HWAddressSanitizer::tagAlloca(IRBuilder<> &IRB, AllocaInst *AI, Value *Tag,
                                   size_t Size) {
  size_t AlignedSize = alignTo(Size, Mapping.getObjectAlignment());
  if (!UseShortGranules)
    Size = AlignedSize;

  Value *JustTag = IRB.CreateTrunc(Tag, IRB.getInt8Ty());
  if (InstrumentWithCalls) {
    IRB.CreateCall(HwasanTagMemoryFunc,
                   {IRB.CreatePointerCast(AI, Int8PtrTy), JustTag,
                    ConstantInt::get(IntptrTy, AlignedSize)});
  } else {
    size_t ShadowSize = Size >> Mapping.Scale;
    Value *ShadowPtr = memToShadow(IRB.CreatePointerCast(AI, IntptrTy), IRB);
    // If this memset is not inlined, it will be intercepted in the hwasan
    // runtime library. That's OK, because the interceptor skips the checks
    // if the address is in the shadow region.
    // FIXME: the interceptor is not as fast as real memset. Consider lowering
    // llvm.memset right here into either a sequence of stores, or a call to
    // hwasan_tag_memory.
    if (ShadowSize)
      IRB.CreateMemSet(ShadowPtr, JustTag, ShadowSize, Align(1));
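    // Worked example: a 20-byte alloca has AlignedSize 32 and ShadowSize 1,
    // so the memset above tags one full 16-byte granule. The stores below
    // then write the short-granule size 20 % 16 == 4 to the second shadow
    // byte and place the real tag in the last byte of the alloca (offset 31).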
    if (Size != AlignedSize) {
      IRB.CreateStore(
          ConstantInt::get(Int8Ty, Size % Mapping.getObjectAlignment()),
          IRB.CreateConstGEP1_32(Int8Ty, ShadowPtr, ShadowSize));
      IRB.CreateStore(JustTag, IRB.CreateConstGEP1_32(
                                   Int8Ty, IRB.CreateBitCast(AI, Int8PtrTy),
                                   AlignedSize - 1));
    }
  }
}
unsigned HWAddressSanitizer::retagMask(unsigned AllocaNo) {
  if (TargetTriple.getArch() == Triple::x86_64)
    return AllocaNo & TagMaskByte;

  // A list of 8-bit numbers that have at most one run of non-zero bits.
  // x = x ^ (mask << 56) can be encoded as a single armv8 instruction for
  // these masks.
  // The list does not include the value 255, which is used for UAR.
  //
  // Because we are more likely to use earlier elements of this list than
  // later ones, it is sorted in increasing order of probability of collision
  // with a mask allocated (temporally) nearby. The program that generated
  // this list can be found at:
  // https://github.com/google/sanitizers/blob/master/hwaddress-sanitizer/sort_masks.py
  static unsigned FastMasks[] = {0,  128, 64,  192, 32,  96,  224, 112, 240,
                                 48, 16,  120, 248, 56,  24,  8,   124, 252,
                                 60, 28,  12,  4,   126, 254, 62,  30,  14,
                                 6,  2,   127, 63,  31,  15,  7,   3,   1};
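  // E.g. successive allocas 0, 1, 2, ... receive masks 0, 128, 64, 192, ...,
  // wrapping around after 36 entries, so temporally close allocas end up
  // with distinct tags relative to the stack base tag.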
  return FastMasks[AllocaNo % (sizeof(FastMasks) / sizeof(FastMasks[0]))];
}
Value *HWAddressSanitizer::applyTagMask(IRBuilder<> &IRB, Value *OldTag) {
  if (TargetTriple.getArch() == Triple::x86_64) {
    Constant *TagMask = ConstantInt::get(IntptrTy, TagMaskByte);
    Value *NewTag = IRB.CreateAnd(OldTag, TagMask);
    return NewTag;
  }
  // aarch64 uses 8-bit tags, so no mask is needed.
  return OldTag;
}

Value *HWAddressSanitizer::getNextTagWithCall(IRBuilder<> &IRB) {
  return IRB.CreateZExt(IRB.CreateCall(HwasanGenerateTagFunc), IntptrTy);
}

Value *HWAddressSanitizer::getStackBaseTag(IRBuilder<> &IRB) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  if (StackBaseTag)
    return StackBaseTag;
  // FIXME: use addressofreturnaddress (but implement it in aarch64 backend
  // first).
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  auto GetStackPointerFn = Intrinsic::getDeclaration(
      M, Intrinsic::frameaddress,
      IRB.getInt8PtrTy(M->getDataLayout().getAllocaAddrSpace()));
  Value *StackPointer = IRB.CreateCall(
      GetStackPointerFn, {Constant::getNullValue(IRB.getInt32Ty())});

  // Extract some entropy from the stack pointer for the tags.
  // Take bits 20..28 (ASLR entropy) and xor with bits 0..8 (these differ
  // between functions).
  Value *StackPointerLong = IRB.CreatePointerCast(StackPointer, IntptrTy);
  Value *StackTag =
      applyTagMask(IRB, IRB.CreateXor(StackPointerLong,
                                      IRB.CreateLShr(StackPointerLong, 20)));
  StackTag->setName("hwasan.stack.base.tag");
  return StackTag;
}

Value *HWAddressSanitizer::getAllocaTag(IRBuilder<> &IRB, Value *StackTag,
                                        AllocaInst *AI, unsigned AllocaNo) {
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag,
                       ConstantInt::get(IntptrTy, retagMask(AllocaNo)));
}

Value *HWAddressSanitizer::getUARTag(IRBuilder<> &IRB, Value *StackTag) {
  if (ClUARRetagToZero)
    return ConstantInt::get(IntptrTy, 0);
  if (ClGenerateTagsWithCalls)
    return getNextTagWithCall(IRB);
  return IRB.CreateXor(StackTag, ConstantInt::get(IntptrTy, TagMaskByte));
}

// Add a tag to an address.
Value *HWAddressSanitizer::tagPointer(IRBuilder<> &IRB, Type *Ty,
                                      Value *PtrLong, Value *Tag) {
  assert(!UsePageAliases);
  Value *TaggedPtrLong;
  if (CompileKernel) {
    // Kernel addresses have 0xFF in the most significant byte.
    Value *ShiftedTag =
        IRB.CreateOr(IRB.CreateShl(Tag, PointerTagShift),
                     ConstantInt::get(IntptrTy, (1ULL << PointerTagShift) - 1));
    TaggedPtrLong = IRB.CreateAnd(PtrLong, ShiftedTag);
  } else {
    // Userspace can simply do OR (tag << PointerTagShift).
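    // E.g. tag 0x2a with a shift of 56 turns 0x00007fff12345678 into
    // 0x2a007fff12345678.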
    Value *ShiftedTag = IRB.CreateShl(Tag, PointerTagShift);
    TaggedPtrLong = IRB.CreateOr(PtrLong, ShiftedTag);
  }
  return IRB.CreateIntToPtr(TaggedPtrLong, Ty);
}
  1043. // Remove tag from an address.
  1044. Value *HWAddressSanitizer::untagPointer(IRBuilder<> &IRB, Value *PtrLong) {
  1045. assert(!UsePageAliases);
  1046. Value *UntaggedPtrLong;
  1047. if (CompileKernel) {
  1048. // Kernel addresses have 0xFF in the most significant byte.
  1049. UntaggedPtrLong =
  1050. IRB.CreateOr(PtrLong, ConstantInt::get(PtrLong->getType(),
  1051. 0xFFULL << PointerTagShift));
  1052. } else {
  1053. // Userspace addresses have 0x00.
  1054. UntaggedPtrLong =
  1055. IRB.CreateAnd(PtrLong, ConstantInt::get(PtrLong->getType(),
  1056. ~(0xFFULL << PointerTagShift)));
  1057. }
  1058. return UntaggedPtrLong;
  1059. }
  1060. Value *HWAddressSanitizer::getHwasanThreadSlotPtr(IRBuilder<> &IRB, Type *Ty) {
  1061. Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  1062. if (TargetTriple.isAArch64() && TargetTriple.isAndroid()) {
  1063. // Android provides a fixed TLS slot for sanitizers. See TLS_SLOT_SANITIZER
  1064. // in Bionic's libc/private/bionic_tls.h.
    Function *ThreadPointerFunc =
        Intrinsic::getDeclaration(M, Intrinsic::thread_pointer);
    Value *SlotPtr = IRB.CreatePointerCast(
        IRB.CreateConstGEP1_32(IRB.getInt8Ty(),
                               IRB.CreateCall(ThreadPointerFunc), 0x30),
        Ty->getPointerTo(0));
    return SlotPtr;
  }
  if (ThreadPtrGlobal)
    return ThreadPtrGlobal;
  return nullptr;
}
void HWAddressSanitizer::emitPrologue(IRBuilder<> &IRB, bool WithFrameRecord) {
  if (!Mapping.InTls)
    ShadowBase = getShadowNonTls(IRB);
  else if (!WithFrameRecord && TargetTriple.isAndroid())
    ShadowBase = getDynamicShadowIfunc(IRB);

  if (!WithFrameRecord && ShadowBase)
    return;

  Value *SlotPtr = getHwasanThreadSlotPtr(IRB, IntptrTy);
  assert(SlotPtr);

  Value *ThreadLong = IRB.CreateLoad(IntptrTy, SlotPtr);
  // Extract the address field from ThreadLong. Unnecessary on AArch64 with TBI.
  Value *ThreadLongMaybeUntagged =
      TargetTriple.isAArch64() ? ThreadLong : untagPointer(IRB, ThreadLong);

  if (WithFrameRecord) {
    Function *F = IRB.GetInsertBlock()->getParent();
    StackBaseTag = IRB.CreateAShr(ThreadLong, 3);

    // Prepare ring buffer data.
    Value *PC;
    if (TargetTriple.getArch() == Triple::aarch64)
      PC = readRegister(IRB, "pc");
    else
      PC = IRB.CreatePtrToInt(F, IntptrTy);
    Module *M = F->getParent();
    auto GetStackPointerFn = Intrinsic::getDeclaration(
        M, Intrinsic::frameaddress,
        IRB.getInt8PtrTy(M->getDataLayout().getAllocaAddrSpace()));
    Value *SP = IRB.CreatePtrToInt(
        IRB.CreateCall(GetStackPointerFn,
                       {Constant::getNullValue(IRB.getInt32Ty())}),
        IntptrTy);

    // Mix SP and PC.
    // Assumptions:
    //   PC is 0x0000PPPPPPPPPPPP  (48 bits are meaningful, others are zero)
    //   SP is 0xsssssssssssSSSS0  (4 lower bits are zero)
    // We only really need ~20 lower non-zero bits (SSSS), so we mix like this:
    //   0xSSSSPPPPPPPPPPPP
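    // For example (illustrative): with PC == 0x00007f123456789a and
    // SP == 0x00007ffd87654320, SP << 44 == 0x5432000000000000, so the
    // stored record is PC | (SP << 44) == 0x54327f123456789a.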
    SP = IRB.CreateShl(SP, 44);

    // Store data to ring buffer.
    Value *RecordPtr =
        IRB.CreateIntToPtr(ThreadLongMaybeUntagged, IntptrTy->getPointerTo(0));
    IRB.CreateStore(IRB.CreateOr(PC, SP), RecordPtr);

    // Update the ring buffer. The top byte of ThreadLong defines the size of
    // the buffer in pages; it must be a power of two, and the start of the
    // buffer must be aligned to twice that much. Therefore wrap-around of the
    // ring buffer is simply Addr &= ~((ThreadLong >> 56) << 12).
    // The use of AShr instead of LShr is due to
    // https://bugs.llvm.org/show_bug.cgi?id=39030
    // The runtime library makes sure not to use the highest bit.
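    // For example (illustrative): if the top byte is 2 (an 8 KiB buffer),
    // (ThreadLong >> 56) << 12 == 0x2000, so WrapMask == ~0x2000 and adding 8
    // clears the 0x2000 bit exactly when the record pointer steps past the
    // end of the buffer, sending it back to the (2 * size aligned) start.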
    Value *WrapMask = IRB.CreateXor(
        IRB.CreateShl(IRB.CreateAShr(ThreadLong, 56), 12, "", true, true),
        ConstantInt::get(IntptrTy, (uint64_t)-1));
    Value *ThreadLongNew = IRB.CreateAnd(
        IRB.CreateAdd(ThreadLong, ConstantInt::get(IntptrTy, 8)), WrapMask);
    IRB.CreateStore(ThreadLongNew, SlotPtr);
  }

  if (!ShadowBase) {
    // Get the shadow base address by aligning RecordPtr up to the shadow base
    // alignment. Note: this is not correct if the pointer is already aligned.
    // The runtime library makes sure this never happens.
    ShadowBase = IRB.CreateAdd(
        IRB.CreateOr(
            ThreadLongMaybeUntagged,
            ConstantInt::get(IntptrTy, (1ULL << kShadowBaseAlignment) - 1)),
        ConstantInt::get(IntptrTy, 1), "hwasan.shadow");
    ShadowBase = IRB.CreateIntToPtr(ShadowBase, Int8PtrTy);
  }
}
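// Emit a call to the llvm.read_register intrinsic for a named register; used
// to read the program counter ("pc") above and the stack pointer ("sp"/"rsp")
// in the landing pad instrumentation below.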
Value *HWAddressSanitizer::readRegister(IRBuilder<> &IRB, StringRef Name) {
  Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  Function *ReadRegister =
      Intrinsic::getDeclaration(M, Intrinsic::read_register, IntptrTy);
  MDNode *MD = MDNode::get(*C, {MDString::get(*C, Name)});
  Value *Args[] = {MetadataAsValue::get(*C, MD)};
  return IRB.CreateCall(ReadRegister, Args);
}
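// After each landing pad, report the current stack pointer to the runtime so
// that it can untag the frames that were unwound past.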
bool HWAddressSanitizer::instrumentLandingPads(
    SmallVectorImpl<Instruction *> &LandingPadVec) {
  for (auto *LP : LandingPadVec) {
    IRBuilder<> IRB(LP->getNextNode());
    IRB.CreateCall(
        HWAsanHandleVfork,
        {readRegister(IRB, (TargetTriple.getArch() == Triple::x86_64) ? "rsp"
                                                                      : "sp")});
  }
  return true;
}
static bool
maybeReachableFromEachOther(const SmallVectorImpl<IntrinsicInst *> &Insts,
                            const DominatorTree &DT) {
  // If we have too many lifetime ends, give up, as the algorithm below is N^2.
  if (Insts.size() > ClMaxLifetimes)
    return true;
  for (size_t I = 0; I < Insts.size(); ++I) {
    for (size_t J = 0; J < Insts.size(); ++J) {
      if (I == J)
        continue;
      if (isPotentiallyReachable(Insts[I], Insts[J], nullptr, &DT))
        return true;
    }
  }
  return false;
}
// static
bool HWAddressSanitizer::isStandardLifetime(const AllocaInfo &AllocaInfo,
                                            const DominatorTree &DT) {
  // A standard-lifetime alloca has exactly one start and one end in every
  // possible execution. If it has multiple ends, they have to be unreachable
  // from each other, so at most one of them is actually reached in any single
  // execution of the function.
  return AllocaInfo.LifetimeStart.size() == 1 &&
         (AllocaInfo.LifetimeEnd.size() == 1 ||
          (AllocaInfo.LifetimeEnd.size() > 0 &&
           !maybeReachableFromEachOther(AllocaInfo.LifetimeEnd, DT)));
}
bool HWAddressSanitizer::instrumentStack(
    bool ShouldDetectUseAfterScope,
    MapVector<AllocaInst *, AllocaInfo> &AllocasToInstrument,
    SmallVector<Instruction *, 4> &UnrecognizedLifetimes,
    DenseMap<AllocaInst *, std::vector<DbgVariableIntrinsic *>> &AllocaDbgMap,
    SmallVectorImpl<Instruction *> &RetVec, Value *StackTag,
    llvm::function_ref<const DominatorTree &()> GetDT,
    llvm::function_ref<const PostDominatorTree &()> GetPDT) {
  // Ideally, we want to calculate a tagged stack base pointer, and rewrite all
  // alloca addresses using that. Unfortunately, offsets are not known yet
  // (unless we use ASan-style mega-alloca). Instead we keep the base tag in a
  // temp, shift-OR it into each alloca address and xor with the retag mask.
  // This generates one extra instruction per alloca use.
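  // For example (illustrative): the third interesting alloca is rewritten to
  // tagPointer(..., AILong, StackTag ^ retagMask(2)), and every use except the
  // pointer cast feeding the tagging sequence itself is redirected to the
  // tagged address.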
  unsigned int I = 0;

  for (auto &KV : AllocasToInstrument) {
    auto N = I++;
    auto *AI = KV.first;
    AllocaInfo &Info = KV.second;
    IRBuilder<> IRB(AI->getNextNode());

    // Replace uses of the alloca with tagged address.
    Value *Tag = getAllocaTag(IRB, StackTag, AI, N);
    Value *AILong = IRB.CreatePointerCast(AI, IntptrTy);
    Value *Replacement = tagPointer(IRB, AI->getType(), AILong, Tag);
    std::string Name =
        AI->hasName() ? AI->getName().str() : "alloca." + itostr(N);
    Replacement->setName(Name + ".hwasan");

    AI->replaceUsesWithIf(Replacement,
                          [AILong](Use &U) { return U.getUser() != AILong; });

    for (auto *DDI : AllocaDbgMap.lookup(AI)) {
      // Prepend "tag_offset, N" to the dwarf expression.
      // Tag offset logically applies to the alloca pointer, and it makes sense
      // to put it at the beginning of the expression.
      SmallVector<uint64_t, 8> NewOps = {dwarf::DW_OP_LLVM_tag_offset,
                                         retagMask(N)};
      for (size_t LocNo = 0; LocNo < DDI->getNumVariableLocationOps(); ++LocNo)
        if (DDI->getVariableLocationOp(LocNo) == AI)
          DDI->setExpression(DIExpression::appendOpsToArg(DDI->getExpression(),
                                                          NewOps, LocNo));
    }

    size_t Size = getAllocaSizeInBytes(*AI);
    size_t AlignedSize = alignTo(Size, Mapping.getObjectAlignment());
    auto TagEnd = [&](Instruction *Node) {
      IRB.SetInsertPoint(Node);
      Value *UARTag = getUARTag(IRB, StackTag);
      tagAlloca(IRB, AI, UARTag, AlignedSize);
    };
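    // TagEnd retags the whole (granule-aligned) object with the UAR tag at a
    // given exit point; it is used both for lifetime ends and for returns.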
    bool StandardLifetime =
        UnrecognizedLifetimes.empty() && isStandardLifetime(Info, GetDT());
    if (ShouldDetectUseAfterScope && StandardLifetime) {
      IntrinsicInst *Start = Info.LifetimeStart[0];
      IRB.SetInsertPoint(Start->getNextNode());
      tagAlloca(IRB, AI, Tag, Size);
      if (!forAllReachableExits(GetDT(), GetPDT(), Start, Info.LifetimeEnd,
                                RetVec, TagEnd)) {
        for (auto *End : Info.LifetimeEnd)
          End->eraseFromParent();
      }
    } else {
      tagAlloca(IRB, AI, Tag, Size);
      for (auto *RI : RetVec)
        TagEnd(RI);
      if (!StandardLifetime) {
        for (auto &II : Info.LifetimeStart)
          II->eraseFromParent();
        for (auto &II : Info.LifetimeEnd)
          II->eraseFromParent();
      }
    }
  }
  for (auto &I : UnrecognizedLifetimes)
    I->eraseFromParent();
  return true;
}
bool HWAddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
  return (AI.getAllocatedType()->isSized() &&
          // FIXME: instrument dynamic allocas, too
          AI.isStaticAlloca() &&
          // alloca() may be called with 0 size, ignore it.
          getAllocaSizeInBytes(AI) > 0 &&
          // We are only interested in allocas not promotable to registers.
          // Promotable allocas are common under -O0.
          !isAllocaPromotable(&AI) &&
          // inalloca allocas are not treated as static, and we don't want
          // dynamic alloca instrumentation for them as well.
          !AI.isUsedWithInAlloca() &&
          // swifterror allocas are register promoted by ISel
          !AI.isSwiftError()) &&
         // safe allocas are not interesting
         !(SSI && SSI->isSafe(AI));
}
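// Realign each instrumented alloca to the tag granule and, when its size is
// not a granule multiple, wrap it in a struct padded out to the next granule
// boundary so there is room to store the short granule tag. For example
// (illustrative): with 16-byte granules, a 13-byte alloca of type T becomes
// an alloca of { T, [3 x i8] }.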
DenseMap<AllocaInst *, AllocaInst *> HWAddressSanitizer::padInterestingAllocas(
    const MapVector<AllocaInst *, AllocaInfo> &AllocasToInstrument) {
  DenseMap<AllocaInst *, AllocaInst *> AllocaToPaddedAllocaMap;
  for (auto &KV : AllocasToInstrument) {
    AllocaInst *AI = KV.first;
    uint64_t Size = getAllocaSizeInBytes(*AI);
    uint64_t AlignedSize = alignTo(Size, Mapping.getObjectAlignment());
    AI->setAlignment(
        Align(std::max(AI->getAlignment(), Mapping.getObjectAlignment())));
    if (Size != AlignedSize) {
      Type *AllocatedType = AI->getAllocatedType();
      if (AI->isArrayAllocation()) {
        uint64_t ArraySize =
            cast<ConstantInt>(AI->getArraySize())->getZExtValue();
        AllocatedType = ArrayType::get(AllocatedType, ArraySize);
      }
      Type *TypeWithPadding = StructType::get(
          AllocatedType, ArrayType::get(Int8Ty, AlignedSize - Size));
      auto *NewAI = new AllocaInst(
          TypeWithPadding, AI->getType()->getAddressSpace(), nullptr, "", AI);
      NewAI->takeName(AI);
      NewAI->setAlignment(AI->getAlign());
      NewAI->setUsedWithInAlloca(AI->isUsedWithInAlloca());
      NewAI->setSwiftError(AI->isSwiftError());
      NewAI->copyMetadata(*AI);
      auto *Bitcast = new BitCastInst(NewAI, AI->getType(), "", AI);
      AI->replaceAllUsesWith(Bitcast);
      AllocaToPaddedAllocaMap[AI] = NewAI;
    }
  }
  return AllocaToPaddedAllocaMap;
}
bool HWAddressSanitizer::sanitizeFunction(
    Function &F, llvm::function_ref<const DominatorTree &()> GetDT,
    llvm::function_ref<const PostDominatorTree &()> GetPDT) {
  if (&F == HwasanCtorFunction)
    return false;

  if (!F.hasFnAttribute(Attribute::SanitizeHWAddress))
    return false;

  LLVM_DEBUG(dbgs() << "Function: " << F.getName() << "\n");

  SmallVector<InterestingMemoryOperand, 16> OperandsToInstrument;
  SmallVector<MemIntrinsic *, 16> IntrinToInstrument;
  MapVector<AllocaInst *, AllocaInfo> AllocasToInstrument;
  SmallVector<Instruction *, 8> RetVec;
  SmallVector<Instruction *, 8> LandingPadVec;
  SmallVector<Instruction *, 4> UnrecognizedLifetimes;
  DenseMap<AllocaInst *, std::vector<DbgVariableIntrinsic *>> AllocaDbgMap;
  bool CallsReturnTwice = false;
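  // A single pass over the function collects everything that is instrumented
  // later: interesting allocas and their lifetime markers, return-like exits,
  // debug intrinsics referring to allocas, landing pads, and interesting
  // memory operands.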
  for (auto &BB : F) {
    for (auto &Inst : BB) {
      if (CallInst *CI = dyn_cast<CallInst>(&Inst)) {
        if (CI->canReturnTwice()) {
          CallsReturnTwice = true;
        }
      }
      if (InstrumentStack) {
        if (AllocaInst *AI = dyn_cast<AllocaInst>(&Inst)) {
          if (isInterestingAlloca(*AI))
            AllocasToInstrument.insert({AI, {}});
          continue;
        }
        auto *II = dyn_cast<IntrinsicInst>(&Inst);
        if (II && (II->getIntrinsicID() == Intrinsic::lifetime_start ||
                   II->getIntrinsicID() == Intrinsic::lifetime_end)) {
          AllocaInst *AI = findAllocaForValue(II->getArgOperand(1));
          if (!AI) {
            UnrecognizedLifetimes.push_back(&Inst);
            continue;
          }
          if (!isInterestingAlloca(*AI))
            continue;
          if (II->getIntrinsicID() == Intrinsic::lifetime_start)
            AllocasToInstrument[AI].LifetimeStart.push_back(II);
          else
            AllocasToInstrument[AI].LifetimeEnd.push_back(II);
          continue;
        }
      }
      if (isa<ReturnInst>(Inst)) {
        if (CallInst *CI = Inst.getParent()->getTerminatingMustTailCall())
          RetVec.push_back(CI);
        else
          RetVec.push_back(&Inst);
      } else if (isa<ResumeInst, CleanupReturnInst>(Inst)) {
        RetVec.push_back(&Inst);
      }
      if (auto *DVI = dyn_cast<DbgVariableIntrinsic>(&Inst)) {
        for (Value *V : DVI->location_ops()) {
          if (auto *Alloca = dyn_cast_or_null<AllocaInst>(V))
            if (!AllocaDbgMap.count(Alloca) ||
                AllocaDbgMap[Alloca].back() != DVI)
              AllocaDbgMap[Alloca].push_back(DVI);
        }
      }
      if (InstrumentLandingPads && isa<LandingPadInst>(Inst))
        LandingPadVec.push_back(&Inst);
      getInterestingMemoryOperands(&Inst, OperandsToInstrument);
      if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(&Inst))
        if (!ignoreMemIntrinsic(MI))
          IntrinToInstrument.push_back(MI);
    }
  }
  initializeCallbacks(*F.getParent());

  bool Changed = false;

  if (!LandingPadVec.empty())
    Changed |= instrumentLandingPads(LandingPadVec);

  if (AllocasToInstrument.empty() && F.hasPersonalityFn() &&
      F.getPersonalityFn()->getName() == kHwasanPersonalityThunkName) {
    // __hwasan_personality_thunk is a no-op for functions without an
    // instrumented stack, so we can drop it.
    F.setPersonalityFn(nullptr);
    Changed = true;
  }

  if (AllocasToInstrument.empty() && OperandsToInstrument.empty() &&
      IntrinToInstrument.empty())
    return Changed;
  assert(!ShadowBase);

  Instruction *InsertPt = &*F.getEntryBlock().begin();
  IRBuilder<> EntryIRB(InsertPt);
  emitPrologue(EntryIRB,
               /*WithFrameRecord*/ ClRecordStackHistory &&
                   Mapping.WithFrameRecord && !AllocasToInstrument.empty());

  if (!AllocasToInstrument.empty()) {
    Value *StackTag =
        ClGenerateTagsWithCalls ? nullptr : getStackBaseTag(EntryIRB);
    // Calls to functions that may return twice (e.g. setjmp) confuse the
    // postdominator analysis and can cause us to leave memory tagged after
    // function return. Work around this by always untagging at every return
    // statement if return_twice functions are called.
    instrumentStack(DetectUseAfterScope && !CallsReturnTwice,
                    AllocasToInstrument, UnrecognizedLifetimes, AllocaDbgMap,
                    RetVec, StackTag, GetDT, GetPDT);
  }
  // Pad and align each of the allocas that we instrumented to stop small
  // uninteresting allocas from hiding in an instrumented alloca's padding and
  // so that we have enough space to store real tags for short granules.
  DenseMap<AllocaInst *, AllocaInst *> AllocaToPaddedAllocaMap =
      padInterestingAllocas(AllocasToInstrument);

  if (!AllocaToPaddedAllocaMap.empty()) {
    for (auto &BB : F) {
      for (auto &Inst : BB) {
        if (auto *DVI = dyn_cast<DbgVariableIntrinsic>(&Inst)) {
          SmallDenseSet<Value *> LocationOps(DVI->location_ops().begin(),
                                             DVI->location_ops().end());
          for (Value *V : LocationOps) {
            if (auto *AI = dyn_cast_or_null<AllocaInst>(V)) {
              if (auto *NewAI = AllocaToPaddedAllocaMap.lookup(AI))
                DVI->replaceVariableLocationOp(V, NewAI);
            }
          }
        }
      }
    }
    for (auto &P : AllocaToPaddedAllocaMap)
      P.first->eraseFromParent();
  }
  // If we split the entry block, move any allocas that were originally in the
  // entry block back into the entry block so that they aren't treated as
  // dynamic allocas.
  if (EntryIRB.GetInsertBlock() != &F.getEntryBlock()) {
    InsertPt = &*F.getEntryBlock().begin();
    for (Instruction &I :
         llvm::make_early_inc_range(*EntryIRB.GetInsertBlock())) {
      if (auto *AI = dyn_cast<AllocaInst>(&I))
        if (isa<ConstantInt>(AI->getArraySize()))
          I.moveBefore(InsertPt);
    }
  }

  for (auto &Operand : OperandsToInstrument)
    instrumentMemAccess(Operand);

  if (ClInstrumentMemIntrinsics && !IntrinToInstrument.empty()) {
    for (auto *Inst : IntrinToInstrument)
      instrumentMemIntrinsic(cast<MemIntrinsic>(Inst));
  }

  ShadowBase = nullptr;
  StackBaseTag = nullptr;

  return true;
}
void HWAddressSanitizer::instrumentGlobal(GlobalVariable *GV, uint8_t Tag) {
  assert(!UsePageAliases);
  Constant *Initializer = GV->getInitializer();
  uint64_t SizeInBytes =
      M.getDataLayout().getTypeAllocSize(Initializer->getType());
  uint64_t NewSize = alignTo(SizeInBytes, Mapping.getObjectAlignment());
  if (SizeInBytes != NewSize) {
    // Pad the initializer out to the next multiple of 16 bytes and add the
    // required short granule tag.
    std::vector<uint8_t> Init(NewSize - SizeInBytes, 0);
    Init.back() = Tag;
    Constant *Padding = ConstantDataArray::get(*C, Init);
    Initializer = ConstantStruct::getAnon({Initializer, Padding});
  }

  auto *NewGV = new GlobalVariable(M, Initializer->getType(), GV->isConstant(),
                                   GlobalValue::ExternalLinkage, Initializer,
                                   GV->getName() + ".hwasan");
  NewGV->copyAttributesFrom(GV);
  NewGV->setLinkage(GlobalValue::PrivateLinkage);
  NewGV->copyMetadata(GV, 0);
  NewGV->setAlignment(
      MaybeAlign(std::max(GV->getAlignment(), Mapping.getObjectAlignment())));

  // It is invalid to ICF two globals that have different tags. In the case
  // where the size of the global is a multiple of the tag granularity the
  // contents of the globals may be the same but the tags (i.e. symbol values)
  // may be different, and the symbols are not considered during ICF. In the
  // case where the size is not a multiple of the granularity, the short
  // granule tags would discriminate two globals with different tags, but there
  // would otherwise be nothing stopping such a global from being incorrectly
  // ICF'd with an uninstrumented (i.e. tag 0) global that happened to have the
  // short granule tag in the last byte.
  NewGV->setUnnamedAddr(GlobalValue::UnnamedAddr::None);
  // Descriptor format (assuming little-endian):
  // bytes 0-3: relative address of global
  // bytes 4-6: size of global (16MB ought to be enough for anyone, but in case
  // it isn't, we create multiple descriptors)
  // byte 7: tag
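  // For example (illustrative): a 32-byte global with tag 0x2a gets a single
  // descriptor whose second 32-bit word is 0x2a000020 (size 32 in the low 24
  // bits, tag in the top byte).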
  auto *DescriptorTy = StructType::get(Int32Ty, Int32Ty);
  const uint64_t MaxDescriptorSize = 0xfffff0;
  for (uint64_t DescriptorPos = 0; DescriptorPos < SizeInBytes;
       DescriptorPos += MaxDescriptorSize) {
    auto *Descriptor =
        new GlobalVariable(M, DescriptorTy, true, GlobalValue::PrivateLinkage,
                           nullptr, GV->getName() + ".hwasan.descriptor");
    auto *GVRelPtr = ConstantExpr::getTrunc(
        ConstantExpr::getAdd(
            ConstantExpr::getSub(
                ConstantExpr::getPtrToInt(NewGV, Int64Ty),
                ConstantExpr::getPtrToInt(Descriptor, Int64Ty)),
            ConstantInt::get(Int64Ty, DescriptorPos)),
        Int32Ty);
    uint32_t Size = std::min(SizeInBytes - DescriptorPos, MaxDescriptorSize);
    auto *SizeAndTag = ConstantInt::get(Int32Ty, Size | (uint32_t(Tag) << 24));
    Descriptor->setComdat(NewGV->getComdat());
    Descriptor->setInitializer(ConstantStruct::getAnon({GVRelPtr, SizeAndTag}));
    Descriptor->setSection("hwasan_globals");
    Descriptor->setMetadata(LLVMContext::MD_associated,
                            MDNode::get(*C, ValueAsMetadata::get(NewGV)));
    appendToCompilerUsed(M, Descriptor);
  }
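  // Bake the tag into the symbol's address: the alias that replaces GV points
  // at NewGV with the tag in the pointer's top byte, so taking the global's
  // address already yields a tagged pointer.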
  Constant *Aliasee = ConstantExpr::getIntToPtr(
      ConstantExpr::getAdd(
          ConstantExpr::getPtrToInt(NewGV, Int64Ty),
          ConstantInt::get(Int64Ty, uint64_t(Tag) << PointerTagShift)),
      GV->getType());
  auto *Alias = GlobalAlias::create(GV->getValueType(), GV->getAddressSpace(),
                                    GV->getLinkage(), "", Aliasee, &M);
  Alias->setVisibility(GV->getVisibility());
  Alias->takeName(GV);
  GV->replaceAllUsesWith(Alias);
  GV->eraseFromParent();
}
static DenseSet<GlobalVariable *> getExcludedGlobals(Module &M) {
  NamedMDNode *Globals = M.getNamedMetadata("llvm.asan.globals");
  if (!Globals)
    return DenseSet<GlobalVariable *>();
  DenseSet<GlobalVariable *> Excluded(Globals->getNumOperands());
  for (auto MDN : Globals->operands()) {
    // Metadata node contains the global and the fields of "Entry".
    assert(MDN->getNumOperands() == 5);
    auto *V = mdconst::extract_or_null<Constant>(MDN->getOperand(0));
    // The optimizer may optimize away a global entirely.
    if (!V)
      continue;
    auto *StrippedV = V->stripPointerCasts();
    auto *GV = dyn_cast<GlobalVariable>(StrippedV);
    if (!GV)
      continue;
    ConstantInt *IsExcluded = mdconst::extract<ConstantInt>(MDN->getOperand(4));
    if (IsExcluded->isOne())
      Excluded.insert(GV);
  }
  return Excluded;
}
void HWAddressSanitizer::instrumentGlobals() {
  std::vector<GlobalVariable *> Globals;
  auto ExcludedGlobals = getExcludedGlobals(M);
  for (GlobalVariable &GV : M.globals()) {
    if (ExcludedGlobals.count(&GV))
      continue;

    if (GV.isDeclarationForLinker() || GV.getName().startswith("llvm.") ||
        GV.isThreadLocal())
      continue;

    // Common symbols can't have aliases pointing to them, so they can't be
    // tagged.
    if (GV.hasCommonLinkage())
      continue;

    // Globals with custom sections may be used in __start_/__stop_
    // enumeration, which would be broken both by adding tags and potentially
    // by the extra padding/alignment that we insert.
    if (GV.hasSection())
      continue;

    Globals.push_back(&GV);
  }
  MD5 Hasher;
  Hasher.update(M.getSourceFileName());
  MD5::MD5Result Hash;
  Hasher.final(Hash);
  uint8_t Tag = Hash[0];
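  // Assign consecutive tags starting from a seed derived from the source file
  // name, so tag assignment is deterministic for a given module while
  // different modules start at different points in the tag space.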
  for (GlobalVariable *GV : Globals) {
    Tag &= TagMaskByte;
    // Skip tag 0 in order to avoid collisions with untagged memory.
    if (Tag == 0)
      Tag = 1;
    instrumentGlobal(GV, Tag++);
  }
}
void HWAddressSanitizer::instrumentPersonalityFunctions() {
  // We need to untag stack frames as we unwind past them. That is the job of
  // the personality function wrapper, which either wraps an existing
  // personality function or acts as a personality function on its own. Each
  // function that has a personality function or that can be unwound past has
  // its personality function changed to a thunk that calls the personality
  // function wrapper in the runtime.
  MapVector<Constant *, std::vector<Function *>> PersonalityFns;
  for (Function &F : M) {
    if (F.isDeclaration() || !F.hasFnAttribute(Attribute::SanitizeHWAddress))
      continue;

    if (F.hasPersonalityFn()) {
      PersonalityFns[F.getPersonalityFn()->stripPointerCasts()].push_back(&F);
    } else if (!F.hasFnAttribute(Attribute::NoUnwind)) {
      PersonalityFns[nullptr].push_back(&F);
    }
  }

  if (PersonalityFns.empty())
    return;

  FunctionCallee HwasanPersonalityWrapper = M.getOrInsertFunction(
      "__hwasan_personality_wrapper", Int32Ty, Int32Ty, Int32Ty, Int64Ty,
      Int8PtrTy, Int8PtrTy, Int8PtrTy, Int8PtrTy, Int8PtrTy);
  FunctionCallee UnwindGetGR = M.getOrInsertFunction("_Unwind_GetGR", VoidTy);
  FunctionCallee UnwindGetCFA = M.getOrInsertFunction("_Unwind_GetCFA", VoidTy);
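  // The _Unwind_* declarations above are only ever address-taken and passed
  // into the wrapper as opaque pointers, so their declared type (VoidTy) is
  // irrelevant.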
  for (auto &P : PersonalityFns) {
    std::string ThunkName = kHwasanPersonalityThunkName;
    if (P.first)
      ThunkName += ("." + P.first->getName()).str();
    FunctionType *ThunkFnTy = FunctionType::get(
        Int32Ty, {Int32Ty, Int32Ty, Int64Ty, Int8PtrTy, Int8PtrTy}, false);
    bool IsLocal = P.first && (!isa<GlobalValue>(P.first) ||
                               cast<GlobalValue>(P.first)->hasLocalLinkage());
    auto *ThunkFn = Function::Create(ThunkFnTy,
                                     IsLocal ? GlobalValue::InternalLinkage
                                             : GlobalValue::LinkOnceODRLinkage,
                                     ThunkName, &M);
    if (!IsLocal) {
      ThunkFn->setVisibility(GlobalValue::HiddenVisibility);
      ThunkFn->setComdat(M.getOrInsertComdat(ThunkName));
    }

    auto *BB = BasicBlock::Create(*C, "entry", ThunkFn);
    IRBuilder<> IRB(BB);
    CallInst *WrapperCall = IRB.CreateCall(
        HwasanPersonalityWrapper,
        {ThunkFn->getArg(0), ThunkFn->getArg(1), ThunkFn->getArg(2),
         ThunkFn->getArg(3), ThunkFn->getArg(4),
         P.first ? IRB.CreateBitCast(P.first, Int8PtrTy)
                 : Constant::getNullValue(Int8PtrTy),
         IRB.CreateBitCast(UnwindGetGR.getCallee(), Int8PtrTy),
         IRB.CreateBitCast(UnwindGetCFA.getCallee(), Int8PtrTy)});
    WrapperCall->setTailCall();
    IRB.CreateRet(WrapperCall);

    for (Function *F : P.second)
      F->setPersonalityFn(ThunkFn);
  }
}
void HWAddressSanitizer::ShadowMapping::init(Triple &TargetTriple,
                                             bool InstrumentWithCalls) {
  Scale = kDefaultShadowScale;
  if (TargetTriple.isOSFuchsia()) {
    // Fuchsia is always PIE, which means that the beginning of the address
    // space is always available.
    InGlobal = false;
    InTls = false;
    Offset = 0;
    WithFrameRecord = true;
  } else if (ClMappingOffset.getNumOccurrences() > 0) {
    InGlobal = false;
    InTls = false;
    Offset = ClMappingOffset;
    WithFrameRecord = false;
  } else if (ClEnableKhwasan || InstrumentWithCalls) {
    InGlobal = false;
    InTls = false;
    Offset = 0;
    WithFrameRecord = false;
  } else if (ClWithIfunc) {
    InGlobal = true;
    InTls = false;
    Offset = kDynamicShadowSentinel;
    WithFrameRecord = false;
  } else if (ClWithTls) {
    InGlobal = false;
    InTls = true;
    Offset = kDynamicShadowSentinel;
    WithFrameRecord = true;
  } else {
    InGlobal = false;
    InTls = false;
    Offset = kDynamicShadowSentinel;
    WithFrameRecord = false;
  }
}