//===- AddressSanitizer.cpp - memory error detector -----------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file is a part of AddressSanitizer, an address basic correctness
// checker.
// Details of the algorithm:
// https://github.com/google/sanitizers/wiki/AddressSanitizerAlgorithm
//
// FIXME: This sanitizer does not yet handle scalable vectors
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Instrumentation/AddressSanitizer.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DepthFirstIterator.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/StringExtras.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/ADT/Triple.h"
#include "llvm/ADT/Twine.h"
#include "llvm/Analysis/GlobalsModRef.h"
#include "llvm/Analysis/MemoryBuiltins.h"
#include "llvm/Analysis/StackSafetyAnalysis.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/BinaryFormat/MachO.h"
#include "llvm/Demangle/Demangle.h"
#include "llvm/IR/Argument.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Comdat.h"
#include "llvm/IR/Constant.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DIBuilder.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DebugLoc.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/GlobalAlias.h"
#include "llvm/IR/GlobalValue.h"
#include "llvm/IR/GlobalVariable.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Metadata.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Use.h"
#include "llvm/IR/Value.h"
#include "llvm/MC/MCSectionMachO.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MathExtras.h"
#include "llvm/Support/raw_ostream.h"
#include "llvm/Transforms/Instrumentation.h"
#include "llvm/Transforms/Instrumentation/AddressSanitizerCommon.h"
#include "llvm/Transforms/Instrumentation/AddressSanitizerOptions.h"
#include "llvm/Transforms/Utils/ASanStackFrameLayout.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include "llvm/Transforms/Utils/Local.h"
#include "llvm/Transforms/Utils/ModuleUtils.h"
#include "llvm/Transforms/Utils/PromoteMemToReg.h"
#include <algorithm>
#include <cassert>
#include <cstddef>
#include <cstdint>
#include <iomanip>
#include <limits>
#include <sstream>
#include <string>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "asan"

static const uint64_t kDefaultShadowScale = 3;
static const uint64_t kDefaultShadowOffset32 = 1ULL << 29;
static const uint64_t kDefaultShadowOffset64 = 1ULL << 44;
static const uint64_t kDynamicShadowSentinel =
    std::numeric_limits<uint64_t>::max();
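// A mapping offset equal to this sentinel (all ones) means the shadow base is
// not a link-time constant; the pass instead obtains it at function entry via
// __asan_shadow_memory_dynamic_address (see kAsanShadowMemoryDynamicAddress
// and maybeInsertDynamicShadowAtFunctionEntry below).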
static const uint64_t kSmallX86_64ShadowOffsetBase = 0x7FFFFFFF; // < 2G.
static const uint64_t kSmallX86_64ShadowOffsetAlignMask = ~0xFFFULL;
static const uint64_t kLinuxKasan_ShadowOffset64 = 0xdffffc0000000000;
static const uint64_t kPPC64_ShadowOffset64 = 1ULL << 44;
static const uint64_t kSystemZ_ShadowOffset64 = 1ULL << 52;
static const uint64_t kMIPS_ShadowOffsetN32 = 1ULL << 29;
static const uint64_t kMIPS32_ShadowOffset32 = 0x0aaa0000;
static const uint64_t kMIPS64_ShadowOffset64 = 1ULL << 37;
static const uint64_t kAArch64_ShadowOffset64 = 1ULL << 36;
static const uint64_t kLoongArch64_ShadowOffset64 = 1ULL << 46;
static const uint64_t kRISCV64_ShadowOffset64 = 0xd55550000;
static const uint64_t kFreeBSD_ShadowOffset32 = 1ULL << 30;
static const uint64_t kFreeBSD_ShadowOffset64 = 1ULL << 46;
static const uint64_t kFreeBSDAArch64_ShadowOffset64 = 1ULL << 47;
static const uint64_t kFreeBSDKasan_ShadowOffset64 = 0xdffff7c000000000;
static const uint64_t kNetBSD_ShadowOffset32 = 1ULL << 30;
static const uint64_t kNetBSD_ShadowOffset64 = 1ULL << 46;
static const uint64_t kNetBSDKasan_ShadowOffset64 = 0xdfff900000000000;
static const uint64_t kPS_ShadowOffset64 = 1ULL << 40;
static const uint64_t kWindowsShadowOffset32 = 3ULL << 28;
static const uint64_t kEmscriptenShadowOffset = 0;
// The shadow memory space is dynamically allocated.
static const uint64_t kWindowsShadowOffset64 = kDynamicShadowSentinel;

static const size_t kMinStackMallocSize = 1 << 6;  // 64B
static const size_t kMaxStackMallocSize = 1 << 16; // 64K
static const uintptr_t kCurrentStackFrameMagic = 0x41B58AB3;
static const uintptr_t kRetiredStackFrameMagic = 0x45E0360E;

const char kAsanModuleCtorName[] = "asan.module_ctor";
const char kAsanModuleDtorName[] = "asan.module_dtor";
static const uint64_t kAsanCtorAndDtorPriority = 1;
// On Emscripten, the system needs more than one priority for constructors.
static const uint64_t kAsanEmscriptenCtorAndDtorPriority = 50;

const char kAsanReportErrorTemplate[] = "__asan_report_";
const char kAsanRegisterGlobalsName[] = "__asan_register_globals";
const char kAsanUnregisterGlobalsName[] = "__asan_unregister_globals";
const char kAsanRegisterImageGlobalsName[] = "__asan_register_image_globals";
const char kAsanUnregisterImageGlobalsName[] =
    "__asan_unregister_image_globals";
const char kAsanRegisterElfGlobalsName[] = "__asan_register_elf_globals";
const char kAsanUnregisterElfGlobalsName[] = "__asan_unregister_elf_globals";
const char kAsanPoisonGlobalsName[] = "__asan_before_dynamic_init";
const char kAsanUnpoisonGlobalsName[] = "__asan_after_dynamic_init";
const char kAsanInitName[] = "__asan_init";
const char kAsanVersionCheckNamePrefix[] = "__asan_version_mismatch_check_v";
const char kAsanPtrCmp[] = "__sanitizer_ptr_cmp";
const char kAsanPtrSub[] = "__sanitizer_ptr_sub";
const char kAsanHandleNoReturnName[] = "__asan_handle_no_return";
static const int kMaxAsanStackMallocSizeClass = 10;
const char kAsanStackMallocNameTemplate[] = "__asan_stack_malloc_";
const char kAsanStackMallocAlwaysNameTemplate[] =
    "__asan_stack_malloc_always_";
const char kAsanStackFreeNameTemplate[] = "__asan_stack_free_";
const char kAsanGenPrefix[] = "___asan_gen_";
const char kODRGenPrefix[] = "__odr_asan_gen_";
const char kSanCovGenPrefix[] = "__sancov_gen_";
const char kAsanSetShadowPrefix[] = "__asan_set_shadow_";
const char kAsanPoisonStackMemoryName[] = "__asan_poison_stack_memory";
const char kAsanUnpoisonStackMemoryName[] = "__asan_unpoison_stack_memory";

// ASan version script has __asan_* wildcard. Triple underscore prevents a
// linker (gold) warning about attempting to export a local symbol.
const char kAsanGlobalsRegisteredFlagName[] = "___asan_globals_registered";

const char kAsanOptionDetectUseAfterReturn[] =
    "__asan_option_detect_stack_use_after_return";

const char kAsanShadowMemoryDynamicAddress[] =
    "__asan_shadow_memory_dynamic_address";

const char kAsanAllocaPoison[] = "__asan_alloca_poison";
const char kAsanAllocasUnpoison[] = "__asan_allocas_unpoison";

const char kAMDGPUAddressSharedName[] = "llvm.amdgcn.is.shared";
const char kAMDGPUAddressPrivateName[] = "llvm.amdgcn.is.private";
// Access sizes are powers of two: 1, 2, 4, 8, 16.
static const size_t kNumberOfAccessSizes = 5;

static const uint64_t kAllocaRzSize = 32;

// ASanAccessInfo implementation constants.
constexpr size_t kCompileKernelShift = 0;
constexpr size_t kCompileKernelMask = 0x1;
constexpr size_t kAccessSizeIndexShift = 1;
constexpr size_t kAccessSizeIndexMask = 0xf;
constexpr size_t kIsWriteShift = 5;
constexpr size_t kIsWriteMask = 0x1;
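// With the shift/mask values above, a packed ASanAccessInfo value decomposes
// as: bit 0 = CompileKernel, bits 1-4 = AccessSizeIndex (log2 of the access
// size), bit 5 = IsWrite. For example, a 4-byte user-mode write packs to
// (1 << 5) | (2 << 1) = 0x24.
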
// Command-line flags.

static cl::opt<bool> ClEnableKasan(
    "asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClRecover(
    "asan-recover",
    cl::desc("Enable recovery mode (continue-after-error)."),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClInsertVersionCheck(
    "asan-guard-against-version-mismatch",
    cl::desc("Guard against compiler/runtime version mismatch."),
    cl::Hidden, cl::init(true));

// This flag may need to be replaced with -f[no-]asan-reads.
static cl::opt<bool> ClInstrumentReads("asan-instrument-reads",
                                       cl::desc("instrument read instructions"),
                                       cl::Hidden, cl::init(true));

static cl::opt<bool> ClInstrumentWrites(
    "asan-instrument-writes", cl::desc("instrument write instructions"),
    cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(false),
                     cl::Hidden, cl::desc("Use Stack Safety analysis results"),
                     cl::Optional);

static cl::opt<bool> ClInstrumentAtomics(
    "asan-instrument-atomics",
    cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
    cl::init(true));

static cl::opt<bool>
    ClInstrumentByval("asan-instrument-byval",
                      cl::desc("instrument byval call arguments"), cl::Hidden,
                      cl::init(true));

static cl::opt<bool> ClAlwaysSlowPath(
    "asan-always-slow-path",
    cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden,
    cl::init(false));

static cl::opt<bool> ClForceDynamicShadow(
    "asan-force-dynamic-shadow",
    cl::desc("Load shadow address into a local variable for each function"),
    cl::Hidden, cl::init(false));

static cl::opt<bool>
    ClWithIfunc("asan-with-ifunc",
                cl::desc("Access dynamic shadow through an ifunc global on "
                         "platforms that support this"),
                cl::Hidden, cl::init(true));

static cl::opt<bool> ClWithIfuncSuppressRemat(
    "asan-with-ifunc-suppress-remat",
    cl::desc("Suppress rematerialization of dynamic shadow address by passing "
             "it through inline asm in prologue."),
    cl::Hidden, cl::init(true));
// This flag limits the number of instructions to be instrumented
// in any given BB. Normally, this should be set to unlimited (INT_MAX),
// but due to http://llvm.org/bugs/show_bug.cgi?id=12652 we temporarily
// set it to 10000.
static cl::opt<int> ClMaxInsnsToInstrumentPerBB(
    "asan-max-ins-per-bb", cl::init(10000),
    cl::desc("maximal number of instructions to instrument in any given BB"),
    cl::Hidden);

// This flag may need to be replaced with -f[no]asan-stack.
static cl::opt<bool> ClStack("asan-stack", cl::desc("Handle stack memory"),
                             cl::Hidden, cl::init(true));

static cl::opt<uint32_t> ClMaxInlinePoisoningSize(
    "asan-max-inline-poisoning-size",
    cl::desc(
        "Inline shadow poisoning for blocks up to the given size in bytes."),
    cl::Hidden, cl::init(64));

static cl::opt<AsanDetectStackUseAfterReturnMode> ClUseAfterReturn(
    "asan-use-after-return",
    cl::desc("Sets the mode of detection for stack-use-after-return."),
    cl::values(
        clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never",
                   "Never detect stack use after return."),
        clEnumValN(
            AsanDetectStackUseAfterReturnMode::Runtime, "runtime",
            "Detect stack use after return if "
            "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
        clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always",
                   "Always detect stack use after return.")),
    cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime));

static cl::opt<bool> ClRedzoneByvalArgs("asan-redzone-byval-args",
                                        cl::desc("Create redzones for byval "
                                                 "arguments (extra copy "
                                                 "required)"), cl::Hidden,
                                        cl::init(true));

static cl::opt<bool> ClUseAfterScope("asan-use-after-scope",
                                     cl::desc("Check stack-use-after-scope"),
                                     cl::Hidden, cl::init(false));

// This flag may need to be replaced with -f[no]asan-globals.
static cl::opt<bool> ClGlobals("asan-globals",
                               cl::desc("Handle global objects"), cl::Hidden,
                               cl::init(true));

static cl::opt<bool> ClInitializers("asan-initialization-order",
                                    cl::desc("Handle C++ initializer order"),
                                    cl::Hidden, cl::init(true));

static cl::opt<bool> ClInvalidPointerPairs(
    "asan-detect-invalid-pointer-pair",
    cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden,
    cl::init(false));

static cl::opt<bool> ClInvalidPointerCmp(
    "asan-detect-invalid-pointer-cmp",
    cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden,
    cl::init(false));

static cl::opt<bool> ClInvalidPointerSub(
    "asan-detect-invalid-pointer-sub",
    cl::desc("Instrument - operations with pointer operands"), cl::Hidden,
    cl::init(false));

static cl::opt<unsigned> ClRealignStack(
    "asan-realign-stack",
    cl::desc("Realign stack to the value of this flag (power of two)"),
    cl::Hidden, cl::init(32));

static cl::opt<int> ClInstrumentationWithCallsThreshold(
    "asan-instrumentation-with-call-threshold",
    cl::desc(
        "If the function being instrumented contains more than "
        "this number of memory accesses, use callbacks instead of "
        "inline checks (-1 means never use callbacks)."),
    cl::Hidden, cl::init(7000));

static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
    "asan-memory-access-callback-prefix",
    cl::desc("Prefix for memory access callbacks"), cl::Hidden,
    cl::init("__asan_"));

static cl::opt<bool> ClKasanMemIntrinCallbackPrefix(
    "asan-kernel-mem-intrinsic-prefix",
    cl::desc("Use prefix for memory intrinsics in KASAN mode"), cl::Hidden,
    cl::init(false));

static cl::opt<bool>
    ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas",
                               cl::desc("instrument dynamic allocas"),
                               cl::Hidden, cl::init(true));

static cl::opt<bool> ClSkipPromotableAllocas(
    "asan-skip-promotable-allocas",
    cl::desc("Do not instrument promotable allocas"), cl::Hidden,
    cl::init(true));

static cl::opt<AsanCtorKind> ClConstructorKind(
    "asan-constructor-kind",
    cl::desc("Sets the ASan constructor kind"),
    cl::values(clEnumValN(AsanCtorKind::None, "none", "No constructors"),
               clEnumValN(AsanCtorKind::Global, "global",
                          "Use global constructors")),
    cl::init(AsanCtorKind::Global), cl::Hidden);
// These flags allow changing the shadow mapping.
// The shadow mapping looks like
//    Shadow = (Mem >> scale) + offset
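// For example, with the default scale of 3 and the small x86-64 offset
// computed below (0x7FFFFFFF & (~0xFFFULL << 3) == 0x7fff8000), the byte at
// address Mem is described by the shadow byte at (Mem >> 3) + 0x7fff8000,
// i.e. one shadow byte covers 8 application bytes.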
static cl::opt<int> ClMappingScale("asan-mapping-scale",
                                   cl::desc("scale of asan shadow mapping"),
                                   cl::Hidden, cl::init(0));

static cl::opt<uint64_t>
    ClMappingOffset("asan-mapping-offset",
                    cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"),
                    cl::Hidden, cl::init(0));

// Optimization flags. Not user visible, used mostly for testing
// and benchmarking the tool.

static cl::opt<bool> ClOpt("asan-opt", cl::desc("Optimize instrumentation"),
                           cl::Hidden, cl::init(true));

static cl::opt<bool> ClOptimizeCallbacks("asan-optimize-callbacks",
                                         cl::desc("Optimize callbacks"),
                                         cl::Hidden, cl::init(false));

static cl::opt<bool> ClOptSameTemp(
    "asan-opt-same-temp", cl::desc("Instrument the same temp just once"),
    cl::Hidden, cl::init(true));

static cl::opt<bool> ClOptGlobals("asan-opt-globals",
                                  cl::desc("Don't instrument scalar globals"),
                                  cl::Hidden, cl::init(true));

static cl::opt<bool> ClOptStack(
    "asan-opt-stack", cl::desc("Don't instrument scalar stack variables"),
    cl::Hidden, cl::init(false));

static cl::opt<bool> ClDynamicAllocaStack(
    "asan-stack-dynamic-alloca",
    cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden,
    cl::init(true));

static cl::opt<uint32_t> ClForceExperiment(
    "asan-force-experiment",
    cl::desc("Force optimization experiment (for testing)"), cl::Hidden,
    cl::init(0));

static cl::opt<bool>
    ClUsePrivateAlias("asan-use-private-alias",
                      cl::desc("Use private aliases for global variables"),
                      cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClUseOdrIndicator("asan-use-odr-indicator",
                      cl::desc("Use odr indicators to improve ODR reporting"),
                      cl::Hidden, cl::init(true));

static cl::opt<bool>
    ClUseGlobalsGC("asan-globals-live-support",
                   cl::desc("Use linker features to support dead "
                            "code stripping of globals"),
                   cl::Hidden, cl::init(true));

// This is on by default even though there is a bug in gold:
// https://sourceware.org/bugzilla/show_bug.cgi?id=19002
static cl::opt<bool>
    ClWithComdat("asan-with-comdat",
                 cl::desc("Place ASan constructors in comdat sections"),
                 cl::Hidden, cl::init(true));

static cl::opt<AsanDtorKind> ClOverrideDestructorKind(
    "asan-destructor-kind",
    cl::desc("Sets the ASan destructor kind. The default is to use the value "
             "provided to the pass constructor"),
    cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"),
               clEnumValN(AsanDtorKind::Global, "global",
                          "Use global destructors")),
    cl::init(AsanDtorKind::Invalid), cl::Hidden);

// Debug flags.

static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden,
                            cl::init(0));

static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"),
                                 cl::Hidden, cl::init(0));

static cl::opt<std::string> ClDebugFunc("asan-debug-func", cl::Hidden,
                                        cl::desc("Debug func"));

static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"),
                               cl::Hidden, cl::init(-1));

static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug max inst"),
                               cl::Hidden, cl::init(-1));

STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
STATISTIC(NumOptimizedAccessesToGlobalVar,
          "Number of optimized accesses to global vars");
STATISTIC(NumOptimizedAccessesToStackVar,
          "Number of optimized accesses to stack vars");

namespace {

/// This struct defines the shadow mapping using the rule:
///   shadow = (mem >> Scale) ADD-or-OR Offset.
/// If InGlobal is true, then
///   extern char __asan_shadow[];
///   shadow = (mem >> Scale) + &__asan_shadow
struct ShadowMapping {
  int Scale;
  uint64_t Offset;
  bool OrShadowOffset;
  bool InGlobal;
};

} // end anonymous namespace

static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize,
                                      bool IsKasan) {
  bool IsAndroid = TargetTriple.isAndroid();
  bool IsIOS = TargetTriple.isiOS() || TargetTriple.isWatchOS() ||
               TargetTriple.isDriverKit();
  bool IsMacOS = TargetTriple.isMacOSX();
  bool IsFreeBSD = TargetTriple.isOSFreeBSD();
  bool IsNetBSD = TargetTriple.isOSNetBSD();
  bool IsPS = TargetTriple.isPS();
  bool IsLinux = TargetTriple.isOSLinux();
  bool IsPPC64 = TargetTriple.getArch() == Triple::ppc64 ||
                 TargetTriple.getArch() == Triple::ppc64le;
  bool IsSystemZ = TargetTriple.getArch() == Triple::systemz;
  bool IsX86_64 = TargetTriple.getArch() == Triple::x86_64;
  bool IsMIPSN32ABI = TargetTriple.getEnvironment() == Triple::GNUABIN32;
  bool IsMIPS32 = TargetTriple.isMIPS32();
  bool IsMIPS64 = TargetTriple.isMIPS64();
  bool IsArmOrThumb = TargetTriple.isARM() || TargetTriple.isThumb();
  bool IsAArch64 = TargetTriple.getArch() == Triple::aarch64;
  bool IsLoongArch64 = TargetTriple.getArch() == Triple::loongarch64;
  bool IsRISCV64 = TargetTriple.getArch() == Triple::riscv64;
  bool IsWindows = TargetTriple.isOSWindows();
  bool IsFuchsia = TargetTriple.isOSFuchsia();
  bool IsEmscripten = TargetTriple.isOSEmscripten();
  bool IsAMDGPU = TargetTriple.isAMDGPU();

  ShadowMapping Mapping;

  Mapping.Scale = kDefaultShadowScale;
  if (ClMappingScale.getNumOccurrences() > 0) {
    Mapping.Scale = ClMappingScale;
  }

  if (LongSize == 32) {
    if (IsAndroid)
      Mapping.Offset = kDynamicShadowSentinel;
    else if (IsMIPSN32ABI)
      Mapping.Offset = kMIPS_ShadowOffsetN32;
    else if (IsMIPS32)
      Mapping.Offset = kMIPS32_ShadowOffset32;
    else if (IsFreeBSD)
      Mapping.Offset = kFreeBSD_ShadowOffset32;
    else if (IsNetBSD)
      Mapping.Offset = kNetBSD_ShadowOffset32;
    else if (IsIOS)
      Mapping.Offset = kDynamicShadowSentinel;
    else if (IsWindows)
      Mapping.Offset = kWindowsShadowOffset32;
    else if (IsEmscripten)
      Mapping.Offset = kEmscriptenShadowOffset;
    else
      Mapping.Offset = kDefaultShadowOffset32;
  } else { // LongSize == 64
    // Fuchsia is always PIE, which means that the beginning of the address
    // space is always available.
    if (IsFuchsia)
      Mapping.Offset = 0;
    else if (IsPPC64)
      Mapping.Offset = kPPC64_ShadowOffset64;
    else if (IsSystemZ)
      Mapping.Offset = kSystemZ_ShadowOffset64;
    else if (IsFreeBSD && IsAArch64)
      Mapping.Offset = kFreeBSDAArch64_ShadowOffset64;
    else if (IsFreeBSD && !IsMIPS64) {
      if (IsKasan)
        Mapping.Offset = kFreeBSDKasan_ShadowOffset64;
      else
        Mapping.Offset = kFreeBSD_ShadowOffset64;
    } else if (IsNetBSD) {
      if (IsKasan)
        Mapping.Offset = kNetBSDKasan_ShadowOffset64;
      else
        Mapping.Offset = kNetBSD_ShadowOffset64;
    } else if (IsPS)
      Mapping.Offset = kPS_ShadowOffset64;
    else if (IsLinux && IsX86_64) {
      if (IsKasan)
        Mapping.Offset = kLinuxKasan_ShadowOffset64;
      else
        Mapping.Offset = (kSmallX86_64ShadowOffsetBase &
                          (kSmallX86_64ShadowOffsetAlignMask << Mapping.Scale));
    } else if (IsWindows && IsX86_64) {
      Mapping.Offset = kWindowsShadowOffset64;
    } else if (IsMIPS64)
      Mapping.Offset = kMIPS64_ShadowOffset64;
    else if (IsIOS)
      Mapping.Offset = kDynamicShadowSentinel;
    else if (IsMacOS && IsAArch64)
      Mapping.Offset = kDynamicShadowSentinel;
    else if (IsAArch64)
      Mapping.Offset = kAArch64_ShadowOffset64;
    else if (IsLoongArch64)
      Mapping.Offset = kLoongArch64_ShadowOffset64;
    else if (IsRISCV64)
      Mapping.Offset = kRISCV64_ShadowOffset64;
    else if (IsAMDGPU)
      Mapping.Offset = (kSmallX86_64ShadowOffsetBase &
                        (kSmallX86_64ShadowOffsetAlignMask << Mapping.Scale));
    else
      Mapping.Offset = kDefaultShadowOffset64;
  }

  if (ClForceDynamicShadow) {
    Mapping.Offset = kDynamicShadowSentinel;
  }

  if (ClMappingOffset.getNumOccurrences() > 0) {
    Mapping.Offset = ClMappingOffset;
  }
  // OR-ing the shadow offset is more efficient (at least on x86) if the offset
  // is a power of two, but on ppc64 and loongarch64 we have to use add since
  // the shadow offset is not necessarily 1/8-th of the address space. On
  // SystemZ, we could OR the constant in a single instruction, but it's more
  // efficient to load it once and use indexed addressing.
  Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS &&
                           !IsRISCV64 && !IsLoongArch64 &&
                           !(Mapping.Offset & (Mapping.Offset - 1)) &&
                           Mapping.Offset != kDynamicShadowSentinel;
  bool IsAndroidWithIfuncSupport =
      IsAndroid && !TargetTriple.isAndroidVersionLT(21);
  Mapping.InGlobal = ClWithIfunc && IsAndroidWithIfuncSupport && IsArmOrThumb;

  return Mapping;
}

namespace llvm {
void getAddressSanitizerParams(const Triple &TargetTriple, int LongSize,
                               bool IsKasan, uint64_t *ShadowBase,
                               int *MappingScale, bool *OrShadowOffset) {
  auto Mapping = getShadowMapping(TargetTriple, LongSize, IsKasan);
  *ShadowBase = Mapping.Offset;
  *MappingScale = Mapping.Scale;
  *OrShadowOffset = Mapping.OrShadowOffset;
}

ASanAccessInfo::ASanAccessInfo(int32_t Packed)
    : Packed(Packed),
      AccessSizeIndex((Packed >> kAccessSizeIndexShift) & kAccessSizeIndexMask),
      IsWrite((Packed >> kIsWriteShift) & kIsWriteMask),
      CompileKernel((Packed >> kCompileKernelShift) & kCompileKernelMask) {}

ASanAccessInfo::ASanAccessInfo(bool IsWrite, bool CompileKernel,
                               uint8_t AccessSizeIndex)
    : Packed((IsWrite << kIsWriteShift) +
             (CompileKernel << kCompileKernelShift) +
             (AccessSizeIndex << kAccessSizeIndexShift)),
      AccessSizeIndex(AccessSizeIndex), IsWrite(IsWrite),
      CompileKernel(CompileKernel) {}

} // namespace llvm

static uint64_t getRedzoneSizeForScale(int MappingScale) {
  // Redzone used for stack and globals is at least 32 bytes.
  // For scales 6 and 7, the redzone has to be 64 and 128 bytes respectively.
  return std::max(32U, 1U << MappingScale);
}

static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple) {
  if (TargetTriple.isOSEmscripten()) {
    return kAsanEmscriptenCtorAndDtorPriority;
  } else {
    return kAsanCtorAndDtorPriority;
  }
}

namespace {

/// AddressSanitizer: instrument the code in module to find memory bugs.
struct AddressSanitizer {
  AddressSanitizer(Module &M, const StackSafetyGlobalInfo *SSGI,
                   bool CompileKernel = false, bool Recover = false,
                   bool UseAfterScope = false,
                   AsanDetectStackUseAfterReturnMode UseAfterReturn =
                       AsanDetectStackUseAfterReturnMode::Runtime)
      : CompileKernel(ClEnableKasan.getNumOccurrences() > 0 ? ClEnableKasan
                                                            : CompileKernel),
        Recover(ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover),
        UseAfterScope(UseAfterScope || ClUseAfterScope),
        UseAfterReturn(ClUseAfterReturn.getNumOccurrences() ? ClUseAfterReturn
                                                            : UseAfterReturn),
        SSGI(SSGI) {
    C = &(M.getContext());
    LongSize = M.getDataLayout().getPointerSizeInBits();
    IntptrTy = Type::getIntNTy(*C, LongSize);
    Int8PtrTy = Type::getInt8PtrTy(*C);
    Int32Ty = Type::getInt32Ty(*C);
    TargetTriple = Triple(M.getTargetTriple());

    Mapping = getShadowMapping(TargetTriple, LongSize, this->CompileKernel);

    assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
  }

  uint64_t getAllocaSizeInBytes(const AllocaInst &AI) const {
    uint64_t ArraySize = 1;
    if (AI.isArrayAllocation()) {
      const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
      assert(CI && "non-constant array size");
      ArraySize = CI->getZExtValue();
    }
    Type *Ty = AI.getAllocatedType();
    uint64_t SizeInBytes =
        AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
    return SizeInBytes * ArraySize;
  }

  /// Check if we want (and can) handle this alloca.
  bool isInterestingAlloca(const AllocaInst &AI);

  bool ignoreAccess(Instruction *Inst, Value *Ptr);
  void getInterestingMemoryOperands(
      Instruction *I, SmallVectorImpl<InterestingMemoryOperand> &Interesting);

  void instrumentMop(ObjectSizeOffsetVisitor &ObjSizeVis,
                     InterestingMemoryOperand &O, bool UseCalls,
                     const DataLayout &DL);
  void instrumentPointerComparisonOrSubtraction(Instruction *I);
  void instrumentAddress(Instruction *OrigIns, Instruction *InsertBefore,
                         Value *Addr, uint32_t TypeSize, bool IsWrite,
                         Value *SizeArgument, bool UseCalls, uint32_t Exp);
  Instruction *instrumentAMDGPUAddress(Instruction *OrigIns,
                                       Instruction *InsertBefore, Value *Addr,
                                       uint32_t TypeSize, bool IsWrite,
                                       Value *SizeArgument);
  void instrumentUnusualSizeOrAlignment(Instruction *I,
                                        Instruction *InsertBefore, Value *Addr,
                                        uint32_t TypeSize, bool IsWrite,
                                        Value *SizeArgument, bool UseCalls,
                                        uint32_t Exp);
  Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
                           Value *ShadowValue, uint32_t TypeSize);
  Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr,
                                 bool IsWrite, size_t AccessSizeIndex,
                                 Value *SizeArgument, uint32_t Exp);
  void instrumentMemIntrinsic(MemIntrinsic *MI);
  Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
  bool suppressInstrumentationSiteForDebug(int &Instrumented);
  bool instrumentFunction(Function &F, const TargetLibraryInfo *TLI);
  bool maybeInsertAsanInitAtFunctionEntry(Function &F);
  bool maybeInsertDynamicShadowAtFunctionEntry(Function &F);
  void markEscapedLocalAllocas(Function &F);

private:
  friend struct FunctionStackPoisoner;

  void initializeCallbacks(Module &M, const TargetLibraryInfo *TLI);

  bool LooksLikeCodeInBug11395(Instruction *I);
  bool GlobalIsLinkerInitialized(GlobalVariable *G);
  bool isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis, Value *Addr,
                    uint64_t TypeSize) const;

  /// Helper to cleanup per-function state.
  struct FunctionStateRAII {
    AddressSanitizer *Pass;

    FunctionStateRAII(AddressSanitizer *Pass) : Pass(Pass) {
      assert(Pass->ProcessedAllocas.empty() &&
             "last pass forgot to clear cache");
      assert(!Pass->LocalDynamicShadow);
    }

    ~FunctionStateRAII() {
      Pass->LocalDynamicShadow = nullptr;
      Pass->ProcessedAllocas.clear();
    }
  };

  LLVMContext *C;
  Triple TargetTriple;
  int LongSize;
  bool CompileKernel;
  bool Recover;
  bool UseAfterScope;
  AsanDetectStackUseAfterReturnMode UseAfterReturn;
  Type *IntptrTy;
  Type *Int8PtrTy;
  Type *Int32Ty;
  ShadowMapping Mapping;
  FunctionCallee AsanHandleNoReturnFunc;
  FunctionCallee AsanPtrCmpFunction, AsanPtrSubFunction;
  Constant *AsanShadowGlobal;
  // These arrays are indexed by AccessIsWrite, Experiment and log2(AccessSize).
  FunctionCallee AsanErrorCallback[2][2][kNumberOfAccessSizes];
  FunctionCallee AsanMemoryAccessCallback[2][2][kNumberOfAccessSizes];
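  // For example, with the default "__asan_" callback prefix, the slot for
  // {IsWrite=1, Exp=0, log2(AccessSize)=2} is expected to resolve to the
  // runtime functions __asan_report_store4 / __asan_store4; the names are
  // assembled in initializeCallbacks, defined later in this file.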
  // These arrays are indexed by AccessIsWrite and Experiment.
  FunctionCallee AsanErrorCallbackSized[2][2];
  FunctionCallee AsanMemoryAccessCallbackSized[2][2];

  FunctionCallee AsanMemmove, AsanMemcpy, AsanMemset;
  Value *LocalDynamicShadow = nullptr;
  const StackSafetyGlobalInfo *SSGI;
  DenseMap<const AllocaInst *, bool> ProcessedAllocas;

  FunctionCallee AMDGPUAddressShared;
  FunctionCallee AMDGPUAddressPrivate;
};

class ModuleAddressSanitizer {
public:
  ModuleAddressSanitizer(Module &M, bool CompileKernel = false,
                         bool Recover = false, bool UseGlobalsGC = true,
                         bool UseOdrIndicator = true,
                         AsanDtorKind DestructorKind = AsanDtorKind::Global,
                         AsanCtorKind ConstructorKind = AsanCtorKind::Global)
      : CompileKernel(ClEnableKasan.getNumOccurrences() > 0 ? ClEnableKasan
                                                            : CompileKernel),
        Recover(ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover),
        UseGlobalsGC(UseGlobalsGC && ClUseGlobalsGC && !this->CompileKernel),
        // Enable aliases as they should have no downside with ODR indicators.
        UsePrivateAlias(ClUsePrivateAlias.getNumOccurrences() > 0
                            ? ClUsePrivateAlias
                            : UseOdrIndicator),
        UseOdrIndicator(ClUseOdrIndicator.getNumOccurrences() > 0
                            ? ClUseOdrIndicator
                            : UseOdrIndicator),
        // Not a typo: ClWithComdat is almost completely pointless without
        // ClUseGlobalsGC (because then it only works on modules without
        // globals, which are rare); it is a prerequisite for ClUseGlobalsGC;
        // and both suffer from gold PR19002 for which UseGlobalsGC constructor
        // argument is designed as workaround. Therefore, disable both
        // ClWithComdat and ClUseGlobalsGC unless the frontend says it's ok to
        // do globals-gc.
        UseCtorComdat(UseGlobalsGC && ClWithComdat && !this->CompileKernel),
        DestructorKind(DestructorKind),
        ConstructorKind(ConstructorKind) {
    C = &(M.getContext());
    int LongSize = M.getDataLayout().getPointerSizeInBits();
    IntptrTy = Type::getIntNTy(*C, LongSize);
    TargetTriple = Triple(M.getTargetTriple());
    Mapping = getShadowMapping(TargetTriple, LongSize, this->CompileKernel);

    if (ClOverrideDestructorKind != AsanDtorKind::Invalid)
      this->DestructorKind = ClOverrideDestructorKind;
    assert(this->DestructorKind != AsanDtorKind::Invalid);
  }

  bool instrumentModule(Module &);

private:
  void initializeCallbacks(Module &M);

  bool InstrumentGlobals(IRBuilder<> &IRB, Module &M, bool *CtorComdat);
  void InstrumentGlobalsCOFF(IRBuilder<> &IRB, Module &M,
                             ArrayRef<GlobalVariable *> ExtendedGlobals,
                             ArrayRef<Constant *> MetadataInitializers);
  void InstrumentGlobalsELF(IRBuilder<> &IRB, Module &M,
                            ArrayRef<GlobalVariable *> ExtendedGlobals,
                            ArrayRef<Constant *> MetadataInitializers,
                            const std::string &UniqueModuleId);
  void InstrumentGlobalsMachO(IRBuilder<> &IRB, Module &M,
                              ArrayRef<GlobalVariable *> ExtendedGlobals,
                              ArrayRef<Constant *> MetadataInitializers);
  void
  InstrumentGlobalsWithMetadataArray(IRBuilder<> &IRB, Module &M,
                                     ArrayRef<GlobalVariable *> ExtendedGlobals,
                                     ArrayRef<Constant *> MetadataInitializers);

  GlobalVariable *CreateMetadataGlobal(Module &M, Constant *Initializer,
                                       StringRef OriginalName);
  void SetComdatForGlobalMetadata(GlobalVariable *G, GlobalVariable *Metadata,
                                  StringRef InternalSuffix);
  Instruction *CreateAsanModuleDtor(Module &M);

  const GlobalVariable *getExcludedAliasedGlobal(const GlobalAlias &GA) const;
  bool shouldInstrumentGlobal(GlobalVariable *G) const;
  bool ShouldUseMachOGlobalsSection() const;
  StringRef getGlobalMetadataSection() const;
  void poisonOneInitializer(Function &GlobalInit, GlobalValue *ModuleName);
  void createInitializerPoisonCalls(Module &M, GlobalValue *ModuleName);
  uint64_t getMinRedzoneSizeForGlobal() const {
    return getRedzoneSizeForScale(Mapping.Scale);
  }
  uint64_t getRedzoneSizeForGlobal(uint64_t SizeInBytes) const;
  int GetAsanVersion(const Module &M) const;

  bool CompileKernel;
  bool Recover;
  bool UseGlobalsGC;
  bool UsePrivateAlias;
  bool UseOdrIndicator;
  bool UseCtorComdat;
  AsanDtorKind DestructorKind;
  AsanCtorKind ConstructorKind;
  Type *IntptrTy;
  LLVMContext *C;
  Triple TargetTriple;
  ShadowMapping Mapping;
  FunctionCallee AsanPoisonGlobals;
  FunctionCallee AsanUnpoisonGlobals;
  FunctionCallee AsanRegisterGlobals;
  FunctionCallee AsanUnregisterGlobals;
  FunctionCallee AsanRegisterImageGlobals;
  FunctionCallee AsanUnregisterImageGlobals;
  FunctionCallee AsanRegisterElfGlobals;
  FunctionCallee AsanUnregisterElfGlobals;

  Function *AsanCtorFunction = nullptr;
  Function *AsanDtorFunction = nullptr;
};
// Stack poisoning does not play well with exception handling.
// When an exception is thrown, we essentially bypass the code
// that unpoisons the stack. This is why the run-time library has
// to intercept __cxa_throw (as well as longjmp, etc) and unpoison the entire
// stack in the interceptor. This, however, does not work inside the
// actual function which catches the exception, most likely because the
// compiler hoists the load of the shadow value somewhere too high.
// This causes asan to report a non-existing bug on 453.povray.
// It sounds like an LLVM bug.
struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
  Function &F;
  AddressSanitizer &ASan;
  DIBuilder DIB;
  LLVMContext *C;
  Type *IntptrTy;
  Type *IntptrPtrTy;
  ShadowMapping Mapping;

  SmallVector<AllocaInst *, 16> AllocaVec;
  SmallVector<AllocaInst *, 16> StaticAllocasToMoveUp;
  SmallVector<Instruction *, 8> RetVec;

  FunctionCallee AsanStackMallocFunc[kMaxAsanStackMallocSizeClass + 1],
      AsanStackFreeFunc[kMaxAsanStackMallocSizeClass + 1];
  FunctionCallee AsanSetShadowFunc[0x100] = {};
  FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
  FunctionCallee AsanAllocaPoisonFunc, AsanAllocasUnpoisonFunc;

  // Stores a place and arguments of poisoning/unpoisoning call for alloca.
  struct AllocaPoisonCall {
    IntrinsicInst *InsBefore;
    AllocaInst *AI;
    uint64_t Size;
    bool DoPoison;
  };
  SmallVector<AllocaPoisonCall, 8> DynamicAllocaPoisonCallVec;
  SmallVector<AllocaPoisonCall, 8> StaticAllocaPoisonCallVec;
  bool HasUntracedLifetimeIntrinsic = false;

  SmallVector<AllocaInst *, 1> DynamicAllocaVec;
  SmallVector<IntrinsicInst *, 1> StackRestoreVec;
  AllocaInst *DynamicAllocaLayout = nullptr;
  IntrinsicInst *LocalEscapeCall = nullptr;

  bool HasInlineAsm = false;
  bool HasReturnsTwiceCall = false;
  bool PoisonStack;

  FunctionStackPoisoner(Function &F, AddressSanitizer &ASan)
      : F(F), ASan(ASan), DIB(*F.getParent(), /*AllowUnresolved*/ false),
        C(ASan.C), IntptrTy(ASan.IntptrTy),
        IntptrPtrTy(PointerType::get(IntptrTy, 0)), Mapping(ASan.Mapping),
        PoisonStack(ClStack &&
                    !Triple(F.getParent()->getTargetTriple()).isAMDGPU()) {}

  bool runOnFunction() {
    if (!PoisonStack)
      return false;

    if (ClRedzoneByvalArgs)
      copyArgsPassedByValToAllocas();

    // Collect alloca, ret, lifetime instructions etc.
    for (BasicBlock *BB : depth_first(&F.getEntryBlock())) visit(*BB);

    if (AllocaVec.empty() && DynamicAllocaVec.empty()) return false;

    initializeCallbacks(*F.getParent());

    if (HasUntracedLifetimeIntrinsic) {
      // If there are lifetime intrinsics which couldn't be traced back to an
      // alloca, we may not know exactly when a variable enters scope, and
      // therefore should "fail safe" by not poisoning them.
      StaticAllocaPoisonCallVec.clear();
      DynamicAllocaPoisonCallVec.clear();
    }

    processDynamicAllocas();
    processStaticAllocas();

    if (ClDebugStack) {
      LLVM_DEBUG(dbgs() << F);
    }
    return true;
  }

  // Arguments marked with the "byval" attribute are implicitly copied without
  // using an alloca instruction. To produce redzones for those arguments, we
  // copy them a second time into memory allocated with an alloca instruction.
  void copyArgsPassedByValToAllocas();
  // Finds all Alloca instructions and puts
  // poisoned red zones around all of them.
  // Then unpoisons everything back before the function returns.
  void processStaticAllocas();
  void processDynamicAllocas();

  void createDynamicAllocasInitStorage();

  // ----------------------- Visitors.
  /// Collect all Ret instructions, or the musttail call instruction if it
  /// precedes the return instruction.
  void visitReturnInst(ReturnInst &RI) {
    if (CallInst *CI = RI.getParent()->getTerminatingMustTailCall())
      RetVec.push_back(CI);
    else
      RetVec.push_back(&RI);
  }

  /// Collect all Resume instructions.
  void visitResumeInst(ResumeInst &RI) { RetVec.push_back(&RI); }
  /// Collect all CleanupReturnInst instructions.
  void visitCleanupReturnInst(CleanupReturnInst &CRI) { RetVec.push_back(&CRI); }
  void unpoisonDynamicAllocasBeforeInst(Instruction *InstBefore,
                                        Value *SavedStack) {
    IRBuilder<> IRB(InstBefore);

    Value *DynamicAreaPtr = IRB.CreatePtrToInt(SavedStack, IntptrTy);
    // When we insert __asan_allocas_unpoison before @llvm.stackrestore, we
    // need to adjust the extracted SP to compute the address of the most
    // recent alloca. We have a special @llvm.get.dynamic.area.offset
    // intrinsic for this purpose.
  869. if (!isa<ReturnInst>(InstBefore)) {
  870. Function *DynamicAreaOffsetFunc = Intrinsic::getDeclaration(
  871. InstBefore->getModule(), Intrinsic::get_dynamic_area_offset,
  872. {IntptrTy});
  873. Value *DynamicAreaOffset = IRB.CreateCall(DynamicAreaOffsetFunc, {});
  874. DynamicAreaPtr = IRB.CreateAdd(IRB.CreatePtrToInt(SavedStack, IntptrTy),
  875. DynamicAreaOffset);
  876. }
  877. IRB.CreateCall(
  878. AsanAllocasUnpoisonFunc,
  879. {IRB.CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
  880. }
  881. // Unpoison dynamic allocas redzones.
  882. void unpoisonDynamicAllocas() {
  883. for (Instruction *Ret : RetVec)
  884. unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
  885. for (Instruction *StackRestoreInst : StackRestoreVec)
  886. unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
  887. StackRestoreInst->getOperand(0));
  888. }
  889. // Deploy and poison redzones around dynamic alloca call. To do this, we
  890. // should replace this call with another one with changed parameters and
  891. // replace all its uses with new address, so
  892. // addr = alloca type, old_size, align
  893. // is replaced by
  894. // new_size = (old_size + additional_size) * sizeof(type)
  895. // tmp = alloca i8, new_size, max(align, 32)
  896. // addr = tmp + 32 (first 32 bytes are for the left redzone).
  897. // Additional_size is added to make new memory allocation contain not only
  898. // requested memory, but also left, partial and right redzones.
  899. void handleDynamicAllocaCall(AllocaInst *AI);
  900. /// Collect Alloca instructions we want (and can) handle.
  901. void visitAllocaInst(AllocaInst &AI) {
  902. if (!ASan.isInterestingAlloca(AI)) {
  903. if (AI.isStaticAlloca()) {
904. // Skip over allocas that are present *before* the first instrumented
905. // alloca; we don't want to move those around.
  906. if (AllocaVec.empty())
  907. return;
  908. StaticAllocasToMoveUp.push_back(&AI);
  909. }
  910. return;
  911. }
  912. if (!AI.isStaticAlloca())
  913. DynamicAllocaVec.push_back(&AI);
  914. else
  915. AllocaVec.push_back(&AI);
  916. }
  917. /// Collect lifetime intrinsic calls to check for use-after-scope
  918. /// errors.
  919. void visitIntrinsicInst(IntrinsicInst &II) {
  920. Intrinsic::ID ID = II.getIntrinsicID();
  921. if (ID == Intrinsic::stackrestore) StackRestoreVec.push_back(&II);
  922. if (ID == Intrinsic::localescape) LocalEscapeCall = &II;
  923. if (!ASan.UseAfterScope)
  924. return;
  925. if (!II.isLifetimeStartOrEnd())
  926. return;
927. // Found a lifetime intrinsic; add ASan instrumentation if necessary.
  928. auto *Size = cast<ConstantInt>(II.getArgOperand(0));
  929. // If size argument is undefined, don't do anything.
  930. if (Size->isMinusOne()) return;
  931. // Check that size doesn't saturate uint64_t and can
  932. // be stored in IntptrTy.
  933. const uint64_t SizeValue = Size->getValue().getLimitedValue();
  934. if (SizeValue == ~0ULL ||
  935. !ConstantInt::isValueValidForType(IntptrTy, SizeValue))
  936. return;
  937. // Find alloca instruction that corresponds to llvm.lifetime argument.
  938. // Currently we can only handle lifetime markers pointing to the
  939. // beginning of the alloca.
  940. AllocaInst *AI = findAllocaForValue(II.getArgOperand(1), true);
  941. if (!AI) {
  942. HasUntracedLifetimeIntrinsic = true;
  943. return;
  944. }
  945. // We're interested only in allocas we can handle.
  946. if (!ASan.isInterestingAlloca(*AI))
  947. return;
  948. bool DoPoison = (ID == Intrinsic::lifetime_end);
  949. AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
  950. if (AI->isStaticAlloca())
  951. StaticAllocaPoisonCallVec.push_back(APC);
  952. else if (ClInstrumentDynamicAllocas)
  953. DynamicAllocaPoisonCallVec.push_back(APC);
  954. }
  955. void visitCallBase(CallBase &CB) {
  956. if (CallInst *CI = dyn_cast<CallInst>(&CB)) {
  957. HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
  958. HasReturnsTwiceCall |= CI->canReturnTwice();
  959. }
  960. }
  961. // ---------------------- Helpers.
  962. void initializeCallbacks(Module &M);
  963. // Copies bytes from ShadowBytes into shadow memory for indexes where
  964. // ShadowMask is not zero. If ShadowMask[i] is zero, we assume that
  965. // ShadowBytes[i] is constantly zero and doesn't need to be overwritten.
  966. void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
  967. IRBuilder<> &IRB, Value *ShadowBase);
  968. void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
  969. size_t Begin, size_t End, IRBuilder<> &IRB,
  970. Value *ShadowBase);
  971. void copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
  972. ArrayRef<uint8_t> ShadowBytes, size_t Begin,
  973. size_t End, IRBuilder<> &IRB, Value *ShadowBase);
  974. void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison);
  975. Value *createAllocaForLayout(IRBuilder<> &IRB, const ASanStackFrameLayout &L,
  976. bool Dynamic);
  977. PHINode *createPHI(IRBuilder<> &IRB, Value *Cond, Value *ValueIfTrue,
  978. Instruction *ThenTerm, Value *ValueIfFalse);
  979. };
  980. } // end anonymous namespace
  981. void AddressSanitizerPass::printPipeline(
  982. raw_ostream &OS, function_ref<StringRef(StringRef)> MapClassName2PassName) {
  983. static_cast<PassInfoMixin<AddressSanitizerPass> *>(this)->printPipeline(
  984. OS, MapClassName2PassName);
  985. OS << "<";
  986. if (Options.CompileKernel)
  987. OS << "kernel";
  988. OS << ">";
  989. }
  990. AddressSanitizerPass::AddressSanitizerPass(
  991. const AddressSanitizerOptions &Options, bool UseGlobalGC,
  992. bool UseOdrIndicator, AsanDtorKind DestructorKind,
  993. AsanCtorKind ConstructorKind)
  994. : Options(Options), UseGlobalGC(UseGlobalGC),
  995. UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind),
996. ConstructorKind(ConstructorKind) {}
  997. PreservedAnalyses AddressSanitizerPass::run(Module &M,
  998. ModuleAnalysisManager &MAM) {
  999. ModuleAddressSanitizer ModuleSanitizer(M, Options.CompileKernel,
  1000. Options.Recover, UseGlobalGC,
  1001. UseOdrIndicator, DestructorKind,
  1002. ConstructorKind);
  1003. bool Modified = false;
  1004. auto &FAM = MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  1005. const StackSafetyGlobalInfo *const SSGI =
  1006. ClUseStackSafety ? &MAM.getResult<StackSafetyGlobalAnalysis>(M) : nullptr;
  1007. for (Function &F : M) {
  1008. AddressSanitizer FunctionSanitizer(M, SSGI, Options.CompileKernel,
  1009. Options.Recover, Options.UseAfterScope,
  1010. Options.UseAfterReturn);
  1011. const TargetLibraryInfo &TLI = FAM.getResult<TargetLibraryAnalysis>(F);
  1012. Modified |= FunctionSanitizer.instrumentFunction(F, &TLI);
  1013. }
  1014. Modified |= ModuleSanitizer.instrumentModule(M);
  1015. if (!Modified)
  1016. return PreservedAnalyses::all();
  1017. PreservedAnalyses PA = PreservedAnalyses::none();
  1018. // GlobalsAA is considered stateless and does not get invalidated unless
  1019. // explicitly invalidated; PreservedAnalyses::none() is not enough. Sanitizers
  1020. // make changes that require GlobalsAA to be invalidated.
  1021. PA.abandon<GlobalsAA>();
  1022. return PA;
  1023. }
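// Maps an access size in bits to the index used by the callback arrays below:
// 8 -> 0, 16 -> 1, 32 -> 2, 64 -> 3, 128 -> 4 (i.e. log2 of the size in bytes).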
  1024. static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  1025. size_t Res = countTrailingZeros(TypeSize / 8);
  1026. assert(Res < kNumberOfAccessSizes);
  1027. return Res;
  1028. }
  1029. /// Check if \p G has been created by a trusted compiler pass.
  1030. static bool GlobalWasGeneratedByCompiler(GlobalVariable *G) {
  1031. // Do not instrument @llvm.global_ctors, @llvm.used, etc.
  1032. if (G->getName().startswith("llvm.") ||
  1033. // Do not instrument gcov counter arrays.
  1034. G->getName().startswith("__llvm_gcov_ctr") ||
  1035. // Do not instrument rtti proxy symbols for function sanitizer.
  1036. G->getName().startswith("__llvm_rtti_proxy"))
  1037. return true;
  1038. // Do not instrument asan globals.
  1039. if (G->getName().startswith(kAsanGenPrefix) ||
  1040. G->getName().startswith(kSanCovGenPrefix) ||
  1041. G->getName().startswith(kODRGenPrefix))
  1042. return true;
  1043. return false;
  1044. }
  1045. static bool isUnsupportedAMDGPUAddrspace(Value *Addr) {
  1046. Type *PtrTy = cast<PointerType>(Addr->getType()->getScalarType());
  1047. unsigned int AddrSpace = PtrTy->getPointerAddressSpace();
  1048. if (AddrSpace == 3 || AddrSpace == 5)
  1049. return true;
  1050. return false;
  1051. }
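// Compute the shadow address for an application address. For example, with the
// typical Linux x86_64 mapping (Scale = 3, Offset = 0x7fff8000, additive
// offset), this yields Shadow(Addr) = (Addr >> 3) + 0x7fff8000, so one shadow
// byte describes an 8-byte granule of application memory.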
  1052. Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
  1053. // Shadow >> scale
  1054. Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
  1055. if (Mapping.Offset == 0) return Shadow;
  1056. // (Shadow >> scale) | offset
  1057. Value *ShadowBase;
  1058. if (LocalDynamicShadow)
  1059. ShadowBase = LocalDynamicShadow;
  1060. else
  1061. ShadowBase = ConstantInt::get(IntptrTy, Mapping.Offset);
  1062. if (Mapping.OrShadowOffset)
  1063. return IRB.CreateOr(Shadow, ShadowBase);
  1064. else
  1065. return IRB.CreateAdd(Shadow, ShadowBase);
  1066. }
  1067. // Instrument memset/memmove/memcpy
  1068. void AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
  1069. IRBuilder<> IRB(MI);
  1070. if (isa<MemTransferInst>(MI)) {
  1071. IRB.CreateCall(
  1072. isa<MemMoveInst>(MI) ? AsanMemmove : AsanMemcpy,
  1073. {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
  1074. IRB.CreatePointerCast(MI->getOperand(1), IRB.getInt8PtrTy()),
  1075. IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  1076. } else if (isa<MemSetInst>(MI)) {
  1077. IRB.CreateCall(
  1078. AsanMemset,
  1079. {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
  1080. IRB.CreateIntCast(MI->getOperand(1), IRB.getInt32Ty(), false),
  1081. IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  1082. }
  1083. MI->eraseFromParent();
  1084. }
  1085. /// Check if we want (and can) handle this alloca.
  1086. bool AddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
  1087. auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
  1088. if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
  1089. return PreviouslySeenAllocaInfo->getSecond();
  1090. bool IsInteresting =
  1091. (AI.getAllocatedType()->isSized() &&
  1092. // alloca() may be called with 0 size, ignore it.
  1093. ((!AI.isStaticAlloca()) || getAllocaSizeInBytes(AI) > 0) &&
  1094. // We are only interested in allocas not promotable to registers.
  1095. // Promotable allocas are common under -O0.
  1096. (!ClSkipPromotableAllocas || !isAllocaPromotable(&AI)) &&
1097. // inalloca allocas are not treated as static, and we don't want
1098. // dynamic alloca instrumentation for them either.
  1099. !AI.isUsedWithInAlloca() &&
  1100. // swifterror allocas are register promoted by ISel
  1101. !AI.isSwiftError() &&
  1102. // safe allocas are not interesting
  1103. !(SSGI && SSGI->isSafe(AI)));
  1104. ProcessedAllocas[&AI] = IsInteresting;
  1105. return IsInteresting;
  1106. }
  1107. bool AddressSanitizer::ignoreAccess(Instruction *Inst, Value *Ptr) {
  1108. // Instrument accesses from different address spaces only for AMDGPU.
  1109. Type *PtrTy = cast<PointerType>(Ptr->getType()->getScalarType());
  1110. if (PtrTy->getPointerAddressSpace() != 0 &&
  1111. !(TargetTriple.isAMDGPU() && !isUnsupportedAMDGPUAddrspace(Ptr)))
  1112. return true;
  1113. // Ignore swifterror addresses.
  1114. // swifterror memory addresses are mem2reg promoted by instruction
  1115. // selection. As such they cannot have regular uses like an instrumentation
  1116. // function and it makes no sense to track them as memory.
  1117. if (Ptr->isSwiftError())
  1118. return true;
  1119. // Treat memory accesses to promotable allocas as non-interesting since they
  1120. // will not cause memory violations. This greatly speeds up the instrumented
  1121. // executable at -O0.
  1122. if (auto AI = dyn_cast_or_null<AllocaInst>(Ptr))
  1123. if (ClSkipPromotableAllocas && !isInterestingAlloca(*AI))
  1124. return true;
  1125. if (SSGI != nullptr && SSGI->stackAccessIsSafe(*Inst) &&
  1126. findAllocaForValue(Ptr))
  1127. return true;
  1128. return false;
  1129. }
  1130. void AddressSanitizer::getInterestingMemoryOperands(
  1131. Instruction *I, SmallVectorImpl<InterestingMemoryOperand> &Interesting) {
  1132. // Do not instrument the load fetching the dynamic shadow address.
  1133. if (LocalDynamicShadow == I)
  1134. return;
  1135. if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
  1136. if (!ClInstrumentReads || ignoreAccess(I, LI->getPointerOperand()))
  1137. return;
  1138. Interesting.emplace_back(I, LI->getPointerOperandIndex(), false,
  1139. LI->getType(), LI->getAlign());
  1140. } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
  1141. if (!ClInstrumentWrites || ignoreAccess(I, SI->getPointerOperand()))
  1142. return;
  1143. Interesting.emplace_back(I, SI->getPointerOperandIndex(), true,
  1144. SI->getValueOperand()->getType(), SI->getAlign());
  1145. } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
  1146. if (!ClInstrumentAtomics || ignoreAccess(I, RMW->getPointerOperand()))
  1147. return;
  1148. Interesting.emplace_back(I, RMW->getPointerOperandIndex(), true,
  1149. RMW->getValOperand()->getType(), std::nullopt);
  1150. } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
  1151. if (!ClInstrumentAtomics || ignoreAccess(I, XCHG->getPointerOperand()))
  1152. return;
  1153. Interesting.emplace_back(I, XCHG->getPointerOperandIndex(), true,
  1154. XCHG->getCompareOperand()->getType(),
  1155. std::nullopt);
  1156. } else if (auto CI = dyn_cast<CallInst>(I)) {
  1157. if (CI->getIntrinsicID() == Intrinsic::masked_load ||
  1158. CI->getIntrinsicID() == Intrinsic::masked_store) {
  1159. bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_store;
  1160. // Masked store has an initial operand for the value.
  1161. unsigned OpOffset = IsWrite ? 1 : 0;
  1162. if (IsWrite ? !ClInstrumentWrites : !ClInstrumentReads)
  1163. return;
  1164. auto BasePtr = CI->getOperand(OpOffset);
  1165. if (ignoreAccess(I, BasePtr))
  1166. return;
  1167. Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
  1168. MaybeAlign Alignment = Align(1);
  1169. // Otherwise no alignment guarantees. We probably got Undef.
  1170. if (auto *Op = dyn_cast<ConstantInt>(CI->getOperand(1 + OpOffset)))
  1171. Alignment = Op->getMaybeAlignValue();
  1172. Value *Mask = CI->getOperand(2 + OpOffset);
  1173. Interesting.emplace_back(I, OpOffset, IsWrite, Ty, Alignment, Mask);
  1174. } else {
  1175. for (unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
  1176. if (!ClInstrumentByval || !CI->isByValArgument(ArgNo) ||
  1177. ignoreAccess(I, CI->getArgOperand(ArgNo)))
  1178. continue;
  1179. Type *Ty = CI->getParamByValType(ArgNo);
  1180. Interesting.emplace_back(I, ArgNo, false, Ty, Align(1));
  1181. }
  1182. }
  1183. }
  1184. }
  1185. static bool isPointerOperand(Value *V) {
  1186. return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
  1187. }
  1188. // This is a rough heuristic; it may cause both false positives and
  1189. // false negatives. The proper implementation requires cooperation with
  1190. // the frontend.
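// For example, `if (p < q)` or `(char *)p - (char *)q` on two pointer-typed
// operands is what this check (and isInterestingPointerSubtraction below) hands
// to instrumentPointerComparisonOrSubtraction(), so the runtime can flag pairs
// that do not point into the same allocation (subject to a runtime flag such as
// detect_invalid_pointer_pairs).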
  1191. static bool isInterestingPointerComparison(Instruction *I) {
  1192. if (ICmpInst *Cmp = dyn_cast<ICmpInst>(I)) {
  1193. if (!Cmp->isRelational())
  1194. return false;
  1195. } else {
  1196. return false;
  1197. }
  1198. return isPointerOperand(I->getOperand(0)) &&
  1199. isPointerOperand(I->getOperand(1));
  1200. }
  1201. // This is a rough heuristic; it may cause both false positives and
  1202. // false negatives. The proper implementation requires cooperation with
  1203. // the frontend.
  1204. static bool isInterestingPointerSubtraction(Instruction *I) {
  1205. if (BinaryOperator *BO = dyn_cast<BinaryOperator>(I)) {
  1206. if (BO->getOpcode() != Instruction::Sub)
  1207. return false;
  1208. } else {
  1209. return false;
  1210. }
  1211. return isPointerOperand(I->getOperand(0)) &&
  1212. isPointerOperand(I->getOperand(1));
  1213. }
  1214. bool AddressSanitizer::GlobalIsLinkerInitialized(GlobalVariable *G) {
  1215. // If a global variable does not have dynamic initialization we don't
  1216. // have to instrument it. However, if a global does not have initializer
  1217. // at all, we assume it has dynamic initializer (in other TU).
  1218. if (!G->hasInitializer())
  1219. return false;
  1220. if (G->hasSanitizerMetadata() && G->getSanitizerMetadata().IsDynInit)
  1221. return false;
  1222. return true;
  1223. }
  1224. void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
  1225. Instruction *I) {
  1226. IRBuilder<> IRB(I);
  1227. FunctionCallee F = isa<ICmpInst>(I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
  1228. Value *Param[2] = {I->getOperand(0), I->getOperand(1)};
  1229. for (Value *&i : Param) {
  1230. if (i->getType()->isPointerTy())
  1231. i = IRB.CreatePointerCast(i, IntptrTy);
  1232. }
  1233. IRB.CreateCall(F, Param);
  1234. }
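// Dispatch helper: a power-of-two access of 1..16 bytes whose alignment covers
// either the shadow granularity or the access itself gets the single
// shadow-byte check in instrumentAddress(); anything else (e.g. a 12-byte or
// under-aligned access) falls back to instrumentUnusualSizeOrAlignment().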
  1235. static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I,
  1236. Instruction *InsertBefore, Value *Addr,
  1237. MaybeAlign Alignment, unsigned Granularity,
  1238. uint32_t TypeSize, bool IsWrite,
  1239. Value *SizeArgument, bool UseCalls,
  1240. uint32_t Exp) {
1241. // Instrument a 1-, 2-, 4-, 8-, or 16-byte access with one check
  1242. // if the data is properly aligned.
  1243. if ((TypeSize == 8 || TypeSize == 16 || TypeSize == 32 || TypeSize == 64 ||
  1244. TypeSize == 128) &&
  1245. (!Alignment || *Alignment >= Granularity || *Alignment >= TypeSize / 8))
  1246. return Pass->instrumentAddress(I, InsertBefore, Addr, TypeSize, IsWrite,
  1247. nullptr, UseCalls, Exp);
  1248. Pass->instrumentUnusualSizeOrAlignment(I, InsertBefore, Addr, TypeSize,
  1249. IsWrite, nullptr, UseCalls, Exp);
  1250. }
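// For masked vector accesses (e.g. a masked load of <4 x i32>) each lane is
// handled separately: lanes whose mask bit is a constant false are skipped,
// lanes with a non-constant mask are checked under a per-lane `if (mask[i])`
// branch created with SplitBlockAndInsertIfThen, and the checked address for
// lane i is a GEP to element i of the vector.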
  1251. static void instrumentMaskedLoadOrStore(AddressSanitizer *Pass,
  1252. const DataLayout &DL, Type *IntptrTy,
  1253. Value *Mask, Instruction *I,
  1254. Value *Addr, MaybeAlign Alignment,
  1255. unsigned Granularity, Type *OpType,
  1256. bool IsWrite, Value *SizeArgument,
  1257. bool UseCalls, uint32_t Exp) {
  1258. auto *VTy = cast<FixedVectorType>(OpType);
  1259. uint64_t ElemTypeSize = DL.getTypeStoreSizeInBits(VTy->getScalarType());
  1260. unsigned Num = VTy->getNumElements();
  1261. auto Zero = ConstantInt::get(IntptrTy, 0);
  1262. for (unsigned Idx = 0; Idx < Num; ++Idx) {
  1263. Value *InstrumentedAddress = nullptr;
  1264. Instruction *InsertBefore = I;
  1265. if (auto *Vector = dyn_cast<ConstantVector>(Mask)) {
  1266. // dyn_cast as we might get UndefValue
  1267. if (auto *Masked = dyn_cast<ConstantInt>(Vector->getOperand(Idx))) {
  1268. if (Masked->isZero())
  1269. // Mask is constant false, so no instrumentation needed.
  1270. continue;
  1271. // If we have a true or undef value, fall through to doInstrumentAddress
  1272. // with InsertBefore == I
  1273. }
  1274. } else {
  1275. IRBuilder<> IRB(I);
  1276. Value *MaskElem = IRB.CreateExtractElement(Mask, Idx);
  1277. Instruction *ThenTerm = SplitBlockAndInsertIfThen(MaskElem, I, false);
  1278. InsertBefore = ThenTerm;
  1279. }
  1280. IRBuilder<> IRB(InsertBefore);
  1281. InstrumentedAddress =
  1282. IRB.CreateGEP(VTy, Addr, {Zero, ConstantInt::get(IntptrTy, Idx)});
  1283. doInstrumentAddress(Pass, I, InsertBefore, InstrumentedAddress, Alignment,
  1284. Granularity, ElemTypeSize, IsWrite, SizeArgument,
  1285. UseCalls, Exp);
  1286. }
  1287. }
  1288. void AddressSanitizer::instrumentMop(ObjectSizeOffsetVisitor &ObjSizeVis,
  1289. InterestingMemoryOperand &O, bool UseCalls,
  1290. const DataLayout &DL) {
  1291. Value *Addr = O.getPtr();
  1292. // Optimization experiments.
  1293. // The experiments can be used to evaluate potential optimizations that remove
  1294. // instrumentation (assess false negatives). Instead of completely removing
  1295. // some instrumentation, you set Exp to a non-zero value (mask of optimization
  1296. // experiments that want to remove instrumentation of this instruction).
  1297. // If Exp is non-zero, this pass will emit special calls into runtime
  1298. // (e.g. __asan_report_exp_load1 instead of __asan_report_load1). These calls
  1299. // make runtime terminate the program in a special way (with a different
  1300. // exit status). Then you run the new compiler on a buggy corpus, collect
  1301. // the special terminations (ideally, you don't see them at all -- no false
  1302. // negatives) and make the decision on the optimization.
  1303. uint32_t Exp = ClForceExperiment;
  1304. if (ClOpt && ClOptGlobals) {
  1305. // If initialization order checking is disabled, a simple access to a
  1306. // dynamically initialized global is always valid.
  1307. GlobalVariable *G = dyn_cast<GlobalVariable>(getUnderlyingObject(Addr));
  1308. if (G && (!ClInitializers || GlobalIsLinkerInitialized(G)) &&
  1309. isSafeAccess(ObjSizeVis, Addr, O.TypeSize)) {
  1310. NumOptimizedAccessesToGlobalVar++;
  1311. return;
  1312. }
  1313. }
  1314. if (ClOpt && ClOptStack) {
  1315. // A direct inbounds access to a stack variable is always valid.
  1316. if (isa<AllocaInst>(getUnderlyingObject(Addr)) &&
  1317. isSafeAccess(ObjSizeVis, Addr, O.TypeSize)) {
  1318. NumOptimizedAccessesToStackVar++;
  1319. return;
  1320. }
  1321. }
  1322. if (O.IsWrite)
  1323. NumInstrumentedWrites++;
  1324. else
  1325. NumInstrumentedReads++;
  1326. unsigned Granularity = 1 << Mapping.Scale;
  1327. if (O.MaybeMask) {
  1328. instrumentMaskedLoadOrStore(this, DL, IntptrTy, O.MaybeMask, O.getInsn(),
  1329. Addr, O.Alignment, Granularity, O.OpType,
  1330. O.IsWrite, nullptr, UseCalls, Exp);
  1331. } else {
  1332. doInstrumentAddress(this, O.getInsn(), O.getInsn(), Addr, O.Alignment,
  1333. Granularity, O.TypeSize, O.IsWrite, nullptr, UseCalls,
  1334. Exp);
  1335. }
  1336. }
  1337. Instruction *AddressSanitizer::generateCrashCode(Instruction *InsertBefore,
  1338. Value *Addr, bool IsWrite,
  1339. size_t AccessSizeIndex,
  1340. Value *SizeArgument,
  1341. uint32_t Exp) {
  1342. IRBuilder<> IRB(InsertBefore);
  1343. Value *ExpVal = Exp == 0 ? nullptr : ConstantInt::get(IRB.getInt32Ty(), Exp);
  1344. CallInst *Call = nullptr;
  1345. if (SizeArgument) {
  1346. if (Exp == 0)
  1347. Call = IRB.CreateCall(AsanErrorCallbackSized[IsWrite][0],
  1348. {Addr, SizeArgument});
  1349. else
  1350. Call = IRB.CreateCall(AsanErrorCallbackSized[IsWrite][1],
  1351. {Addr, SizeArgument, ExpVal});
  1352. } else {
  1353. if (Exp == 0)
  1354. Call =
  1355. IRB.CreateCall(AsanErrorCallback[IsWrite][0][AccessSizeIndex], Addr);
  1356. else
  1357. Call = IRB.CreateCall(AsanErrorCallback[IsWrite][1][AccessSizeIndex],
  1358. {Addr, ExpVal});
  1359. }
  1360. Call->setCannotMerge();
  1361. return Call;
  1362. }
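// Build the sub-granule ("slow path") check. With 8-byte shadow granules, a
// shadow value k in [1,7] means only the first k bytes of the granule are
// addressable; e.g. for a 2-byte access with (Addr & 7) == 3 the last accessed
// byte is 4, so k == 5 passes, while (Addr & 7) == 4 gives 5 >= 5 and reports.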
  1363. Value *AddressSanitizer::createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
  1364. Value *ShadowValue,
  1365. uint32_t TypeSize) {
  1366. size_t Granularity = static_cast<size_t>(1) << Mapping.Scale;
  1367. // Addr & (Granularity - 1)
  1368. Value *LastAccessedByte =
  1369. IRB.CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
  1370. // (Addr & (Granularity - 1)) + size - 1
  1371. if (TypeSize / 8 > 1)
  1372. LastAccessedByte = IRB.CreateAdd(
  1373. LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1));
  1374. // (uint8_t) ((Addr & (Granularity-1)) + size - 1)
  1375. LastAccessedByte =
  1376. IRB.CreateIntCast(LastAccessedByte, ShadowValue->getType(), false);
  1377. // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue
  1378. return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue);
  1379. }
  1380. Instruction *AddressSanitizer::instrumentAMDGPUAddress(
  1381. Instruction *OrigIns, Instruction *InsertBefore, Value *Addr,
  1382. uint32_t TypeSize, bool IsWrite, Value *SizeArgument) {
  1383. // Do not instrument unsupported addrspaces.
  1384. if (isUnsupportedAMDGPUAddrspace(Addr))
  1385. return nullptr;
  1386. Type *PtrTy = cast<PointerType>(Addr->getType()->getScalarType());
  1387. // Follow host instrumentation for global and constant addresses.
  1388. if (PtrTy->getPointerAddressSpace() != 0)
  1389. return InsertBefore;
1390. // Instrument generic addresses in supported address spaces.
  1391. IRBuilder<> IRB(InsertBefore);
  1392. Value *AddrLong = IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy());
  1393. Value *IsShared = IRB.CreateCall(AMDGPUAddressShared, {AddrLong});
  1394. Value *IsPrivate = IRB.CreateCall(AMDGPUAddressPrivate, {AddrLong});
  1395. Value *IsSharedOrPrivate = IRB.CreateOr(IsShared, IsPrivate);
  1396. Value *Cmp = IRB.CreateNot(IsSharedOrPrivate);
  1397. Value *AddrSpaceZeroLanding =
  1398. SplitBlockAndInsertIfThen(Cmp, InsertBefore, false);
  1399. InsertBefore = cast<Instruction>(AddrSpaceZeroLanding);
  1400. return InsertBefore;
  1401. }
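// The non-callback path below emits, roughly:
//   %shadow = load i8 from ((Addr >> Scale) + Offset)
//   if (%shadow != 0)                ; shadow == 0 means the whole granule is OK
//     if (access smaller than a granule: last-accessed-byte >= %shadow)
//       __asan_report_*(Addr)        ; unreachable unless Recover is set
// with branch weights marking the report path as cold.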
  1402. void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
  1403. Instruction *InsertBefore, Value *Addr,
  1404. uint32_t TypeSize, bool IsWrite,
  1405. Value *SizeArgument, bool UseCalls,
  1406. uint32_t Exp) {
  1407. if (TargetTriple.isAMDGPU()) {
  1408. InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore, Addr,
  1409. TypeSize, IsWrite, SizeArgument);
  1410. if (!InsertBefore)
  1411. return;
  1412. }
  1413. IRBuilder<> IRB(InsertBefore);
  1414. size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
  1415. const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
  1416. if (UseCalls && ClOptimizeCallbacks) {
  1417. const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
  1418. Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  1419. IRB.CreateCall(
  1420. Intrinsic::getDeclaration(M, Intrinsic::asan_check_memaccess),
  1421. {IRB.CreatePointerCast(Addr, Int8PtrTy),
  1422. ConstantInt::get(Int32Ty, AccessInfo.Packed)});
  1423. return;
  1424. }
  1425. Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  1426. if (UseCalls) {
  1427. if (Exp == 0)
  1428. IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex],
  1429. AddrLong);
  1430. else
  1431. IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
  1432. {AddrLong, ConstantInt::get(IRB.getInt32Ty(), Exp)});
  1433. return;
  1434. }
  1435. Type *ShadowTy =
  1436. IntegerType::get(*C, std::max(8U, TypeSize >> Mapping.Scale));
  1437. Type *ShadowPtrTy = PointerType::get(ShadowTy, 0);
  1438. Value *ShadowPtr = memToShadow(AddrLong, IRB);
  1439. Value *ShadowValue =
  1440. IRB.CreateLoad(ShadowTy, IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy));
  1441. Value *Cmp = IRB.CreateIsNotNull(ShadowValue);
  1442. size_t Granularity = 1ULL << Mapping.Scale;
  1443. Instruction *CrashTerm = nullptr;
  1444. if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) {
  1445. // We use branch weights for the slow path check, to indicate that the slow
  1446. // path is rarely taken. This seems to be the case for SPEC benchmarks.
  1447. Instruction *CheckTerm = SplitBlockAndInsertIfThen(
  1448. Cmp, InsertBefore, false, MDBuilder(*C).createBranchWeights(1, 100000));
  1449. assert(cast<BranchInst>(CheckTerm)->isUnconditional());
  1450. BasicBlock *NextBB = CheckTerm->getSuccessor(0);
  1451. IRB.SetInsertPoint(CheckTerm);
  1452. Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize);
  1453. if (Recover) {
  1454. CrashTerm = SplitBlockAndInsertIfThen(Cmp2, CheckTerm, false);
  1455. } else {
  1456. BasicBlock *CrashBlock =
  1457. BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
  1458. CrashTerm = new UnreachableInst(*C, CrashBlock);
  1459. BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
  1460. ReplaceInstWithInst(CheckTerm, NewTerm);
  1461. }
  1462. } else {
  1463. CrashTerm = SplitBlockAndInsertIfThen(Cmp, InsertBefore, !Recover);
  1464. }
  1465. Instruction *Crash = generateCrashCode(CrashTerm, AddrLong, IsWrite,
  1466. AccessSizeIndex, SizeArgument, Exp);
  1467. Crash->setDebugLoc(OrigIns->getDebugLoc());
  1468. }
  1469. // Instrument unusual size or unusual alignment.
1470. // We cannot do it with a single check, so we do a 1-byte check for the first
1471. // and the last bytes. We call __asan_report_*_n(addr, real_size) to be able
  1472. // to report the actual access size.
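// E.g. a 10-byte access is checked as two 1-byte accesses, one at addr and one
// at addr + 9, with the real size (10) passed as SizeArgument so the sized
// report callback can print it.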
  1473. void AddressSanitizer::instrumentUnusualSizeOrAlignment(
  1474. Instruction *I, Instruction *InsertBefore, Value *Addr, uint32_t TypeSize,
  1475. bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp) {
  1476. IRBuilder<> IRB(InsertBefore);
  1477. Value *Size = ConstantInt::get(IntptrTy, TypeSize / 8);
  1478. Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  1479. if (UseCalls) {
  1480. if (Exp == 0)
  1481. IRB.CreateCall(AsanMemoryAccessCallbackSized[IsWrite][0],
  1482. {AddrLong, Size});
  1483. else
  1484. IRB.CreateCall(AsanMemoryAccessCallbackSized[IsWrite][1],
  1485. {AddrLong, Size, ConstantInt::get(IRB.getInt32Ty(), Exp)});
  1486. } else {
  1487. Value *LastByte = IRB.CreateIntToPtr(
  1488. IRB.CreateAdd(AddrLong, ConstantInt::get(IntptrTy, TypeSize / 8 - 1)),
  1489. Addr->getType());
  1490. instrumentAddress(I, InsertBefore, Addr, 8, IsWrite, Size, false, Exp);
  1491. instrumentAddress(I, InsertBefore, LastByte, 8, IsWrite, Size, false, Exp);
  1492. }
  1493. }
  1494. void ModuleAddressSanitizer::poisonOneInitializer(Function &GlobalInit,
  1495. GlobalValue *ModuleName) {
  1496. // Set up the arguments to our poison/unpoison functions.
  1497. IRBuilder<> IRB(&GlobalInit.front(),
  1498. GlobalInit.front().getFirstInsertionPt());
  1499. // Add a call to poison all external globals before the given function starts.
  1500. Value *ModuleNameAddr = ConstantExpr::getPointerCast(ModuleName, IntptrTy);
  1501. IRB.CreateCall(AsanPoisonGlobals, ModuleNameAddr);
  1502. // Add calls to unpoison all globals before each return instruction.
  1503. for (auto &BB : GlobalInit)
  1504. if (ReturnInst *RI = dyn_cast<ReturnInst>(BB.getTerminator()))
  1505. CallInst::Create(AsanUnpoisonGlobals, "", RI);
  1506. }
  1507. void ModuleAddressSanitizer::createInitializerPoisonCalls(
  1508. Module &M, GlobalValue *ModuleName) {
  1509. GlobalVariable *GV = M.getGlobalVariable("llvm.global_ctors");
  1510. if (!GV)
  1511. return;
  1512. ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer());
  1513. if (!CA)
  1514. return;
  1515. for (Use &OP : CA->operands()) {
  1516. if (isa<ConstantAggregateZero>(OP)) continue;
  1517. ConstantStruct *CS = cast<ConstantStruct>(OP);
  1518. // Must have a function or null ptr.
  1519. if (Function *F = dyn_cast<Function>(CS->getOperand(1))) {
  1520. if (F->getName() == kAsanModuleCtorName) continue;
  1521. auto *Priority = cast<ConstantInt>(CS->getOperand(0));
  1522. // Don't instrument CTORs that will run before asan.module_ctor.
  1523. if (Priority->getLimitedValue() <= GetCtorAndDtorPriority(TargetTriple))
  1524. continue;
  1525. poisonOneInitializer(*F, ModuleName);
  1526. }
  1527. }
  1528. }
  1529. const GlobalVariable *
  1530. ModuleAddressSanitizer::getExcludedAliasedGlobal(const GlobalAlias &GA) const {
  1531. // In case this function should be expanded to include rules that do not just
  1532. // apply when CompileKernel is true, either guard all existing rules with an
  1533. // 'if (CompileKernel) { ... }' or be absolutely sure that all these rules
  1534. // should also apply to user space.
  1535. assert(CompileKernel && "Only expecting to be called when compiling kernel");
  1536. const Constant *C = GA.getAliasee();
  1537. // When compiling the kernel, globals that are aliased by symbols prefixed
  1538. // by "__" are special and cannot be padded with a redzone.
  1539. if (GA.getName().startswith("__"))
  1540. return dyn_cast<GlobalVariable>(C->stripPointerCastsAndAliases());
  1541. return nullptr;
  1542. }
  1543. bool ModuleAddressSanitizer::shouldInstrumentGlobal(GlobalVariable *G) const {
  1544. Type *Ty = G->getValueType();
  1545. LLVM_DEBUG(dbgs() << "GLOBAL: " << *G << "\n");
  1546. if (G->hasSanitizerMetadata() && G->getSanitizerMetadata().NoAddress)
  1547. return false;
  1548. if (!Ty->isSized()) return false;
  1549. if (!G->hasInitializer()) return false;
  1550. // Globals in address space 1 and 4 are supported for AMDGPU.
  1551. if (G->getAddressSpace() &&
  1552. !(TargetTriple.isAMDGPU() && !isUnsupportedAMDGPUAddrspace(G)))
  1553. return false;
  1554. if (GlobalWasGeneratedByCompiler(G)) return false; // Our own globals.
  1555. // Two problems with thread-locals:
  1556. // - The address of the main thread's copy can't be computed at link-time.
  1557. // - Need to poison all copies, not just the main thread's one.
  1558. if (G->isThreadLocal()) return false;
  1559. // For now, just ignore this Global if the alignment is large.
  1560. if (G->getAlign() && *G->getAlign() > getMinRedzoneSizeForGlobal()) return false;
  1561. // For non-COFF targets, only instrument globals known to be defined by this
  1562. // TU.
  1563. // FIXME: We can instrument comdat globals on ELF if we are using the
  1564. // GC-friendly metadata scheme.
  1565. if (!TargetTriple.isOSBinFormatCOFF()) {
  1566. if (!G->hasExactDefinition() || G->hasComdat())
  1567. return false;
  1568. } else {
  1569. // On COFF, don't instrument non-ODR linkages.
  1570. if (G->isInterposable())
  1571. return false;
  1572. }
  1573. // If a comdat is present, it must have a selection kind that implies ODR
  1574. // semantics: no duplicates, any, or exact match.
  1575. if (Comdat *C = G->getComdat()) {
  1576. switch (C->getSelectionKind()) {
  1577. case Comdat::Any:
  1578. case Comdat::ExactMatch:
  1579. case Comdat::NoDeduplicate:
  1580. break;
  1581. case Comdat::Largest:
  1582. case Comdat::SameSize:
  1583. return false;
  1584. }
  1585. }
  1586. if (G->hasSection()) {
  1587. // The kernel uses explicit sections for mostly special global variables
  1588. // that we should not instrument. E.g. the kernel may rely on their layout
  1589. // without redzones, or remove them at link time ("discard.*"), etc.
  1590. if (CompileKernel)
  1591. return false;
  1592. StringRef Section = G->getSection();
  1593. // Globals from llvm.metadata aren't emitted, do not instrument them.
  1594. if (Section == "llvm.metadata") return false;
  1595. // Do not instrument globals from special LLVM sections.
  1596. if (Section.contains("__llvm") || Section.contains("__LLVM"))
  1597. return false;
  1598. // Do not instrument function pointers to initialization and termination
  1599. // routines: dynamic linker will not properly handle redzones.
  1600. if (Section.startswith(".preinit_array") ||
  1601. Section.startswith(".init_array") ||
  1602. Section.startswith(".fini_array")) {
  1603. return false;
  1604. }
  1605. // Do not instrument user-defined sections (with names resembling
  1606. // valid C identifiers)
  1607. if (TargetTriple.isOSBinFormatELF()) {
  1608. if (llvm::all_of(Section,
  1609. [](char c) { return llvm::isAlnum(c) || c == '_'; }))
  1610. return false;
  1611. }
  1612. // On COFF, if the section name contains '$', it is highly likely that the
  1613. // user is using section sorting to create an array of globals similar to
  1614. // the way initialization callbacks are registered in .init_array and
  1615. // .CRT$XCU. The ATL also registers things in .ATL$__[azm]. Adding redzones
  1616. // to such globals is counterproductive, because the intent is that they
  1617. // will form an array, and out-of-bounds accesses are expected.
  1618. // See https://github.com/google/sanitizers/issues/305
  1619. // and http://msdn.microsoft.com/en-US/en-en/library/bb918180(v=vs.120).aspx
  1620. if (TargetTriple.isOSBinFormatCOFF() && Section.contains('$')) {
  1621. LLVM_DEBUG(dbgs() << "Ignoring global in sorted section (contains '$'): "
  1622. << *G << "\n");
  1623. return false;
  1624. }
  1625. if (TargetTriple.isOSBinFormatMachO()) {
  1626. StringRef ParsedSegment, ParsedSection;
  1627. unsigned TAA = 0, StubSize = 0;
  1628. bool TAAParsed;
  1629. cantFail(MCSectionMachO::ParseSectionSpecifier(
  1630. Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
  1631. // Ignore the globals from the __OBJC section. The ObjC runtime assumes
  1632. // those conform to /usr/lib/objc/runtime.h, so we can't add redzones to
  1633. // them.
  1634. if (ParsedSegment == "__OBJC" ||
  1635. (ParsedSegment == "__DATA" && ParsedSection.startswith("__objc_"))) {
  1636. LLVM_DEBUG(dbgs() << "Ignoring ObjC runtime global: " << *G << "\n");
  1637. return false;
  1638. }
  1639. // See https://github.com/google/sanitizers/issues/32
  1640. // Constant CFString instances are compiled in the following way:
  1641. // -- the string buffer is emitted into
  1642. // __TEXT,__cstring,cstring_literals
  1643. // -- the constant NSConstantString structure referencing that buffer
  1644. // is placed into __DATA,__cfstring
  1645. // Therefore there's no point in placing redzones into __DATA,__cfstring.
  1646. // Moreover, it causes the linker to crash on OS X 10.7
  1647. if (ParsedSegment == "__DATA" && ParsedSection == "__cfstring") {
  1648. LLVM_DEBUG(dbgs() << "Ignoring CFString: " << *G << "\n");
  1649. return false;
  1650. }
  1651. // The linker merges the contents of cstring_literals and removes the
  1652. // trailing zeroes.
  1653. if (ParsedSegment == "__TEXT" && (TAA & MachO::S_CSTRING_LITERALS)) {
  1654. LLVM_DEBUG(dbgs() << "Ignoring a cstring literal: " << *G << "\n");
  1655. return false;
  1656. }
  1657. }
  1658. }
  1659. if (CompileKernel) {
1660. // Globals that are prefixed by "__" are special and cannot be padded with a
  1661. // redzone.
  1662. if (G->getName().startswith("__"))
  1663. return false;
  1664. }
  1665. return true;
  1666. }
  1667. // On Mach-O platforms, we emit global metadata in a separate section of the
  1668. // binary in order to allow the linker to properly dead strip. This is only
  1669. // supported on recent versions of ld64.
  1670. bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection() const {
  1671. if (!TargetTriple.isOSBinFormatMachO())
  1672. return false;
  1673. if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
  1674. return true;
  1675. if (TargetTriple.isiOS() /* or tvOS */ && !TargetTriple.isOSVersionLT(9))
  1676. return true;
  1677. if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
  1678. return true;
  1679. if (TargetTriple.isDriverKit())
  1680. return true;
  1681. return false;
  1682. }
  1683. StringRef ModuleAddressSanitizer::getGlobalMetadataSection() const {
  1684. switch (TargetTriple.getObjectFormat()) {
  1685. case Triple::COFF: return ".ASAN$GL";
  1686. case Triple::ELF: return "asan_globals";
  1687. case Triple::MachO: return "__DATA,__asan_globals,regular";
  1688. case Triple::Wasm:
  1689. case Triple::GOFF:
  1690. case Triple::SPIRV:
  1691. case Triple::XCOFF:
  1692. case Triple::DXContainer:
  1693. report_fatal_error(
  1694. "ModuleAddressSanitizer not implemented for object file format");
  1695. case Triple::UnknownObjectFormat:
  1696. break;
  1697. }
  1698. llvm_unreachable("unsupported object format");
  1699. }
  1700. void ModuleAddressSanitizer::initializeCallbacks(Module &M) {
  1701. IRBuilder<> IRB(*C);
  1702. // Declare our poisoning and unpoisoning functions.
  1703. AsanPoisonGlobals =
  1704. M.getOrInsertFunction(kAsanPoisonGlobalsName, IRB.getVoidTy(), IntptrTy);
  1705. AsanUnpoisonGlobals =
  1706. M.getOrInsertFunction(kAsanUnpoisonGlobalsName, IRB.getVoidTy());
  1707. // Declare functions that register/unregister globals.
  1708. AsanRegisterGlobals = M.getOrInsertFunction(
  1709. kAsanRegisterGlobalsName, IRB.getVoidTy(), IntptrTy, IntptrTy);
  1710. AsanUnregisterGlobals = M.getOrInsertFunction(
  1711. kAsanUnregisterGlobalsName, IRB.getVoidTy(), IntptrTy, IntptrTy);
  1712. // Declare the functions that find globals in a shared object and then invoke
  1713. // the (un)register function on them.
  1714. AsanRegisterImageGlobals = M.getOrInsertFunction(
  1715. kAsanRegisterImageGlobalsName, IRB.getVoidTy(), IntptrTy);
  1716. AsanUnregisterImageGlobals = M.getOrInsertFunction(
  1717. kAsanUnregisterImageGlobalsName, IRB.getVoidTy(), IntptrTy);
  1718. AsanRegisterElfGlobals =
  1719. M.getOrInsertFunction(kAsanRegisterElfGlobalsName, IRB.getVoidTy(),
  1720. IntptrTy, IntptrTy, IntptrTy);
  1721. AsanUnregisterElfGlobals =
  1722. M.getOrInsertFunction(kAsanUnregisterElfGlobalsName, IRB.getVoidTy(),
  1723. IntptrTy, IntptrTy, IntptrTy);
  1724. }
  1725. // Put the metadata and the instrumented global in the same group. This ensures
  1726. // that the metadata is discarded if the instrumented global is discarded.
  1727. void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
  1728. GlobalVariable *G, GlobalVariable *Metadata, StringRef InternalSuffix) {
  1729. Module &M = *G->getParent();
  1730. Comdat *C = G->getComdat();
  1731. if (!C) {
  1732. if (!G->hasName()) {
  1733. // If G is unnamed, it must be internal. Give it an artificial name
  1734. // so we can put it in a comdat.
  1735. assert(G->hasLocalLinkage());
  1736. G->setName(Twine(kAsanGenPrefix) + "_anon_global");
  1737. }
  1738. if (!InternalSuffix.empty() && G->hasLocalLinkage()) {
  1739. std::string Name = std::string(G->getName());
  1740. Name += InternalSuffix;
  1741. C = M.getOrInsertComdat(Name);
  1742. } else {
  1743. C = M.getOrInsertComdat(G->getName());
  1744. }
  1745. // Make this IMAGE_COMDAT_SELECT_NODUPLICATES on COFF. Also upgrade private
  1746. // linkage to internal linkage so that a symbol table entry is emitted. This
  1747. // is necessary in order to create the comdat group.
  1748. if (TargetTriple.isOSBinFormatCOFF()) {
  1749. C->setSelectionKind(Comdat::NoDeduplicate);
  1750. if (G->hasPrivateLinkage())
  1751. G->setLinkage(GlobalValue::InternalLinkage);
  1752. }
  1753. G->setComdat(C);
  1754. }
  1755. assert(G->hasComdat());
  1756. Metadata->setComdat(G->getComdat());
  1757. }
  1758. // Create a separate metadata global and put it in the appropriate ASan
  1759. // global registration section.
  1760. GlobalVariable *
  1761. ModuleAddressSanitizer::CreateMetadataGlobal(Module &M, Constant *Initializer,
  1762. StringRef OriginalName) {
  1763. auto Linkage = TargetTriple.isOSBinFormatMachO()
  1764. ? GlobalVariable::InternalLinkage
  1765. : GlobalVariable::PrivateLinkage;
  1766. GlobalVariable *Metadata = new GlobalVariable(
  1767. M, Initializer->getType(), false, Linkage, Initializer,
  1768. Twine("__asan_global_") + GlobalValue::dropLLVMManglingEscape(OriginalName));
  1769. Metadata->setSection(getGlobalMetadataSection());
  1770. return Metadata;
  1771. }
  1772. Instruction *ModuleAddressSanitizer::CreateAsanModuleDtor(Module &M) {
  1773. AsanDtorFunction = Function::createWithDefaultAttr(
  1774. FunctionType::get(Type::getVoidTy(*C), false),
  1775. GlobalValue::InternalLinkage, 0, kAsanModuleDtorName, &M);
  1776. AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
  1777. // Ensure Dtor cannot be discarded, even if in a comdat.
  1778. appendToUsed(M, {AsanDtorFunction});
  1779. BasicBlock *AsanDtorBB = BasicBlock::Create(*C, "", AsanDtorFunction);
  1780. return ReturnInst::Create(*C, AsanDtorBB);
  1781. }
  1782. void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
  1783. IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
  1784. ArrayRef<Constant *> MetadataInitializers) {
  1785. assert(ExtendedGlobals.size() == MetadataInitializers.size());
  1786. auto &DL = M.getDataLayout();
  1787. SmallVector<GlobalValue *, 16> MetadataGlobals(ExtendedGlobals.size());
  1788. for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
  1789. Constant *Initializer = MetadataInitializers[i];
  1790. GlobalVariable *G = ExtendedGlobals[i];
  1791. GlobalVariable *Metadata =
  1792. CreateMetadataGlobal(M, Initializer, G->getName());
  1793. MDNode *MD = MDNode::get(M.getContext(), ValueAsMetadata::get(G));
  1794. Metadata->setMetadata(LLVMContext::MD_associated, MD);
  1795. MetadataGlobals[i] = Metadata;
  1796. // The MSVC linker always inserts padding when linking incrementally. We
  1797. // cope with that by aligning each struct to its size, which must be a power
  1798. // of two.
  1799. unsigned SizeOfGlobalStruct = DL.getTypeAllocSize(Initializer->getType());
  1800. assert(isPowerOf2_32(SizeOfGlobalStruct) &&
  1801. "global metadata will not be padded appropriately");
  1802. Metadata->setAlignment(assumeAligned(SizeOfGlobalStruct));
  1803. SetComdatForGlobalMetadata(G, Metadata, "");
  1804. }
  1805. // Update llvm.compiler.used, adding the new metadata globals. This is
  1806. // needed so that during LTO these variables stay alive.
  1807. if (!MetadataGlobals.empty())
  1808. appendToCompilerUsed(M, MetadataGlobals);
  1809. }
  1810. void ModuleAddressSanitizer::InstrumentGlobalsELF(
  1811. IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
  1812. ArrayRef<Constant *> MetadataInitializers,
  1813. const std::string &UniqueModuleId) {
  1814. assert(ExtendedGlobals.size() == MetadataInitializers.size());
1815. // Putting globals in a comdat changes the semantics and can potentially cause
1816. // false-negative ODR violations at link time. If ODR indicators are used, we
1817. // keep the comdat sections, as link-time ODR violations will be detected on
1818. // the ODR indicator symbols.
  1819. bool UseComdatForGlobalsGC = UseOdrIndicator;
  1820. SmallVector<GlobalValue *, 16> MetadataGlobals(ExtendedGlobals.size());
  1821. for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
  1822. GlobalVariable *G = ExtendedGlobals[i];
  1823. GlobalVariable *Metadata =
  1824. CreateMetadataGlobal(M, MetadataInitializers[i], G->getName());
  1825. MDNode *MD = MDNode::get(M.getContext(), ValueAsMetadata::get(G));
  1826. Metadata->setMetadata(LLVMContext::MD_associated, MD);
  1827. MetadataGlobals[i] = Metadata;
  1828. if (UseComdatForGlobalsGC)
  1829. SetComdatForGlobalMetadata(G, Metadata, UniqueModuleId);
  1830. }
  1831. // Update llvm.compiler.used, adding the new metadata globals. This is
  1832. // needed so that during LTO these variables stay alive.
  1833. if (!MetadataGlobals.empty())
  1834. appendToCompilerUsed(M, MetadataGlobals);
  1835. // RegisteredFlag serves two purposes. First, we can pass it to dladdr()
  1836. // to look up the loaded image that contains it. Second, we can store in it
  1837. // whether registration has already occurred, to prevent duplicate
  1838. // registration.
  1839. //
  1840. // Common linkage ensures that there is only one global per shared library.
  1841. GlobalVariable *RegisteredFlag = new GlobalVariable(
  1842. M, IntptrTy, false, GlobalVariable::CommonLinkage,
  1843. ConstantInt::get(IntptrTy, 0), kAsanGlobalsRegisteredFlagName);
  1844. RegisteredFlag->setVisibility(GlobalVariable::HiddenVisibility);
  1845. // Create start and stop symbols.
  1846. GlobalVariable *StartELFMetadata = new GlobalVariable(
  1847. M, IntptrTy, false, GlobalVariable::ExternalWeakLinkage, nullptr,
  1848. "__start_" + getGlobalMetadataSection());
  1849. StartELFMetadata->setVisibility(GlobalVariable::HiddenVisibility);
  1850. GlobalVariable *StopELFMetadata = new GlobalVariable(
  1851. M, IntptrTy, false, GlobalVariable::ExternalWeakLinkage, nullptr,
  1852. "__stop_" + getGlobalMetadataSection());
  1853. StopELFMetadata->setVisibility(GlobalVariable::HiddenVisibility);
  1854. // Create a call to register the globals with the runtime.
  1855. if (ConstructorKind == AsanCtorKind::Global)
  1856. IRB.CreateCall(AsanRegisterElfGlobals,
  1857. {IRB.CreatePointerCast(RegisteredFlag, IntptrTy),
  1858. IRB.CreatePointerCast(StartELFMetadata, IntptrTy),
  1859. IRB.CreatePointerCast(StopELFMetadata, IntptrTy)});
  1860. // We also need to unregister globals at the end, e.g., when a shared library
  1861. // gets closed.
  1862. if (DestructorKind != AsanDtorKind::None) {
  1863. IRBuilder<> IrbDtor(CreateAsanModuleDtor(M));
  1864. IrbDtor.CreateCall(AsanUnregisterElfGlobals,
  1865. {IRB.CreatePointerCast(RegisteredFlag, IntptrTy),
  1866. IRB.CreatePointerCast(StartELFMetadata, IntptrTy),
  1867. IRB.CreatePointerCast(StopELFMetadata, IntptrTy)});
  1868. }
  1869. }
  1870. void ModuleAddressSanitizer::InstrumentGlobalsMachO(
  1871. IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
  1872. ArrayRef<Constant *> MetadataInitializers) {
  1873. assert(ExtendedGlobals.size() == MetadataInitializers.size());
1874. // On recent Mach-O platforms, use a structure which binds the liveness of
1875. // the global variable to the metadata struct. Keep a list of the "Liveness"
1876. // GVs created so they can be added to llvm.compiler.used.
  1877. StructType *LivenessTy = StructType::get(IntptrTy, IntptrTy);
  1878. SmallVector<GlobalValue *, 16> LivenessGlobals(ExtendedGlobals.size());
  1879. for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
  1880. Constant *Initializer = MetadataInitializers[i];
  1881. GlobalVariable *G = ExtendedGlobals[i];
  1882. GlobalVariable *Metadata =
  1883. CreateMetadataGlobal(M, Initializer, G->getName());
  1884. // On recent Mach-O platforms, we emit the global metadata in a way that
  1885. // allows the linker to properly strip dead globals.
  1886. auto LivenessBinder =
  1887. ConstantStruct::get(LivenessTy, Initializer->getAggregateElement(0u),
  1888. ConstantExpr::getPointerCast(Metadata, IntptrTy));
  1889. GlobalVariable *Liveness = new GlobalVariable(
  1890. M, LivenessTy, false, GlobalVariable::InternalLinkage, LivenessBinder,
  1891. Twine("__asan_binder_") + G->getName());
  1892. Liveness->setSection("__DATA,__asan_liveness,regular,live_support");
  1893. LivenessGlobals[i] = Liveness;
  1894. }
  1895. // Update llvm.compiler.used, adding the new liveness globals. This is
  1896. // needed so that during LTO these variables stay alive. The alternative
1897. // would be to have the linker handle the LTO symbols, but libLTO's
1898. // current API does not expose access to the section for each symbol.
  1899. if (!LivenessGlobals.empty())
  1900. appendToCompilerUsed(M, LivenessGlobals);
  1901. // RegisteredFlag serves two purposes. First, we can pass it to dladdr()
  1902. // to look up the loaded image that contains it. Second, we can store in it
  1903. // whether registration has already occurred, to prevent duplicate
  1904. // registration.
  1905. //
  1906. // common linkage ensures that there is only one global per shared library.
  1907. GlobalVariable *RegisteredFlag = new GlobalVariable(
  1908. M, IntptrTy, false, GlobalVariable::CommonLinkage,
  1909. ConstantInt::get(IntptrTy, 0), kAsanGlobalsRegisteredFlagName);
  1910. RegisteredFlag->setVisibility(GlobalVariable::HiddenVisibility);
  1911. if (ConstructorKind == AsanCtorKind::Global)
  1912. IRB.CreateCall(AsanRegisterImageGlobals,
  1913. {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)});
  1914. // We also need to unregister globals at the end, e.g., when a shared library
  1915. // gets closed.
  1916. if (DestructorKind != AsanDtorKind::None) {
  1917. IRBuilder<> IrbDtor(CreateAsanModuleDtor(M));
  1918. IrbDtor.CreateCall(AsanUnregisterImageGlobals,
  1919. {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)});
  1920. }
  1921. }
  1922. void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
  1923. IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
  1924. ArrayRef<Constant *> MetadataInitializers) {
  1925. assert(ExtendedGlobals.size() == MetadataInitializers.size());
  1926. unsigned N = ExtendedGlobals.size();
  1927. assert(N > 0);
  1928. // On platforms that don't have a custom metadata section, we emit an array
  1929. // of global metadata structures.
  1930. ArrayType *ArrayOfGlobalStructTy =
  1931. ArrayType::get(MetadataInitializers[0]->getType(), N);
  1932. auto AllGlobals = new GlobalVariable(
  1933. M, ArrayOfGlobalStructTy, false, GlobalVariable::InternalLinkage,
  1934. ConstantArray::get(ArrayOfGlobalStructTy, MetadataInitializers), "");
  1935. if (Mapping.Scale > 3)
  1936. AllGlobals->setAlignment(Align(1ULL << Mapping.Scale));
  1937. if (ConstructorKind == AsanCtorKind::Global)
  1938. IRB.CreateCall(AsanRegisterGlobals,
  1939. {IRB.CreatePointerCast(AllGlobals, IntptrTy),
  1940. ConstantInt::get(IntptrTy, N)});
  1941. // We also need to unregister globals at the end, e.g., when a shared library
  1942. // gets closed.
  1943. if (DestructorKind != AsanDtorKind::None) {
  1944. IRBuilder<> IrbDtor(CreateAsanModuleDtor(M));
  1945. IrbDtor.CreateCall(AsanUnregisterGlobals,
  1946. {IRB.CreatePointerCast(AllGlobals, IntptrTy),
  1947. ConstantInt::get(IntptrTy, N)});
  1948. }
  1949. }
  1950. // This function replaces all global variables with new variables that have
  1951. // trailing redzones. It also creates a function that poisons
  1952. // redzones and inserts this function into llvm.global_ctors.
  1953. // Sets *CtorComdat to true if the global registration code emitted into the
  1954. // asan constructor is comdat-compatible.
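// Sketch of the per-global rewrite performed below (the redzone size N comes
// from getRedzoneSizeForGlobal and depends on the global's size):
//   @g = global [3 x i32] ...
// becomes roughly
//   @g = global { [3 x i32], [N x i8] } { <old init>, zeroinitializer },
//        align getMinRedzoneSizeForGlobal()
// and a descriptor (beg, size, size_with_redzone, name, module_name,
// has_dynamic_init, padding, odr_indicator) is appended for the runtime.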
  1955. bool ModuleAddressSanitizer::InstrumentGlobals(IRBuilder<> &IRB, Module &M,
  1956. bool *CtorComdat) {
  1957. *CtorComdat = false;
  1958. // Build set of globals that are aliased by some GA, where
  1959. // getExcludedAliasedGlobal(GA) returns the relevant GlobalVariable.
  1960. SmallPtrSet<const GlobalVariable *, 16> AliasedGlobalExclusions;
  1961. if (CompileKernel) {
  1962. for (auto &GA : M.aliases()) {
  1963. if (const GlobalVariable *GV = getExcludedAliasedGlobal(GA))
  1964. AliasedGlobalExclusions.insert(GV);
  1965. }
  1966. }
  1967. SmallVector<GlobalVariable *, 16> GlobalsToChange;
  1968. for (auto &G : M.globals()) {
  1969. if (!AliasedGlobalExclusions.count(&G) && shouldInstrumentGlobal(&G))
  1970. GlobalsToChange.push_back(&G);
  1971. }
  1972. size_t n = GlobalsToChange.size();
  1973. if (n == 0) {
  1974. *CtorComdat = true;
  1975. return false;
  1976. }
  1977. auto &DL = M.getDataLayout();
  1978. // A global is described by a structure
  1979. // size_t beg;
  1980. // size_t size;
  1981. // size_t size_with_redzone;
  1982. // const char *name;
  1983. // const char *module_name;
  1984. // size_t has_dynamic_init;
  1985. // size_t padding_for_windows_msvc_incremental_link;
  1986. // size_t odr_indicator;
  1987. // We initialize an array of such structures and pass it to a run-time call.
  1988. StructType *GlobalStructTy =
  1989. StructType::get(IntptrTy, IntptrTy, IntptrTy, IntptrTy, IntptrTy,
  1990. IntptrTy, IntptrTy, IntptrTy);
  1991. SmallVector<GlobalVariable *, 16> NewGlobals(n);
  1992. SmallVector<Constant *, 16> Initializers(n);
  1993. bool HasDynamicallyInitializedGlobals = false;
1994. // We shouldn't merge identical module names, as this string serves as a unique
1995. // module ID at runtime.
  1996. GlobalVariable *ModuleName = createPrivateGlobalForString(
  1997. M, M.getModuleIdentifier(), /*AllowMerging*/ false, kAsanGenPrefix);
  1998. for (size_t i = 0; i < n; i++) {
  1999. GlobalVariable *G = GlobalsToChange[i];
  2000. GlobalValue::SanitizerMetadata MD;
  2001. if (G->hasSanitizerMetadata())
  2002. MD = G->getSanitizerMetadata();
  2003. // The runtime library tries demangling symbol names in the descriptor but
  2004. // functionality like __cxa_demangle may be unavailable (e.g.
  2005. // -static-libstdc++). So we demangle the symbol names here.
  2006. std::string NameForGlobal = G->getName().str();
  2007. GlobalVariable *Name =
  2008. createPrivateGlobalForString(M, llvm::demangle(NameForGlobal),
  2009. /*AllowMerging*/ true, kAsanGenPrefix);
  2010. Type *Ty = G->getValueType();
  2011. const uint64_t SizeInBytes = DL.getTypeAllocSize(Ty);
  2012. const uint64_t RightRedzoneSize = getRedzoneSizeForGlobal(SizeInBytes);
  2013. Type *RightRedZoneTy = ArrayType::get(IRB.getInt8Ty(), RightRedzoneSize);
  2014. StructType *NewTy = StructType::get(Ty, RightRedZoneTy);
  2015. Constant *NewInitializer = ConstantStruct::get(
  2016. NewTy, G->getInitializer(), Constant::getNullValue(RightRedZoneTy));
  2017. // Create a new global variable with enough space for a redzone.
  2018. GlobalValue::LinkageTypes Linkage = G->getLinkage();
  2019. if (G->isConstant() && Linkage == GlobalValue::PrivateLinkage)
  2020. Linkage = GlobalValue::InternalLinkage;
  2021. GlobalVariable *NewGlobal = new GlobalVariable(
  2022. M, NewTy, G->isConstant(), Linkage, NewInitializer, "", G,
  2023. G->getThreadLocalMode(), G->getAddressSpace());
  2024. NewGlobal->copyAttributesFrom(G);
  2025. NewGlobal->setComdat(G->getComdat());
  2026. NewGlobal->setAlignment(MaybeAlign(getMinRedzoneSizeForGlobal()));
  2027. // Don't fold globals with redzones. ODR violation detector and redzone
  2028. // poisoning implicitly creates a dependence on the global's address, so it
  2029. // is no longer valid for it to be marked unnamed_addr.
  2030. NewGlobal->setUnnamedAddr(GlobalValue::UnnamedAddr::None);
  2031. // Move null-terminated C strings to "__asan_cstring" section on Darwin.
  2032. if (TargetTriple.isOSBinFormatMachO() && !G->hasSection() &&
  2033. G->isConstant()) {
  2034. auto Seq = dyn_cast<ConstantDataSequential>(G->getInitializer());
  2035. if (Seq && Seq->isCString())
  2036. NewGlobal->setSection("__TEXT,__asan_cstring,regular");
  2037. }
  2038. // Transfer the debug info and type metadata. The payload starts at offset
  2039. // zero so we can copy the metadata over as is.
  2040. NewGlobal->copyMetadata(G, 0);
  2041. Value *Indices2[2];
  2042. Indices2[0] = IRB.getInt32(0);
  2043. Indices2[1] = IRB.getInt32(0);
  2044. G->replaceAllUsesWith(
  2045. ConstantExpr::getGetElementPtr(NewTy, NewGlobal, Indices2, true));
  2046. NewGlobal->takeName(G);
  2047. G->eraseFromParent();
  2048. NewGlobals[i] = NewGlobal;
  2049. Constant *ODRIndicator = ConstantExpr::getNullValue(IRB.getInt8PtrTy());
  2050. GlobalValue *InstrumentedGlobal = NewGlobal;
  2051. bool CanUsePrivateAliases =
  2052. TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
  2053. TargetTriple.isOSBinFormatWasm();
  2054. if (CanUsePrivateAliases && UsePrivateAlias) {
2055. // Create a local alias for NewGlobal to avoid crashing the ODR checker when
2056. // instrumented and non-instrumented libraries are mixed.
  2057. InstrumentedGlobal =
  2058. GlobalAlias::create(GlobalValue::PrivateLinkage, "", NewGlobal);
  2059. }
2060. // ODR violations cannot happen for local linkage; use the -1 sentinel as the indicator.
  2061. if (NewGlobal->hasLocalLinkage()) {
  2062. ODRIndicator = ConstantExpr::getIntToPtr(ConstantInt::get(IntptrTy, -1),
  2063. IRB.getInt8PtrTy());
  2064. } else if (UseOdrIndicator) {
2065. // With local aliases, we need to provide another externally visible
2066. // symbol __odr_asan_XXX to detect ODR violations.
  2067. auto *ODRIndicatorSym =
  2068. new GlobalVariable(M, IRB.getInt8Ty(), false, Linkage,
  2069. Constant::getNullValue(IRB.getInt8Ty()),
  2070. kODRGenPrefix + NameForGlobal, nullptr,
  2071. NewGlobal->getThreadLocalMode());
  2072. // Set meaningful attributes for indicator symbol.
  2073. ODRIndicatorSym->setVisibility(NewGlobal->getVisibility());
  2074. ODRIndicatorSym->setDLLStorageClass(NewGlobal->getDLLStorageClass());
  2075. ODRIndicatorSym->setAlignment(Align(1));
  2076. ODRIndicator = ODRIndicatorSym;
  2077. }
  2078. Constant *Initializer = ConstantStruct::get(
  2079. GlobalStructTy,
  2080. ConstantExpr::getPointerCast(InstrumentedGlobal, IntptrTy),
  2081. ConstantInt::get(IntptrTy, SizeInBytes),
  2082. ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
  2083. ConstantExpr::getPointerCast(Name, IntptrTy),
  2084. ConstantExpr::getPointerCast(ModuleName, IntptrTy),
  2085. ConstantInt::get(IntptrTy, MD.IsDynInit),
  2086. Constant::getNullValue(IntptrTy),
  2087. ConstantExpr::getPointerCast(ODRIndicator, IntptrTy));
  2088. if (ClInitializers && MD.IsDynInit)
  2089. HasDynamicallyInitializedGlobals = true;
  2090. LLVM_DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n");
  2091. Initializers[i] = Initializer;
  2092. }
2093. // Add instrumented globals to the llvm.compiler.used list to prevent LTO from
2094. // ConstantMerge'ing them.
  2095. SmallVector<GlobalValue *, 16> GlobalsToAddToUsedList;
  2096. for (size_t i = 0; i < n; i++) {
  2097. GlobalVariable *G = NewGlobals[i];
  2098. if (G->getName().empty()) continue;
  2099. GlobalsToAddToUsedList.push_back(G);
  2100. }
  2101. appendToCompilerUsed(M, ArrayRef<GlobalValue *>(GlobalsToAddToUsedList));
  2102. std::string ELFUniqueModuleId =
  2103. (UseGlobalsGC && TargetTriple.isOSBinFormatELF()) ? getUniqueModuleId(&M)
  2104. : "";
  2105. if (!ELFUniqueModuleId.empty()) {
  2106. InstrumentGlobalsELF(IRB, M, NewGlobals, Initializers, ELFUniqueModuleId);
  2107. *CtorComdat = true;
  2108. } else if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
  2109. InstrumentGlobalsCOFF(IRB, M, NewGlobals, Initializers);
  2110. } else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
  2111. InstrumentGlobalsMachO(IRB, M, NewGlobals, Initializers);
  2112. } else {
  2113. InstrumentGlobalsWithMetadataArray(IRB, M, NewGlobals, Initializers);
  2114. }
  2115. // Create calls for poisoning before initializers run and unpoisoning after.
  2116. if (HasDynamicallyInitializedGlobals)
  2117. createInitializerPoisonCalls(M, ModuleName);
  2118. LLVM_DEBUG(dbgs() << M);
  2119. return true;
  2120. }
  2121. uint64_t
  2122. ModuleAddressSanitizer::getRedzoneSizeForGlobal(uint64_t SizeInBytes) const {
  2123. constexpr uint64_t kMaxRZ = 1 << 18;
  2124. const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
  2125. uint64_t RZ = 0;
  2126. if (SizeInBytes <= MinRZ / 2) {
2127. // Reduce the redzone size for small objects, e.g. int or char[1]. MinRZ is
2128. // at least 32 bytes, so only do this when SizeInBytes is at most half of
2129. // MinRZ.
  2130. RZ = MinRZ - SizeInBytes;
  2131. } else {
  2132. // Calculate RZ, where MinRZ <= RZ <= MaxRZ, and RZ ~ 1/4 * SizeInBytes.
  2133. RZ = std::clamp((SizeInBytes / MinRZ / 4) * MinRZ, MinRZ, kMaxRZ);
  2134. // Round up to multiple of MinRZ.
  2135. if (SizeInBytes % MinRZ)
  2136. RZ += MinRZ - (SizeInBytes % MinRZ);
  2137. }
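// E.g. with MinRZ = 32: SizeInBytes = 4 gives RZ = 28 (small-object case),
// while SizeInBytes = 100 gives RZ = 32 + 28 = 60, so RZ + SizeInBytes stays a
// multiple of MinRZ.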
  2138. assert((RZ + SizeInBytes) % MinRZ == 0);
  2139. return RZ;
  2140. }
  2141. int ModuleAddressSanitizer::GetAsanVersion(const Module &M) const {
  2142. int LongSize = M.getDataLayout().getPointerSizeInBits();
  2143. bool isAndroid = Triple(M.getTargetTriple()).isAndroid();
  2144. int Version = 8;
  2145. // 32-bit Android is one version ahead because of the switch to dynamic
  2146. // shadow.
  2147. Version += (LongSize == 32 && isAndroid);
  2148. return Version;
  2149. }
  2150. bool ModuleAddressSanitizer::instrumentModule(Module &M) {
  2151. initializeCallbacks(M);
2152. // Create a module constructor. A destructor is created lazily because not
2153. // every platform, and not every module, needs it.
  2154. if (ConstructorKind == AsanCtorKind::Global) {
  2155. if (CompileKernel) {
  2156. // The kernel always builds with its own runtime, and therefore does not
  2157. // need the init and version check calls.
  2158. AsanCtorFunction = createSanitizerCtor(M, kAsanModuleCtorName);
  2159. } else {
  2160. std::string AsanVersion = std::to_string(GetAsanVersion(M));
  2161. std::string VersionCheckName =
  2162. ClInsertVersionCheck ? (kAsanVersionCheckNamePrefix + AsanVersion) : "";
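// E.g. "__asan_version_mismatch_check_v8" with the version returned by
// GetAsanVersion() below.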
  2163. std::tie(AsanCtorFunction, std::ignore) =
  2164. createSanitizerCtorAndInitFunctions(M, kAsanModuleCtorName,
  2165. kAsanInitName, /*InitArgTypes=*/{},
  2166. /*InitArgs=*/{}, VersionCheckName);
  2167. }
  2168. }
  2169. bool CtorComdat = true;
  2170. if (ClGlobals) {
  2171. assert(AsanCtorFunction || ConstructorKind == AsanCtorKind::None);
  2172. if (AsanCtorFunction) {
  2173. IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
  2174. InstrumentGlobals(IRB, M, &CtorComdat);
  2175. } else {
  2176. IRBuilder<> IRB(*C);
  2177. InstrumentGlobals(IRB, M, &CtorComdat);
  2178. }
  2179. }
  2180. const uint64_t Priority = GetCtorAndDtorPriority(TargetTriple);
2181. // Put the constructor and destructor in comdat if both
2182. // (1) global instrumentation is not TU-specific, and
2183. // (2) the target is ELF.
  2184. if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
  2185. if (AsanCtorFunction) {
  2186. AsanCtorFunction->setComdat(M.getOrInsertComdat(kAsanModuleCtorName));
  2187. appendToGlobalCtors(M, AsanCtorFunction, Priority, AsanCtorFunction);
  2188. }
  2189. if (AsanDtorFunction) {
  2190. AsanDtorFunction->setComdat(M.getOrInsertComdat(kAsanModuleDtorName));
  2191. appendToGlobalDtors(M, AsanDtorFunction, Priority, AsanDtorFunction);
  2192. }
  2193. } else {
  2194. if (AsanCtorFunction)
  2195. appendToGlobalCtors(M, AsanCtorFunction, Priority);
  2196. if (AsanDtorFunction)
  2197. appendToGlobalDtors(M, AsanDtorFunction, Priority);
  2198. }
  2199. return true;
  2200. }
  2201. void AddressSanitizer::initializeCallbacks(Module &M, const TargetLibraryInfo *TLI) {
  2202. IRBuilder<> IRB(*C);
  2203. // Create __asan_report* callbacks.
  2204. // IsWrite, TypeSize and Exp are encoded in the function name.
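// With the default prefixes this produces names such as __asan_report_load4,
// __asan_report_exp_store8_noabort, __asan_load2 and __asan_storeN.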
  2205. for (int Exp = 0; Exp < 2; Exp++) {
  2206. for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
  2207. const std::string TypeStr = AccessIsWrite ? "store" : "load";
  2208. const std::string ExpStr = Exp ? "exp_" : "";
  2209. const std::string EndingStr = Recover ? "_noabort" : "";
  2210. SmallVector<Type *, 3> Args2 = {IntptrTy, IntptrTy};
  2211. SmallVector<Type *, 2> Args1{1, IntptrTy};
  2212. AttributeList AL2;
  2213. AttributeList AL1;
  2214. if (Exp) {
  2215. Type *ExpType = Type::getInt32Ty(*C);
  2216. Args2.push_back(ExpType);
  2217. Args1.push_back(ExpType);
  2218. if (auto AK = TLI->getExtAttrForI32Param(false)) {
  2219. AL2 = AL2.addParamAttribute(*C, 2, AK);
  2220. AL1 = AL1.addParamAttribute(*C, 1, AK);
  2221. }
  2222. }
  2223. AsanErrorCallbackSized[AccessIsWrite][Exp] = M.getOrInsertFunction(
  2224. kAsanReportErrorTemplate + ExpStr + TypeStr + "_n" + EndingStr,
  2225. FunctionType::get(IRB.getVoidTy(), Args2, false), AL2);
  2226. AsanMemoryAccessCallbackSized[AccessIsWrite][Exp] = M.getOrInsertFunction(
  2227. ClMemoryAccessCallbackPrefix + ExpStr + TypeStr + "N" + EndingStr,
  2228. FunctionType::get(IRB.getVoidTy(), Args2, false), AL2);
  2229. for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
  2230. AccessSizeIndex++) {
  2231. const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
  2232. AsanErrorCallback[AccessIsWrite][Exp][AccessSizeIndex] =
  2233. M.getOrInsertFunction(
  2234. kAsanReportErrorTemplate + ExpStr + Suffix + EndingStr,
  2235. FunctionType::get(IRB.getVoidTy(), Args1, false), AL1);
  2236. AsanMemoryAccessCallback[AccessIsWrite][Exp][AccessSizeIndex] =
  2237. M.getOrInsertFunction(
  2238. ClMemoryAccessCallbackPrefix + ExpStr + Suffix + EndingStr,
  2239. FunctionType::get(IRB.getVoidTy(), Args1, false), AL1);
  2240. }
  2241. }
  2242. }
  2243. const std::string MemIntrinCallbackPrefix =
  2244. (CompileKernel && !ClKasanMemIntrinCallbackPrefix)
  2245. ? std::string("")
  2246. : ClMemoryAccessCallbackPrefix;
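// I.e. __asan_memmove/__asan_memcpy/__asan_memset in user mode; kernel builds
// use the plain libc names unless ClKasanMemIntrinCallbackPrefix is set.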
  2247. AsanMemmove = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memmove",
  2248. IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
  2249. IRB.getInt8PtrTy(), IntptrTy);
  2250. AsanMemcpy = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memcpy",
  2251. IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
  2252. IRB.getInt8PtrTy(), IntptrTy);
  2253. AsanMemset = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memset",
  2254. TLI->getAttrList(C, {1}, /*Signed=*/false),
  2255. IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
  2256. IRB.getInt32Ty(), IntptrTy);
  2257. AsanHandleNoReturnFunc =
  2258. M.getOrInsertFunction(kAsanHandleNoReturnName, IRB.getVoidTy());
  2259. AsanPtrCmpFunction =
  2260. M.getOrInsertFunction(kAsanPtrCmp, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2261. AsanPtrSubFunction =
  2262. M.getOrInsertFunction(kAsanPtrSub, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2263. if (Mapping.InGlobal)
  2264. AsanShadowGlobal = M.getOrInsertGlobal("__asan_shadow",
  2265. ArrayType::get(IRB.getInt8Ty(), 0));
  2266. AMDGPUAddressShared = M.getOrInsertFunction(
  2267. kAMDGPUAddressSharedName, IRB.getInt1Ty(), IRB.getInt8PtrTy());
  2268. AMDGPUAddressPrivate = M.getOrInsertFunction(
  2269. kAMDGPUAddressPrivateName, IRB.getInt1Ty(), IRB.getInt8PtrTy());
  2270. }
  2271. bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
  2272. // For each NSObject descendant having a +load method, this method is invoked
  2273. // by the ObjC runtime before any of the static constructors is called.
  2274. // Therefore we need to instrument such methods with a call to __asan_init
  2275. // at the beginning in order to initialize our runtime before any access to
  2276. // the shadow memory.
  2277. // We cannot just ignore these methods, because they may call other
  2278. // instrumented functions.
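// Such methods are named e.g. "+[MyClass load]", hence the substring check.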
  2279. if (F.getName().find(" load]") != std::string::npos) {
  2280. FunctionCallee AsanInitFunction =
  2281. declareSanitizerInitFunction(*F.getParent(), kAsanInitName, {});
  2282. IRBuilder<> IRB(&F.front(), F.front().begin());
  2283. IRB.CreateCall(AsanInitFunction, {});
  2284. return true;
  2285. }
  2286. return false;
  2287. }
  2288. bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
  2289. // Generate code only when dynamic addressing is needed.
  2290. if (Mapping.Offset != kDynamicShadowSentinel)
  2291. return false;
  2292. IRBuilder<> IRB(&F.front().front());
  2293. if (Mapping.InGlobal) {
  2294. if (ClWithIfuncSuppressRemat) {
  2295. // An empty inline asm with input reg == output reg.
  2296. // An opaque pointer-to-int cast, basically.
  2297. InlineAsm *Asm = InlineAsm::get(
  2298. FunctionType::get(IntptrTy, {AsanShadowGlobal->getType()}, false),
  2299. StringRef(""), StringRef("=r,0"),
  2300. /*hasSideEffects=*/false);
  2301. LocalDynamicShadow =
  2302. IRB.CreateCall(Asm, {AsanShadowGlobal}, ".asan.shadow");
  2303. } else {
  2304. LocalDynamicShadow =
  2305. IRB.CreatePointerCast(AsanShadowGlobal, IntptrTy, ".asan.shadow");
  2306. }
  2307. } else {
  2308. Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
  2309. kAsanShadowMemoryDynamicAddress, IntptrTy);
  2310. LocalDynamicShadow = IRB.CreateLoad(IntptrTy, GlobalDynamicAddress);
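// I.e. roughly: LocalDynamicShadow = load @__asan_shadow_memory_dynamic_address,
// a global that the runtime fills in with the shadow base.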
  2311. }
  2312. return true;
  2313. }
  2314. void AddressSanitizer::markEscapedLocalAllocas(Function &F) {
  2315. // Find the one possible call to llvm.localescape and pre-mark allocas passed
  2316. // to it as uninteresting. This assumes we haven't started processing allocas
  2317. // yet. This check is done up front because iterating the use list in
  2318. // isInterestingAlloca would be algorithmically slower.
  2319. assert(ProcessedAllocas.empty() && "must process localescape before allocas");
  2320. // Try to get the declaration of llvm.localescape. If it's not in the module,
  2321. // we can exit early.
  2322. if (!F.getParent()->getFunction("llvm.localescape")) return;
2323. // Look for a call to llvm.localescape in the entry block. It can't be in
2324. // any other block.
  2325. for (Instruction &I : F.getEntryBlock()) {
  2326. IntrinsicInst *II = dyn_cast<IntrinsicInst>(&I);
  2327. if (II && II->getIntrinsicID() == Intrinsic::localescape) {
  2328. // We found a call. Mark all the allocas passed in as uninteresting.
  2329. for (Value *Arg : II->args()) {
  2330. AllocaInst *AI = dyn_cast<AllocaInst>(Arg->stripPointerCasts());
  2331. assert(AI && AI->isStaticAlloca() &&
  2332. "non-static alloca arg to localescape");
  2333. ProcessedAllocas[AI] = false;
  2334. }
  2335. break;
  2336. }
  2337. }
  2338. }
  2339. bool AddressSanitizer::suppressInstrumentationSiteForDebug(int &Instrumented) {
  2340. bool ShouldInstrument =
  2341. ClDebugMin < 0 || ClDebugMax < 0 ||
  2342. (Instrumented >= ClDebugMin && Instrumented <= ClDebugMax);
  2343. Instrumented++;
  2344. return !ShouldInstrument;
  2345. }
  2346. bool AddressSanitizer::instrumentFunction(Function &F,
  2347. const TargetLibraryInfo *TLI) {
  2348. if (F.empty())
  2349. return false;
  2350. if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) return false;
  2351. if (!ClDebugFunc.empty() && ClDebugFunc == F.getName()) return false;
  2352. if (F.getName().startswith("__asan_")) return false;
  2353. bool FunctionModified = false;
  2354. // If needed, insert __asan_init before checking for SanitizeAddress attr.
  2355. // This function needs to be called even if the function body is not
  2356. // instrumented.
  2357. if (maybeInsertAsanInitAtFunctionEntry(F))
  2358. FunctionModified = true;
  2359. // Leave if the function doesn't need instrumentation.
  2360. if (!F.hasFnAttribute(Attribute::SanitizeAddress)) return FunctionModified;
  2361. if (F.hasFnAttribute(Attribute::DisableSanitizerInstrumentation))
  2362. return FunctionModified;
  2363. LLVM_DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n");
  2364. initializeCallbacks(*F.getParent(), TLI);
  2365. FunctionStateRAII CleanupObj(this);
  2366. FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(F);
  2367. // We can't instrument allocas used with llvm.localescape. Only static allocas
  2368. // can be passed to that intrinsic.
  2369. markEscapedLocalAllocas(F);
  2370. // We want to instrument every address only once per basic block (unless there
  2371. // are calls between uses).
  2372. SmallPtrSet<Value *, 16> TempsToInstrument;
  2373. SmallVector<InterestingMemoryOperand, 16> OperandsToInstrument;
  2374. SmallVector<MemIntrinsic *, 16> IntrinToInstrument;
  2375. SmallVector<Instruction *, 8> NoReturnCalls;
  2376. SmallVector<BasicBlock *, 16> AllBlocks;
  2377. SmallVector<Instruction *, 16> PointerComparisonsOrSubtracts;
  2378. // Fill the set of memory operations to instrument.
  2379. for (auto &BB : F) {
  2380. AllBlocks.push_back(&BB);
  2381. TempsToInstrument.clear();
  2382. int NumInsnsPerBB = 0;
  2383. for (auto &Inst : BB) {
  2384. if (LooksLikeCodeInBug11395(&Inst)) return false;
  2385. // Skip instructions inserted by another instrumentation.
  2386. if (Inst.hasMetadata(LLVMContext::MD_nosanitize))
  2387. continue;
  2388. SmallVector<InterestingMemoryOperand, 1> InterestingOperands;
  2389. getInterestingMemoryOperands(&Inst, InterestingOperands);
  2390. if (!InterestingOperands.empty()) {
  2391. for (auto &Operand : InterestingOperands) {
  2392. if (ClOpt && ClOptSameTemp) {
  2393. Value *Ptr = Operand.getPtr();
  2394. // If we have a mask, skip instrumentation if we've already
  2395. // instrumented the full object. But don't add to TempsToInstrument
  2396. // because we might get another load/store with a different mask.
  2397. if (Operand.MaybeMask) {
  2398. if (TempsToInstrument.count(Ptr))
  2399. continue; // We've seen this (whole) temp in the current BB.
  2400. } else {
  2401. if (!TempsToInstrument.insert(Ptr).second)
  2402. continue; // We've seen this temp in the current BB.
  2403. }
  2404. }
  2405. OperandsToInstrument.push_back(Operand);
  2406. NumInsnsPerBB++;
  2407. }
  2408. } else if (((ClInvalidPointerPairs || ClInvalidPointerCmp) &&
  2409. isInterestingPointerComparison(&Inst)) ||
  2410. ((ClInvalidPointerPairs || ClInvalidPointerSub) &&
  2411. isInterestingPointerSubtraction(&Inst))) {
  2412. PointerComparisonsOrSubtracts.push_back(&Inst);
  2413. } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(&Inst)) {
  2414. // ok, take it.
  2415. IntrinToInstrument.push_back(MI);
  2416. NumInsnsPerBB++;
  2417. } else {
  2418. if (auto *CB = dyn_cast<CallBase>(&Inst)) {
  2419. // A call inside BB.
  2420. TempsToInstrument.clear();
  2421. if (CB->doesNotReturn())
  2422. NoReturnCalls.push_back(CB);
  2423. }
  2424. if (CallInst *CI = dyn_cast<CallInst>(&Inst))
  2425. maybeMarkSanitizerLibraryCallNoBuiltin(CI, TLI);
  2426. }
  2427. if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB) break;
  2428. }
  2429. }
  2430. bool UseCalls = (ClInstrumentationWithCallsThreshold >= 0 &&
  2431. OperandsToInstrument.size() + IntrinToInstrument.size() >
  2432. (unsigned)ClInstrumentationWithCallsThreshold);
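// Past this threshold, prefer out-of-line __asan_load*/__asan_store* callbacks
// over inline shadow checks to limit code-size growth.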
  2433. const DataLayout &DL = F.getParent()->getDataLayout();
  2434. ObjectSizeOpts ObjSizeOpts;
  2435. ObjSizeOpts.RoundToAlign = true;
  2436. ObjectSizeOffsetVisitor ObjSizeVis(DL, TLI, F.getContext(), ObjSizeOpts);
  2437. // Instrument.
  2438. int NumInstrumented = 0;
  2439. for (auto &Operand : OperandsToInstrument) {
  2440. if (!suppressInstrumentationSiteForDebug(NumInstrumented))
  2441. instrumentMop(ObjSizeVis, Operand, UseCalls,
  2442. F.getParent()->getDataLayout());
  2443. FunctionModified = true;
  2444. }
  2445. for (auto *Inst : IntrinToInstrument) {
  2446. if (!suppressInstrumentationSiteForDebug(NumInstrumented))
  2447. instrumentMemIntrinsic(Inst);
  2448. FunctionModified = true;
  2449. }
  2450. FunctionStackPoisoner FSP(F, *this);
  2451. bool ChangedStack = FSP.runOnFunction();
  2452. // We must unpoison the stack before NoReturn calls (throw, _exit, etc).
  2453. // See e.g. https://github.com/google/sanitizers/issues/37
  2454. for (auto *CI : NoReturnCalls) {
  2455. IRBuilder<> IRB(CI);
  2456. IRB.CreateCall(AsanHandleNoReturnFunc, {});
  2457. }
  2458. for (auto *Inst : PointerComparisonsOrSubtracts) {
  2459. instrumentPointerComparisonOrSubtraction(Inst);
  2460. FunctionModified = true;
  2461. }
  2462. if (ChangedStack || !NoReturnCalls.empty())
  2463. FunctionModified = true;
  2464. LLVM_DEBUG(dbgs() << "ASAN done instrumenting: " << FunctionModified << " "
  2465. << F << "\n");
  2466. return FunctionModified;
  2467. }
2468. // Workaround for bug 11395: we don't want to instrument the stack in functions
2469. // with large assembly blobs (32-bit only); otherwise register allocation may crash.
  2470. // FIXME: remove once the bug 11395 is fixed.
  2471. bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
  2472. if (LongSize != 32) return false;
  2473. CallInst *CI = dyn_cast<CallInst>(I);
  2474. if (!CI || !CI->isInlineAsm()) return false;
  2475. if (CI->arg_size() <= 5)
  2476. return false;
  2477. // We have inline assembly with quite a few arguments.
  2478. return true;
  2479. }
  2480. void FunctionStackPoisoner::initializeCallbacks(Module &M) {
  2481. IRBuilder<> IRB(*C);
  2482. if (ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Always ||
  2483. ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Runtime) {
  2484. const char *MallocNameTemplate =
  2485. ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Always
  2486. ? kAsanStackMallocAlwaysNameTemplate
  2487. : kAsanStackMallocNameTemplate;
  2488. for (int Index = 0; Index <= kMaxAsanStackMallocSizeClass; Index++) {
  2489. std::string Suffix = itostr(Index);
  2490. AsanStackMallocFunc[Index] = M.getOrInsertFunction(
  2491. MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
  2492. AsanStackFreeFunc[Index] =
  2493. M.getOrInsertFunction(kAsanStackFreeNameTemplate + Suffix,
  2494. IRB.getVoidTy(), IntptrTy, IntptrTy);
  2495. }
  2496. }
  2497. if (ASan.UseAfterScope) {
  2498. AsanPoisonStackMemoryFunc = M.getOrInsertFunction(
  2499. kAsanPoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2500. AsanUnpoisonStackMemoryFunc = M.getOrInsertFunction(
  2501. kAsanUnpoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2502. }
  2503. for (size_t Val : {0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0xf1, 0xf2,
  2504. 0xf3, 0xf5, 0xf8}) {
  2505. std::ostringstream Name;
  2506. Name << kAsanSetShadowPrefix;
  2507. Name << std::setw(2) << std::setfill('0') << std::hex << Val;
  2508. AsanSetShadowFunc[Val] =
  2509. M.getOrInsertFunction(Name.str(), IRB.getVoidTy(), IntptrTy, IntptrTy);
  2510. }
  2511. AsanAllocaPoisonFunc = M.getOrInsertFunction(
  2512. kAsanAllocaPoison, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2513. AsanAllocasUnpoisonFunc = M.getOrInsertFunction(
  2514. kAsanAllocasUnpoison, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2515. }
  2516. void FunctionStackPoisoner::copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
  2517. ArrayRef<uint8_t> ShadowBytes,
  2518. size_t Begin, size_t End,
  2519. IRBuilder<> &IRB,
  2520. Value *ShadowBase) {
  2521. if (Begin >= End)
  2522. return;
  2523. const size_t LargestStoreSizeInBytes =
  2524. std::min<size_t>(sizeof(uint64_t), ASan.LongSize / 8);
  2525. const bool IsLittleEndian = F.getParent()->getDataLayout().isLittleEndian();
2526. // Poison the given range in shadow using the largest possible store size,
2527. // skipping leading and trailing zeros in ShadowMask. Zeros never change, so
2528. // they need neither poisoning nor unpoisoning. Still, we don't mind if some
2529. // of them end up in the middle of a store.
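// E.g. on a little-endian target, mask/bytes {f1, f1, 00, f2} become a single
// unaligned 32-bit store of 0xf200f1f1.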
  2530. for (size_t i = Begin; i < End;) {
  2531. if (!ShadowMask[i]) {
  2532. assert(!ShadowBytes[i]);
  2533. ++i;
  2534. continue;
  2535. }
  2536. size_t StoreSizeInBytes = LargestStoreSizeInBytes;
  2537. // Fit store size into the range.
  2538. while (StoreSizeInBytes > End - i)
  2539. StoreSizeInBytes /= 2;
  2540. // Minimize store size by trimming trailing zeros.
  2541. for (size_t j = StoreSizeInBytes - 1; j && !ShadowMask[i + j]; --j) {
  2542. while (j <= StoreSizeInBytes / 2)
  2543. StoreSizeInBytes /= 2;
  2544. }
  2545. uint64_t Val = 0;
  2546. for (size_t j = 0; j < StoreSizeInBytes; j++) {
  2547. if (IsLittleEndian)
  2548. Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
  2549. else
  2550. Val = (Val << 8) | ShadowBytes[i + j];
  2551. }
  2552. Value *Ptr = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
  2553. Value *Poison = IRB.getIntN(StoreSizeInBytes * 8, Val);
  2554. IRB.CreateAlignedStore(
  2555. Poison, IRB.CreateIntToPtr(Ptr, Poison->getType()->getPointerTo()),
  2556. Align(1));
  2557. i += StoreSizeInBytes;
  2558. }
  2559. }
  2560. void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
  2561. ArrayRef<uint8_t> ShadowBytes,
  2562. IRBuilder<> &IRB, Value *ShadowBase) {
  2563. copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.size(), IRB, ShadowBase);
  2564. }
  2565. void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
  2566. ArrayRef<uint8_t> ShadowBytes,
  2567. size_t Begin, size_t End,
  2568. IRBuilder<> &IRB, Value *ShadowBase) {
  2569. assert(ShadowMask.size() == ShadowBytes.size());
  2570. size_t Done = Begin;
  2571. for (size_t i = Begin, j = Begin + 1; i < End; i = j++) {
  2572. if (!ShadowMask[i]) {
  2573. assert(!ShadowBytes[i]);
  2574. continue;
  2575. }
  2576. uint8_t Val = ShadowBytes[i];
  2577. if (!AsanSetShadowFunc[Val])
  2578. continue;
2579. // Skip over the run of identical shadow values starting at i.
  2580. for (; j < End && ShadowMask[j] && Val == ShadowBytes[j]; ++j) {
  2581. }
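// Runs of at least ClMaxInlinePoisoningSize identical bytes become a single
// __asan_set_shadow_xx(start, length) call; the stretch before the run is
// flushed with inline stores first.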
  2582. if (j - i >= ClMaxInlinePoisoningSize) {
  2583. copyToShadowInline(ShadowMask, ShadowBytes, Done, i, IRB, ShadowBase);
  2584. IRB.CreateCall(AsanSetShadowFunc[Val],
  2585. {IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
  2586. ConstantInt::get(IntptrTy, j - i)});
  2587. Done = j;
  2588. }
  2589. }
  2590. copyToShadowInline(ShadowMask, ShadowBytes, Done, End, IRB, ShadowBase);
  2591. }
  2592. // Fake stack allocator (asan_fake_stack.h) has 11 size classes
  2593. // for every power of 2 from kMinStackMallocSize to kMaxAsanStackMallocSizeClass
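// E.g., assuming kMinStackMallocSize is 64, a 100-byte frame maps to class 1
// and a 1000-byte frame to class 4.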
  2594. static int StackMallocSizeClass(uint64_t LocalStackSize) {
  2595. assert(LocalStackSize <= kMaxStackMallocSize);
  2596. uint64_t MaxSize = kMinStackMallocSize;
  2597. for (int i = 0;; i++, MaxSize *= 2)
  2598. if (LocalStackSize <= MaxSize) return i;
  2599. llvm_unreachable("impossible LocalStackSize");
  2600. }
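// Copy byval arguments into explicit allocas so their memory can be given
// redzones and instrumented like any other local variable.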
  2601. void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
  2602. Instruction *CopyInsertPoint = &F.front().front();
  2603. if (CopyInsertPoint == ASan.LocalDynamicShadow) {
  2604. // Insert after the dynamic shadow location is determined
  2605. CopyInsertPoint = CopyInsertPoint->getNextNode();
  2606. assert(CopyInsertPoint);
  2607. }
  2608. IRBuilder<> IRB(CopyInsertPoint);
  2609. const DataLayout &DL = F.getParent()->getDataLayout();
  2610. for (Argument &Arg : F.args()) {
  2611. if (Arg.hasByValAttr()) {
  2612. Type *Ty = Arg.getParamByValType();
  2613. const Align Alignment =
  2614. DL.getValueOrABITypeAlignment(Arg.getParamAlign(), Ty);
  2615. AllocaInst *AI = IRB.CreateAlloca(
  2616. Ty, nullptr,
  2617. (Arg.hasName() ? Arg.getName() : "Arg" + Twine(Arg.getArgNo())) +
  2618. ".byval");
  2619. AI->setAlignment(Alignment);
  2620. Arg.replaceAllUsesWith(AI);
  2621. uint64_t AllocSize = DL.getTypeAllocSize(Ty);
  2622. IRB.CreateMemCpy(AI, Alignment, &Arg, Alignment, AllocSize);
  2623. }
  2624. }
  2625. }
  2626. PHINode *FunctionStackPoisoner::createPHI(IRBuilder<> &IRB, Value *Cond,
  2627. Value *ValueIfTrue,
  2628. Instruction *ThenTerm,
  2629. Value *ValueIfFalse) {
  2630. PHINode *PHI = IRB.CreatePHI(IntptrTy, 2);
  2631. BasicBlock *CondBlock = cast<Instruction>(Cond)->getParent();
  2632. PHI->addIncoming(ValueIfFalse, CondBlock);
  2633. BasicBlock *ThenBlock = ThenTerm->getParent();
  2634. PHI->addIncoming(ValueIfTrue, ThenBlock);
  2635. return PHI;
  2636. }
  2637. Value *FunctionStackPoisoner::createAllocaForLayout(
  2638. IRBuilder<> &IRB, const ASanStackFrameLayout &L, bool Dynamic) {
  2639. AllocaInst *Alloca;
  2640. if (Dynamic) {
  2641. Alloca = IRB.CreateAlloca(IRB.getInt8Ty(),
  2642. ConstantInt::get(IRB.getInt64Ty(), L.FrameSize),
  2643. "MyAlloca");
  2644. } else {
  2645. Alloca = IRB.CreateAlloca(ArrayType::get(IRB.getInt8Ty(), L.FrameSize),
  2646. nullptr, "MyAlloca");
  2647. assert(Alloca->isStaticAlloca());
  2648. }
  2649. assert((ClRealignStack & (ClRealignStack - 1)) == 0);
  2650. uint64_t FrameAlignment = std::max(L.FrameAlignment, uint64_t(ClRealignStack));
  2651. Alloca->setAlignment(Align(FrameAlignment));
  2652. return IRB.CreatePointerCast(Alloca, IntptrTy);
  2653. }
  2654. void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
  2655. BasicBlock &FirstBB = *F.begin();
  2656. IRBuilder<> IRB(dyn_cast<Instruction>(FirstBB.begin()));
  2657. DynamicAllocaLayout = IRB.CreateAlloca(IntptrTy, nullptr);
  2658. IRB.CreateStore(Constant::getNullValue(IntptrTy), DynamicAllocaLayout);
  2659. DynamicAllocaLayout->setAlignment(Align(32));
  2660. }
  2661. void FunctionStackPoisoner::processDynamicAllocas() {
  2662. if (!ClInstrumentDynamicAllocas || DynamicAllocaVec.empty()) {
  2663. assert(DynamicAllocaPoisonCallVec.empty());
  2664. return;
  2665. }
  2666. // Insert poison calls for lifetime intrinsics for dynamic allocas.
  2667. for (const auto &APC : DynamicAllocaPoisonCallVec) {
  2668. assert(APC.InsBefore);
  2669. assert(APC.AI);
  2670. assert(ASan.isInterestingAlloca(*APC.AI));
  2671. assert(!APC.AI->isStaticAlloca());
  2672. IRBuilder<> IRB(APC.InsBefore);
  2673. poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
2674. // Dynamic allocas will be unpoisoned unconditionally below in
2675. // unpoisonDynamicAllocas.
2676. // Flag that we need to unpoison static allocas.
  2677. }
  2678. // Handle dynamic allocas.
  2679. createDynamicAllocasInitStorage();
  2680. for (auto &AI : DynamicAllocaVec)
  2681. handleDynamicAllocaCall(AI);
  2682. unpoisonDynamicAllocas();
  2683. }
  2684. /// Collect instructions in the entry block after \p InsBefore which initialize
  2685. /// permanent storage for a function argument. These instructions must remain in
  2686. /// the entry block so that uninitialized values do not appear in backtraces. An
  2687. /// added benefit is that this conserves spill slots. This does not move stores
  2688. /// before instrumented / "interesting" allocas.
  2689. static void findStoresToUninstrumentedArgAllocas(
  2690. AddressSanitizer &ASan, Instruction &InsBefore,
  2691. SmallVectorImpl<Instruction *> &InitInsts) {
  2692. Instruction *Start = InsBefore.getNextNonDebugInstruction();
  2693. for (Instruction *It = Start; It; It = It->getNextNonDebugInstruction()) {
  2694. // Argument initialization looks like:
  2695. // 1) store <Argument>, <Alloca> OR
  2696. // 2) <CastArgument> = cast <Argument> to ...
  2697. // store <CastArgument> to <Alloca>
  2698. // Do not consider any other kind of instruction.
  2699. //
  2700. // Note: This covers all known cases, but may not be exhaustive. An
  2701. // alternative to pattern-matching stores is to DFS over all Argument uses:
  2702. // this might be more general, but is probably much more complicated.
  2703. if (isa<AllocaInst>(It) || isa<CastInst>(It))
  2704. continue;
  2705. if (auto *Store = dyn_cast<StoreInst>(It)) {
  2706. // The store destination must be an alloca that isn't interesting for
  2707. // ASan to instrument. These are moved up before InsBefore, and they're
  2708. // not interesting because allocas for arguments can be mem2reg'd.
  2709. auto *Alloca = dyn_cast<AllocaInst>(Store->getPointerOperand());
  2710. if (!Alloca || ASan.isInterestingAlloca(*Alloca))
  2711. continue;
  2712. Value *Val = Store->getValueOperand();
  2713. bool IsDirectArgInit = isa<Argument>(Val);
  2714. bool IsArgInitViaCast =
  2715. isa<CastInst>(Val) &&
  2716. isa<Argument>(cast<CastInst>(Val)->getOperand(0)) &&
  2717. // Check that the cast appears directly before the store. Otherwise
  2718. // moving the cast before InsBefore may break the IR.
  2719. Val == It->getPrevNonDebugInstruction();
  2720. bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
  2721. if (!IsArgInit)
  2722. continue;
  2723. if (IsArgInitViaCast)
  2724. InitInsts.push_back(cast<Instruction>(Val));
  2725. InitInsts.push_back(Store);
  2726. continue;
  2727. }
  2728. // Do not reorder past unknown instructions: argument initialization should
  2729. // only involve casts and stores.
  2730. return;
  2731. }
  2732. }
  2733. void FunctionStackPoisoner::processStaticAllocas() {
  2734. if (AllocaVec.empty()) {
  2735. assert(StaticAllocaPoisonCallVec.empty());
  2736. return;
  2737. }
  2738. int StackMallocIdx = -1;
  2739. DebugLoc EntryDebugLocation;
  2740. if (auto SP = F.getSubprogram())
  2741. EntryDebugLocation =
  2742. DILocation::get(SP->getContext(), SP->getScopeLine(), 0, SP);
  2743. Instruction *InsBefore = AllocaVec[0];
  2744. IRBuilder<> IRB(InsBefore);
  2745. // Make sure non-instrumented allocas stay in the entry block. Otherwise,
  2746. // debug info is broken, because only entry-block allocas are treated as
  2747. // regular stack slots.
  2748. auto InsBeforeB = InsBefore->getParent();
  2749. assert(InsBeforeB == &F.getEntryBlock());
  2750. for (auto *AI : StaticAllocasToMoveUp)
  2751. if (AI->getParent() == InsBeforeB)
  2752. AI->moveBefore(InsBefore);
  2753. // Move stores of arguments into entry-block allocas as well. This prevents
  2754. // extra stack slots from being generated (to house the argument values until
  2755. // they can be stored into the allocas). This also prevents uninitialized
  2756. // values from being shown in backtraces.
  2757. SmallVector<Instruction *, 8> ArgInitInsts;
  2758. findStoresToUninstrumentedArgAllocas(ASan, *InsBefore, ArgInitInsts);
  2759. for (Instruction *ArgInitInst : ArgInitInsts)
  2760. ArgInitInst->moveBefore(InsBefore);
  2761. // If we have a call to llvm.localescape, keep it in the entry block.
  2762. if (LocalEscapeCall) LocalEscapeCall->moveBefore(InsBefore);
  2763. SmallVector<ASanStackVariableDescription, 16> SVD;
  2764. SVD.reserve(AllocaVec.size());
  2765. for (AllocaInst *AI : AllocaVec) {
  2766. ASanStackVariableDescription D = {AI->getName().data(),
  2767. ASan.getAllocaSizeInBytes(*AI),
  2768. 0,
  2769. AI->getAlign().value(),
  2770. AI,
  2771. 0,
  2772. 0};
  2773. SVD.push_back(D);
  2774. }
2775. // The minimal header size (left redzone) is 4 pointers,
2776. // i.e. 32 bytes on 64-bit platforms and 16 bytes on 32-bit platforms.
  2777. uint64_t Granularity = 1ULL << Mapping.Scale;
  2778. uint64_t MinHeaderSize = std::max((uint64_t)ASan.LongSize / 2, Granularity);
  2779. const ASanStackFrameLayout &L =
  2780. ComputeASanStackFrameLayout(SVD, Granularity, MinHeaderSize);
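// The computed frame is laid out roughly as
//   [left redzone: magic, frame-description ptr, PC, ...]
//   [var0][rz][var1][rz]...[varN][right redzone]
// with the three header words written further below.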
  2781. // Build AllocaToSVDMap for ASanStackVariableDescription lookup.
  2782. DenseMap<const AllocaInst *, ASanStackVariableDescription *> AllocaToSVDMap;
  2783. for (auto &Desc : SVD)
  2784. AllocaToSVDMap[Desc.AI] = &Desc;
  2785. // Update SVD with information from lifetime intrinsics.
  2786. for (const auto &APC : StaticAllocaPoisonCallVec) {
  2787. assert(APC.InsBefore);
  2788. assert(APC.AI);
  2789. assert(ASan.isInterestingAlloca(*APC.AI));
  2790. assert(APC.AI->isStaticAlloca());
  2791. ASanStackVariableDescription &Desc = *AllocaToSVDMap[APC.AI];
  2792. Desc.LifetimeSize = Desc.Size;
  2793. if (const DILocation *FnLoc = EntryDebugLocation.get()) {
  2794. if (const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
  2795. if (LifetimeLoc->getFile() == FnLoc->getFile())
  2796. if (unsigned Line = LifetimeLoc->getLine())
  2797. Desc.Line = std::min(Desc.Line ? Desc.Line : Line, Line);
  2798. }
  2799. }
  2800. }
  2801. auto DescriptionString = ComputeASanStackFrameDescription(SVD);
  2802. LLVM_DEBUG(dbgs() << DescriptionString << " --- " << L.FrameSize << "\n");
  2803. uint64_t LocalStackSize = L.FrameSize;
  2804. bool DoStackMalloc =
  2805. ASan.UseAfterReturn != AsanDetectStackUseAfterReturnMode::Never &&
  2806. !ASan.CompileKernel && LocalStackSize <= kMaxStackMallocSize;
  2807. bool DoDynamicAlloca = ClDynamicAllocaStack;
  2808. // Don't do dynamic alloca or stack malloc if:
  2809. // 1) There is inline asm: too often it makes assumptions on which registers
  2810. // are available.
  2811. // 2) There is a returns_twice call (typically setjmp), which is
  2812. // optimization-hostile, and doesn't play well with introduced indirect
  2813. // register-relative calculation of local variable addresses.
  2814. DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
  2815. DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
  2816. Value *StaticAlloca =
  2817. DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L, false);
  2818. Value *FakeStack;
  2819. Value *LocalStackBase;
  2820. Value *LocalStackBaseAlloca;
  2821. uint8_t DIExprFlags = DIExpression::ApplyOffset;
  2822. if (DoStackMalloc) {
  2823. LocalStackBaseAlloca =
  2824. IRB.CreateAlloca(IntptrTy, nullptr, "asan_local_stack_base");
  2825. if (ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Runtime) {
  2826. // void *FakeStack = __asan_option_detect_stack_use_after_return
  2827. // ? __asan_stack_malloc_N(LocalStackSize)
  2828. // : nullptr;
  2829. // void *LocalStackBase = (FakeStack) ? FakeStack :
  2830. // alloca(LocalStackSize);
  2831. Constant *OptionDetectUseAfterReturn = F.getParent()->getOrInsertGlobal(
  2832. kAsanOptionDetectUseAfterReturn, IRB.getInt32Ty());
  2833. Value *UseAfterReturnIsEnabled = IRB.CreateICmpNE(
  2834. IRB.CreateLoad(IRB.getInt32Ty(), OptionDetectUseAfterReturn),
  2835. Constant::getNullValue(IRB.getInt32Ty()));
  2836. Instruction *Term =
  2837. SplitBlockAndInsertIfThen(UseAfterReturnIsEnabled, InsBefore, false);
  2838. IRBuilder<> IRBIf(Term);
  2839. StackMallocIdx = StackMallocSizeClass(LocalStackSize);
  2840. assert(StackMallocIdx <= kMaxAsanStackMallocSizeClass);
  2841. Value *FakeStackValue =
  2842. IRBIf.CreateCall(AsanStackMallocFunc[StackMallocIdx],
  2843. ConstantInt::get(IntptrTy, LocalStackSize));
  2844. IRB.SetInsertPoint(InsBefore);
  2845. FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
  2846. ConstantInt::get(IntptrTy, 0));
  2847. } else {
2848. // assert(ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Always)
  2849. // void *FakeStack = __asan_stack_malloc_N(LocalStackSize);
  2850. // void *LocalStackBase = (FakeStack) ? FakeStack :
  2851. // alloca(LocalStackSize);
  2852. StackMallocIdx = StackMallocSizeClass(LocalStackSize);
  2853. FakeStack = IRB.CreateCall(AsanStackMallocFunc[StackMallocIdx],
  2854. ConstantInt::get(IntptrTy, LocalStackSize));
  2855. }
  2856. Value *NoFakeStack =
  2857. IRB.CreateICmpEQ(FakeStack, Constant::getNullValue(IntptrTy));
  2858. Instruction *Term =
  2859. SplitBlockAndInsertIfThen(NoFakeStack, InsBefore, false);
  2860. IRBuilder<> IRBIf(Term);
  2861. Value *AllocaValue =
  2862. DoDynamicAlloca ? createAllocaForLayout(IRBIf, L, true) : StaticAlloca;
  2863. IRB.SetInsertPoint(InsBefore);
  2864. LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
  2865. IRB.CreateStore(LocalStackBase, LocalStackBaseAlloca);
  2866. DIExprFlags |= DIExpression::DerefBefore;
  2867. } else {
  2868. // void *FakeStack = nullptr;
  2869. // void *LocalStackBase = alloca(LocalStackSize);
  2870. FakeStack = ConstantInt::get(IntptrTy, 0);
  2871. LocalStackBase =
  2872. DoDynamicAlloca ? createAllocaForLayout(IRB, L, true) : StaticAlloca;
  2873. LocalStackBaseAlloca = LocalStackBase;
  2874. }
2875. // It shouldn't matter whether we pass an `alloca` or a `ptrtoint` as the
2876. // dbg.declare address operand, but passing a `ptrtoint` seems to confuse
2877. // later passes and can result in dropped variable coverage in debug info.
  2878. Value *LocalStackBaseAllocaPtr =
  2879. isa<PtrToIntInst>(LocalStackBaseAlloca)
  2880. ? cast<PtrToIntInst>(LocalStackBaseAlloca)->getPointerOperand()
  2881. : LocalStackBaseAlloca;
  2882. assert(isa<AllocaInst>(LocalStackBaseAllocaPtr) &&
  2883. "Variable descriptions relative to ASan stack base will be dropped");
  2884. // Replace Alloca instructions with base+offset.
  2885. for (const auto &Desc : SVD) {
  2886. AllocaInst *AI = Desc.AI;
  2887. replaceDbgDeclare(AI, LocalStackBaseAllocaPtr, DIB, DIExprFlags,
  2888. Desc.Offset);
  2889. Value *NewAllocaPtr = IRB.CreateIntToPtr(
  2890. IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, Desc.Offset)),
  2891. AI->getType());
  2892. AI->replaceAllUsesWith(NewAllocaPtr);
  2893. }
  2894. // The left-most redzone has enough space for at least 4 pointers.
  2895. // Write the Magic value to redzone[0].
  2896. Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy);
  2897. IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
  2898. BasePlus0);
  2899. // Write the frame description constant to redzone[1].
  2900. Value *BasePlus1 = IRB.CreateIntToPtr(
  2901. IRB.CreateAdd(LocalStackBase,
  2902. ConstantInt::get(IntptrTy, ASan.LongSize / 8)),
  2903. IntptrPtrTy);
  2904. GlobalVariable *StackDescriptionGlobal =
  2905. createPrivateGlobalForString(*F.getParent(), DescriptionString,
  2906. /*AllowMerging*/ true, kAsanGenPrefix);
  2907. Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal, IntptrTy);
  2908. IRB.CreateStore(Description, BasePlus1);
  2909. // Write the PC to redzone[2].
  2910. Value *BasePlus2 = IRB.CreateIntToPtr(
  2911. IRB.CreateAdd(LocalStackBase,
  2912. ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8)),
  2913. IntptrPtrTy);
  2914. IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);
  2915. const auto &ShadowAfterScope = GetShadowBytesAfterScope(SVD, L);
  2916. // Poison the stack red zones at the entry.
  2917. Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
2918. // As the mask we must use the most-poisoned case: red zones plus after-scope.
2919. // As the bytes we can use either the same mask or the red zones only.
  2920. copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
  2921. if (!StaticAllocaPoisonCallVec.empty()) {
  2922. const auto &ShadowInScope = GetShadowBytes(SVD, L);
  2923. // Poison static allocas near lifetime intrinsics.
  2924. for (const auto &APC : StaticAllocaPoisonCallVec) {
  2925. const ASanStackVariableDescription &Desc = *AllocaToSVDMap[APC.AI];
  2926. assert(Desc.Offset % L.Granularity == 0);
  2927. size_t Begin = Desc.Offset / L.Granularity;
  2928. size_t End = Begin + (APC.Size + L.Granularity - 1) / L.Granularity;
  2929. IRBuilder<> IRB(APC.InsBefore);
  2930. copyToShadow(ShadowAfterScope,
  2931. APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin, End,
  2932. IRB, ShadowBase);
  2933. }
  2934. }
  2935. SmallVector<uint8_t, 64> ShadowClean(ShadowAfterScope.size(), 0);
  2936. SmallVector<uint8_t, 64> ShadowAfterReturn;
  2937. // (Un)poison the stack before all ret instructions.
  2938. for (Instruction *Ret : RetVec) {
  2939. IRBuilder<> IRBRet(Ret);
  2940. // Mark the current frame as retired.
  2941. IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic),
  2942. BasePlus0);
  2943. if (DoStackMalloc) {
  2944. assert(StackMallocIdx >= 0);
  2945. // if FakeStack != 0 // LocalStackBase == FakeStack
  2946. // // In use-after-return mode, poison the whole stack frame.
  2947. // if StackMallocIdx <= 4
  2948. // // For small sizes inline the whole thing:
  2949. // memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
  2950. // **SavedFlagPtr(FakeStack) = 0
  2951. // else
  2952. // __asan_stack_free_N(FakeStack, LocalStackSize)
  2953. // else
  2954. // <This is not a fake stack; unpoison the redzones>
  2955. Value *Cmp =
  2956. IRBRet.CreateICmpNE(FakeStack, Constant::getNullValue(IntptrTy));
  2957. Instruction *ThenTerm, *ElseTerm;
  2958. SplitBlockAndInsertIfThenElse(Cmp, Ret, &ThenTerm, &ElseTerm);
  2959. IRBuilder<> IRBPoison(ThenTerm);
  2960. if (StackMallocIdx <= 4) {
  2961. int ClassSize = kMinStackMallocSize << StackMallocIdx;
  2962. ShadowAfterReturn.resize(ClassSize / L.Granularity,
  2963. kAsanStackUseAfterReturnMagic);
  2964. copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
  2965. ShadowBase);
  2966. Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
  2967. FakeStack,
  2968. ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
  2969. Value *SavedFlagPtr = IRBPoison.CreateLoad(
  2970. IntptrTy, IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
  2971. IRBPoison.CreateStore(
  2972. Constant::getNullValue(IRBPoison.getInt8Ty()),
  2973. IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
  2974. } else {
  2975. // For larger frames call __asan_stack_free_*.
  2976. IRBPoison.CreateCall(
  2977. AsanStackFreeFunc[StackMallocIdx],
  2978. {FakeStack, ConstantInt::get(IntptrTy, LocalStackSize)});
  2979. }
  2980. IRBuilder<> IRBElse(ElseTerm);
  2981. copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
  2982. } else {
  2983. copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
  2984. }
  2985. }
  2986. // We are done. Remove the old unused alloca instructions.
  2987. for (auto *AI : AllocaVec)
  2988. AI->eraseFromParent();
  2989. }
  2990. void FunctionStackPoisoner::poisonAlloca(Value *V, uint64_t Size,
  2991. IRBuilder<> &IRB, bool DoPoison) {
  2992. // For now just insert the call to ASan runtime.
  2993. Value *AddrArg = IRB.CreatePointerCast(V, IntptrTy);
  2994. Value *SizeArg = ConstantInt::get(IntptrTy, Size);
  2995. IRB.CreateCall(
  2996. DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
  2997. {AddrArg, SizeArg});
  2998. }
  2999. // Handling llvm.lifetime intrinsics for a given %alloca:
  3000. // (1) collect all llvm.lifetime.xxx(%size, %value) describing the alloca.
  3001. // (2) if %size is constant, poison memory for llvm.lifetime.end (to detect
  3002. // invalid accesses) and unpoison it for llvm.lifetime.start (the memory
  3003. // could be poisoned by previous llvm.lifetime.end instruction, as the
  3004. // variable may go in and out of scope several times, e.g. in loops).
  3005. // (3) if we poisoned at least one %alloca in a function,
  3006. // unpoison the whole stack frame at function exit.
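// handleDynamicAllocaCall replaces a dynamic alloca with a larger one laid out
// roughly as [left redzone (Alignment)][user memory (OldSize)][partial + right
// redzone], and reports it to the runtime via AsanAllocaPoisonFunc.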
  3007. void FunctionStackPoisoner::handleDynamicAllocaCall(AllocaInst *AI) {
  3008. IRBuilder<> IRB(AI);
  3009. const Align Alignment = std::max(Align(kAllocaRzSize), AI->getAlign());
  3010. const uint64_t AllocaRedzoneMask = kAllocaRzSize - 1;
  3011. Value *Zero = Constant::getNullValue(IntptrTy);
  3012. Value *AllocaRzSize = ConstantInt::get(IntptrTy, kAllocaRzSize);
  3013. Value *AllocaRzMask = ConstantInt::get(IntptrTy, AllocaRedzoneMask);
3014. // Since we need to extend the alloca with additional memory for the
3015. // redzones, and OldSize is the number of allocated elements of ElementSize
3016. // bytes each, compute the allocated memory size in bytes as
3017. // OldSize * ElementSize.
  3018. const unsigned ElementSize =
  3019. F.getParent()->getDataLayout().getTypeAllocSize(AI->getAllocatedType());
  3020. Value *OldSize =
  3021. IRB.CreateMul(IRB.CreateIntCast(AI->getArraySize(), IntptrTy, false),
  3022. ConstantInt::get(IntptrTy, ElementSize));
  3023. // PartialSize = OldSize % 32
  3024. Value *PartialSize = IRB.CreateAnd(OldSize, AllocaRzMask);
  3025. // Misalign = kAllocaRzSize - PartialSize;
  3026. Value *Misalign = IRB.CreateSub(AllocaRzSize, PartialSize);
  3027. // PartialPadding = Misalign != kAllocaRzSize ? Misalign : 0;
  3028. Value *Cond = IRB.CreateICmpNE(Misalign, AllocaRzSize);
  3029. Value *PartialPadding = IRB.CreateSelect(Cond, Misalign, Zero);
  3030. // AdditionalChunkSize = Alignment + PartialPadding + kAllocaRzSize
  3031. // Alignment is added to locate left redzone, PartialPadding for possible
  3032. // partial redzone and kAllocaRzSize for right redzone respectively.
  3033. Value *AdditionalChunkSize = IRB.CreateAdd(
  3034. ConstantInt::get(IntptrTy, Alignment.value() + kAllocaRzSize),
  3035. PartialPadding);
  3036. Value *NewSize = IRB.CreateAdd(OldSize, AdditionalChunkSize);
3037. // Insert a new alloca with the computed NewSize and Alignment.
  3038. AllocaInst *NewAlloca = IRB.CreateAlloca(IRB.getInt8Ty(), NewSize);
  3039. NewAlloca->setAlignment(Alignment);
  3040. // NewAddress = Address + Alignment
  3041. Value *NewAddress =
  3042. IRB.CreateAdd(IRB.CreatePtrToInt(NewAlloca, IntptrTy),
  3043. ConstantInt::get(IntptrTy, Alignment.value()));
3044. // Insert an __asan_alloca_poison call for the newly created alloca.
  3045. IRB.CreateCall(AsanAllocaPoisonFunc, {NewAddress, OldSize});
3046. // Store the last alloca's address to DynamicAllocaLayout. We'll need it
3047. // later to unpoison the dynamic allocas.
  3048. IRB.CreateStore(IRB.CreatePtrToInt(NewAlloca, IntptrTy), DynamicAllocaLayout);
  3049. Value *NewAddressPtr = IRB.CreateIntToPtr(NewAddress, AI->getType());
3050. // Replace all uses of the address returned by the original alloca with NewAddressPtr.
  3051. AI->replaceAllUsesWith(NewAddressPtr);
  3052. // We are done. Erase old alloca from parent.
  3053. AI->eraseFromParent();
  3054. }
3055. // isSafeAccess returns true if Addr is always inbounds with respect to its
3056. // base object. For example, it is a field access or an array access with a
3057. // constant inbounds index.
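// E.g. a 4-byte load at constant offset 8 within a 16-byte object is safe:
// 8 >= 0, 16 >= 8, and 16 - 8 >= 4.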
  3058. bool AddressSanitizer::isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis,
  3059. Value *Addr, uint64_t TypeSize) const {
  3060. SizeOffsetType SizeOffset = ObjSizeVis.compute(Addr);
  3061. if (!ObjSizeVis.bothKnown(SizeOffset)) return false;
  3062. uint64_t Size = SizeOffset.first.getZExtValue();
  3063. int64_t Offset = SizeOffset.second.getSExtValue();
  3064. // Three checks are required to ensure safety:
  3065. // . Offset >= 0 (since the offset is given from the base ptr)
  3066. // . Size >= Offset (unsigned)
  3067. // . Size - Offset >= NeededSize (unsigned)
  3068. return Offset >= 0 && Size >= uint64_t(Offset) &&
  3069. Size - uint64_t(Offset) >= TypeSize / 8;
  3070. }