  1. //===- AddressSanitizer.cpp - memory error detector -----------------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file is a part of AddressSanitizer, an address basic correctness
  10. // checker.
  11. // Details of the algorithm:
  12. // https://github.com/google/sanitizers/wiki/AddressSanitizerAlgorithm
  13. //
  14. // FIXME: This sanitizer does not yet handle scalable vectors
  15. //
  16. //===----------------------------------------------------------------------===//
  17. #include "llvm/Transforms/Instrumentation/AddressSanitizer.h"
  18. #include "llvm/ADT/ArrayRef.h"
  19. #include "llvm/ADT/DenseMap.h"
  20. #include "llvm/ADT/DepthFirstIterator.h"
  21. #include "llvm/ADT/SmallPtrSet.h"
  22. #include "llvm/ADT/SmallVector.h"
  23. #include "llvm/ADT/Statistic.h"
  24. #include "llvm/ADT/StringExtras.h"
  25. #include "llvm/ADT/StringRef.h"
  26. #include "llvm/ADT/Triple.h"
  27. #include "llvm/ADT/Twine.h"
  28. #include "llvm/Analysis/MemoryBuiltins.h"
  29. #include "llvm/Analysis/StackSafetyAnalysis.h"
  30. #include "llvm/Analysis/TargetLibraryInfo.h"
  31. #include "llvm/Analysis/ValueTracking.h"
  32. #include "llvm/BinaryFormat/MachO.h"
  33. #include "llvm/IR/Argument.h"
  34. #include "llvm/IR/Attributes.h"
  35. #include "llvm/IR/BasicBlock.h"
  36. #include "llvm/IR/Comdat.h"
  37. #include "llvm/IR/Constant.h"
  38. #include "llvm/IR/Constants.h"
  39. #include "llvm/IR/DIBuilder.h"
  40. #include "llvm/IR/DataLayout.h"
  41. #include "llvm/IR/DebugInfoMetadata.h"
  42. #include "llvm/IR/DebugLoc.h"
  43. #include "llvm/IR/DerivedTypes.h"
  44. #include "llvm/IR/Dominators.h"
  45. #include "llvm/IR/Function.h"
  46. #include "llvm/IR/GlobalAlias.h"
  47. #include "llvm/IR/GlobalValue.h"
  48. #include "llvm/IR/GlobalVariable.h"
  49. #include "llvm/IR/IRBuilder.h"
  50. #include "llvm/IR/InlineAsm.h"
  51. #include "llvm/IR/InstIterator.h"
  52. #include "llvm/IR/InstVisitor.h"
  53. #include "llvm/IR/InstrTypes.h"
  54. #include "llvm/IR/Instruction.h"
  55. #include "llvm/IR/Instructions.h"
  56. #include "llvm/IR/IntrinsicInst.h"
  57. #include "llvm/IR/Intrinsics.h"
  58. #include "llvm/IR/LLVMContext.h"
  59. #include "llvm/IR/MDBuilder.h"
  60. #include "llvm/IR/Metadata.h"
  61. #include "llvm/IR/Module.h"
  62. #include "llvm/IR/Type.h"
  63. #include "llvm/IR/Use.h"
  64. #include "llvm/IR/Value.h"
  65. #include "llvm/InitializePasses.h"
  66. #include "llvm/MC/MCSectionMachO.h"
  67. #include "llvm/Pass.h"
  68. #include "llvm/Support/Casting.h"
  69. #include "llvm/Support/CommandLine.h"
  70. #include "llvm/Support/Debug.h"
  71. #include "llvm/Support/ErrorHandling.h"
  72. #include "llvm/Support/MathExtras.h"
  73. #include "llvm/Support/ScopedPrinter.h"
  74. #include "llvm/Support/raw_ostream.h"
  75. #include "llvm/Transforms/Instrumentation.h"
  76. #include "llvm/Transforms/Instrumentation/AddressSanitizerCommon.h"
  77. #include "llvm/Transforms/Instrumentation/AddressSanitizerOptions.h"
  78. #include "llvm/Transforms/Utils/ASanStackFrameLayout.h"
  79. #include "llvm/Transforms/Utils/BasicBlockUtils.h"
  80. #include "llvm/Transforms/Utils/Local.h"
  81. #include "llvm/Transforms/Utils/ModuleUtils.h"
  82. #include "llvm/Transforms/Utils/PromoteMemToReg.h"
  83. #include <algorithm>
  84. #include <cassert>
  85. #include <cstddef>
  86. #include <cstdint>
  87. #include <iomanip>
  88. #include <limits>
  89. #include <memory>
  90. #include <sstream>
  91. #include <string>
  92. #include <tuple>
  93. using namespace llvm;
  94. #define DEBUG_TYPE "asan"
  95. static const uint64_t kDefaultShadowScale = 3;
  96. static const uint64_t kDefaultShadowOffset32 = 1ULL << 29;
  97. static const uint64_t kDefaultShadowOffset64 = 1ULL << 44;
  98. static const uint64_t kDynamicShadowSentinel =
  99. std::numeric_limits<uint64_t>::max();
  100. static const uint64_t kSmallX86_64ShadowOffsetBase = 0x7FFFFFFF; // < 2G.
  101. static const uint64_t kSmallX86_64ShadowOffsetAlignMask = ~0xFFFULL;
  102. static const uint64_t kLinuxKasan_ShadowOffset64 = 0xdffffc0000000000;
  103. static const uint64_t kPPC64_ShadowOffset64 = 1ULL << 44;
  104. static const uint64_t kSystemZ_ShadowOffset64 = 1ULL << 52;
  105. static const uint64_t kMIPS32_ShadowOffset32 = 0x0aaa0000;
  106. static const uint64_t kMIPS64_ShadowOffset64 = 1ULL << 37;
  107. static const uint64_t kAArch64_ShadowOffset64 = 1ULL << 36;
  108. static const uint64_t kRISCV64_ShadowOffset64 = 0xd55550000;
  109. static const uint64_t kFreeBSD_ShadowOffset32 = 1ULL << 30;
  110. static const uint64_t kFreeBSD_ShadowOffset64 = 1ULL << 46;
  111. static const uint64_t kFreeBSDKasan_ShadowOffset64 = 0xdffff7c000000000;
  112. static const uint64_t kNetBSD_ShadowOffset32 = 1ULL << 30;
  113. static const uint64_t kNetBSD_ShadowOffset64 = 1ULL << 46;
  114. static const uint64_t kNetBSDKasan_ShadowOffset64 = 0xdfff900000000000;
  115. static const uint64_t kPS4CPU_ShadowOffset64 = 1ULL << 40;
  116. static const uint64_t kWindowsShadowOffset32 = 3ULL << 28;
  117. static const uint64_t kEmscriptenShadowOffset = 0;
  118. // The shadow memory space is dynamically allocated.
  119. static const uint64_t kWindowsShadowOffset64 = kDynamicShadowSentinel;
  120. static const size_t kMinStackMallocSize = 1 << 6; // 64B
  121. static const size_t kMaxStackMallocSize = 1 << 16; // 64K
  122. static const uintptr_t kCurrentStackFrameMagic = 0x41B58AB3;
  123. static const uintptr_t kRetiredStackFrameMagic = 0x45E0360E;
  124. const char kAsanModuleCtorName[] = "asan.module_ctor";
  125. const char kAsanModuleDtorName[] = "asan.module_dtor";
  126. static const uint64_t kAsanCtorAndDtorPriority = 1;
  127. // On Emscripten, the system needs more than one priority for constructors.
  128. static const uint64_t kAsanEmscriptenCtorAndDtorPriority = 50;
  129. const char kAsanReportErrorTemplate[] = "__asan_report_";
  130. const char kAsanRegisterGlobalsName[] = "__asan_register_globals";
  131. const char kAsanUnregisterGlobalsName[] = "__asan_unregister_globals";
  132. const char kAsanRegisterImageGlobalsName[] = "__asan_register_image_globals";
  133. const char kAsanUnregisterImageGlobalsName[] =
  134. "__asan_unregister_image_globals";
  135. const char kAsanRegisterElfGlobalsName[] = "__asan_register_elf_globals";
  136. const char kAsanUnregisterElfGlobalsName[] = "__asan_unregister_elf_globals";
  137. const char kAsanPoisonGlobalsName[] = "__asan_before_dynamic_init";
  138. const char kAsanUnpoisonGlobalsName[] = "__asan_after_dynamic_init";
  139. const char kAsanInitName[] = "__asan_init";
  140. const char kAsanVersionCheckNamePrefix[] = "__asan_version_mismatch_check_v";
  141. const char kAsanPtrCmp[] = "__sanitizer_ptr_cmp";
  142. const char kAsanPtrSub[] = "__sanitizer_ptr_sub";
  143. const char kAsanHandleNoReturnName[] = "__asan_handle_no_return";
  144. static const int kMaxAsanStackMallocSizeClass = 10;
  145. const char kAsanStackMallocNameTemplate[] = "__asan_stack_malloc_";
  146. const char kAsanStackMallocAlwaysNameTemplate[] =
  147. "__asan_stack_malloc_always_";
  148. const char kAsanStackFreeNameTemplate[] = "__asan_stack_free_";
  149. const char kAsanGenPrefix[] = "___asan_gen_";
  150. const char kODRGenPrefix[] = "__odr_asan_gen_";
  151. const char kSanCovGenPrefix[] = "__sancov_gen_";
  152. const char kAsanSetShadowPrefix[] = "__asan_set_shadow_";
  153. const char kAsanPoisonStackMemoryName[] = "__asan_poison_stack_memory";
  154. const char kAsanUnpoisonStackMemoryName[] = "__asan_unpoison_stack_memory";
  155. // ASan version script has __asan_* wildcard. Triple underscore prevents a
  156. // linker (gold) warning about attempting to export a local symbol.
  157. const char kAsanGlobalsRegisteredFlagName[] = "___asan_globals_registered";
  158. const char kAsanOptionDetectUseAfterReturn[] =
  159. "__asan_option_detect_stack_use_after_return";
  160. const char kAsanShadowMemoryDynamicAddress[] =
  161. "__asan_shadow_memory_dynamic_address";
  162. const char kAsanAllocaPoison[] = "__asan_alloca_poison";
  163. const char kAsanAllocasUnpoison[] = "__asan_allocas_unpoison";
  164. const char kAMDGPUAddressSharedName[] = "llvm.amdgcn.is.shared";
  165. const char kAMDGPUAddressPrivateName[] = "llvm.amdgcn.is.private";
  166. // Access sizes are powers of two: 1, 2, 4, 8, 16.
  167. static const size_t kNumberOfAccessSizes = 5;
  168. static const uint64_t kAllocaRzSize = 32;
  169. // ASanAccessInfo implementation constants.
  170. constexpr size_t kCompileKernelShift = 0;
  171. constexpr size_t kCompileKernelMask = 0x1;
  172. constexpr size_t kAccessSizeIndexShift = 1;
  173. constexpr size_t kAccessSizeIndexMask = 0xf;
  174. constexpr size_t kIsWriteShift = 5;
  175. constexpr size_t kIsWriteMask = 0x1;
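// For illustration: a 4-byte write in user-space code has AccessSizeIndex =
// log2(4) = 2, IsWrite = 1 and CompileKernel = 0, so ASanAccessInfo packs it as
// (1 << kIsWriteShift) + (2 << kAccessSizeIndexShift) = 0x24, and decoding
// recovers the fields, e.g. (0x24 >> kAccessSizeIndexShift) & kAccessSizeIndexMask == 2.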
  176. // Command-line flags.
  177. static cl::opt<bool> ClEnableKasan(
  178. "asan-kernel", cl::desc("Enable KernelAddressSanitizer instrumentation"),
  179. cl::Hidden, cl::init(false));
  180. static cl::opt<bool> ClRecover(
  181. "asan-recover",
  182. cl::desc("Enable recovery mode (continue-after-error)."),
  183. cl::Hidden, cl::init(false));
  184. static cl::opt<bool> ClInsertVersionCheck(
  185. "asan-guard-against-version-mismatch",
  186. cl::desc("Guard against compiler/runtime version mismatch."),
  187. cl::Hidden, cl::init(true));
  188. // This flag may need to be replaced with -f[no-]asan-reads.
  189. static cl::opt<bool> ClInstrumentReads("asan-instrument-reads",
  190. cl::desc("instrument read instructions"),
  191. cl::Hidden, cl::init(true));
  192. static cl::opt<bool> ClInstrumentWrites(
  193. "asan-instrument-writes", cl::desc("instrument write instructions"),
  194. cl::Hidden, cl::init(true));
  195. static cl::opt<bool>
  196. ClUseStackSafety("asan-use-stack-safety", cl::Hidden, cl::init(false),
  197. cl::desc("Use Stack Safety analysis results"),
  198. cl::Optional);
  199. static cl::opt<bool> ClInstrumentAtomics(
  200. "asan-instrument-atomics",
  201. cl::desc("instrument atomic instructions (rmw, cmpxchg)"), cl::Hidden,
  202. cl::init(true));
  203. static cl::opt<bool>
  204. ClInstrumentByval("asan-instrument-byval",
  205. cl::desc("instrument byval call arguments"), cl::Hidden,
  206. cl::init(true));
  207. static cl::opt<bool> ClAlwaysSlowPath(
  208. "asan-always-slow-path",
  209. cl::desc("use instrumentation with slow path for all accesses"), cl::Hidden,
  210. cl::init(false));
  211. static cl::opt<bool> ClForceDynamicShadow(
  212. "asan-force-dynamic-shadow",
  213. cl::desc("Load shadow address into a local variable for each function"),
  214. cl::Hidden, cl::init(false));
  215. static cl::opt<bool>
  216. ClWithIfunc("asan-with-ifunc",
  217. cl::desc("Access dynamic shadow through an ifunc global on "
  218. "platforms that support this"),
  219. cl::Hidden, cl::init(true));
  220. static cl::opt<bool> ClWithIfuncSuppressRemat(
  221. "asan-with-ifunc-suppress-remat",
  222. cl::desc("Suppress rematerialization of dynamic shadow address by passing "
  223. "it through inline asm in prologue."),
  224. cl::Hidden, cl::init(true));
  225. // This flag limits the number of instructions to be instrumented
  226. // in any given BB. Normally, this should be set to unlimited (INT_MAX),
  227. // but due to http://llvm.org/bugs/show_bug.cgi?id=12652 we temporarily
  228. // set it to 10000.
  229. static cl::opt<int> ClMaxInsnsToInstrumentPerBB(
  230. "asan-max-ins-per-bb", cl::init(10000),
  231. cl::desc("maximal number of instructions to instrument in any given BB"),
  232. cl::Hidden);
  233. // This flag may need to be replaced with -f[no]asan-stack.
  234. static cl::opt<bool> ClStack("asan-stack", cl::desc("Handle stack memory"),
  235. cl::Hidden, cl::init(true));
  236. static cl::opt<uint32_t> ClMaxInlinePoisoningSize(
  237. "asan-max-inline-poisoning-size",
  238. cl::desc(
  239. "Inline shadow poisoning for blocks up to the given size in bytes."),
  240. cl::Hidden, cl::init(64));
  241. static cl::opt<AsanDetectStackUseAfterReturnMode> ClUseAfterReturn(
  242. "asan-use-after-return",
  243. cl::desc("Sets the mode of detection for stack-use-after-return."),
  244. cl::values(
  245. clEnumValN(AsanDetectStackUseAfterReturnMode::Never, "never",
  246. "Never detect stack use after return."),
  247. clEnumValN(
  248. AsanDetectStackUseAfterReturnMode::Runtime, "runtime",
  249. "Detect stack use after return if "
  250. "binary flag 'ASAN_OPTIONS=detect_stack_use_after_return' is set."),
  251. clEnumValN(AsanDetectStackUseAfterReturnMode::Always, "always",
  252. "Always detect stack use after return.")),
  253. cl::Hidden, cl::init(AsanDetectStackUseAfterReturnMode::Runtime));
  254. static cl::opt<bool> ClRedzoneByvalArgs("asan-redzone-byval-args",
  255. cl::desc("Create redzones for byval "
  256. "arguments (extra copy "
  257. "required)"), cl::Hidden,
  258. cl::init(true));
  259. static cl::opt<bool> ClUseAfterScope("asan-use-after-scope",
  260. cl::desc("Check stack-use-after-scope"),
  261. cl::Hidden, cl::init(false));
  262. // This flag may need to be replaced with -f[no]asan-globals.
  263. static cl::opt<bool> ClGlobals("asan-globals",
  264. cl::desc("Handle global objects"), cl::Hidden,
  265. cl::init(true));
  266. static cl::opt<bool> ClInitializers("asan-initialization-order",
  267. cl::desc("Handle C++ initializer order"),
  268. cl::Hidden, cl::init(true));
  269. static cl::opt<bool> ClInvalidPointerPairs(
  270. "asan-detect-invalid-pointer-pair",
  271. cl::desc("Instrument <, <=, >, >=, - with pointer operands"), cl::Hidden,
  272. cl::init(false));
  273. static cl::opt<bool> ClInvalidPointerCmp(
  274. "asan-detect-invalid-pointer-cmp",
  275. cl::desc("Instrument <, <=, >, >= with pointer operands"), cl::Hidden,
  276. cl::init(false));
  277. static cl::opt<bool> ClInvalidPointerSub(
  278. "asan-detect-invalid-pointer-sub",
  279. cl::desc("Instrument - operations with pointer operands"), cl::Hidden,
  280. cl::init(false));
  281. static cl::opt<unsigned> ClRealignStack(
  282. "asan-realign-stack",
  283. cl::desc("Realign stack to the value of this flag (power of two)"),
  284. cl::Hidden, cl::init(32));
  285. static cl::opt<int> ClInstrumentationWithCallsThreshold(
  286. "asan-instrumentation-with-call-threshold",
  287. cl::desc(
  288. "If the function being instrumented contains more than "
  289. "this number of memory accesses, use callbacks instead of "
  290. "inline checks (-1 means never use callbacks)."),
  291. cl::Hidden, cl::init(7000));
  292. static cl::opt<std::string> ClMemoryAccessCallbackPrefix(
  293. "asan-memory-access-callback-prefix",
  294. cl::desc("Prefix for memory access callbacks"), cl::Hidden,
  295. cl::init("__asan_"));
  296. static cl::opt<bool>
  297. ClInstrumentDynamicAllocas("asan-instrument-dynamic-allocas",
  298. cl::desc("instrument dynamic allocas"),
  299. cl::Hidden, cl::init(true));
  300. static cl::opt<bool> ClSkipPromotableAllocas(
  301. "asan-skip-promotable-allocas",
  302. cl::desc("Do not instrument promotable allocas"), cl::Hidden,
  303. cl::init(true));
  304. // These flags allow changing the shadow mapping.
  305. // The shadow mapping looks like
  306. // Shadow = (Mem >> scale) + offset
  307. static cl::opt<int> ClMappingScale("asan-mapping-scale",
  308. cl::desc("scale of asan shadow mapping"),
  309. cl::Hidden, cl::init(0));
  310. static cl::opt<uint64_t>
  311. ClMappingOffset("asan-mapping-offset",
  312. cl::desc("offset of asan shadow mapping [EXPERIMENTAL]"),
  313. cl::Hidden, cl::init(0));
  314. // Optimization flags. Not user visible, used mostly for testing
  315. // and benchmarking the tool.
  316. static cl::opt<bool> ClOpt("asan-opt", cl::desc("Optimize instrumentation"),
  317. cl::Hidden, cl::init(true));
  318. static cl::opt<bool> ClOptimizeCallbacks("asan-optimize-callbacks",
  319. cl::desc("Optimize callbacks"),
  320. cl::Hidden, cl::init(false));
  321. static cl::opt<bool> ClOptSameTemp(
  322. "asan-opt-same-temp", cl::desc("Instrument the same temp just once"),
  323. cl::Hidden, cl::init(true));
  324. static cl::opt<bool> ClOptGlobals("asan-opt-globals",
  325. cl::desc("Don't instrument scalar globals"),
  326. cl::Hidden, cl::init(true));
  327. static cl::opt<bool> ClOptStack(
  328. "asan-opt-stack", cl::desc("Don't instrument scalar stack variables"),
  329. cl::Hidden, cl::init(false));
  330. static cl::opt<bool> ClDynamicAllocaStack(
  331. "asan-stack-dynamic-alloca",
  332. cl::desc("Use dynamic alloca to represent stack variables"), cl::Hidden,
  333. cl::init(true));
  334. static cl::opt<uint32_t> ClForceExperiment(
  335. "asan-force-experiment",
  336. cl::desc("Force optimization experiment (for testing)"), cl::Hidden,
  337. cl::init(0));
  338. static cl::opt<bool>
  339. ClUsePrivateAlias("asan-use-private-alias",
  340. cl::desc("Use private aliases for global variables"),
  341. cl::Hidden, cl::init(false));
  342. static cl::opt<bool>
  343. ClUseOdrIndicator("asan-use-odr-indicator",
  344. cl::desc("Use odr indicators to improve ODR reporting"),
  345. cl::Hidden, cl::init(false));
  346. static cl::opt<bool>
  347. ClUseGlobalsGC("asan-globals-live-support",
  348. cl::desc("Use linker features to support dead "
  349. "code stripping of globals"),
  350. cl::Hidden, cl::init(true));
  351. // This is on by default even though there is a bug in gold:
  352. // https://sourceware.org/bugzilla/show_bug.cgi?id=19002
  353. static cl::opt<bool>
  354. ClWithComdat("asan-with-comdat",
  355. cl::desc("Place ASan constructors in comdat sections"),
  356. cl::Hidden, cl::init(true));
  357. static cl::opt<AsanDtorKind> ClOverrideDestructorKind(
  358. "asan-destructor-kind",
  359. cl::desc("Sets the ASan destructor kind. The default is to use the value "
  360. "provided to the pass constructor"),
  361. cl::values(clEnumValN(AsanDtorKind::None, "none", "No destructors"),
  362. clEnumValN(AsanDtorKind::Global, "global",
  363. "Use global destructors")),
  364. cl::init(AsanDtorKind::Invalid), cl::Hidden);
  365. // Debug flags.
  366. static cl::opt<int> ClDebug("asan-debug", cl::desc("debug"), cl::Hidden,
  367. cl::init(0));
  368. static cl::opt<int> ClDebugStack("asan-debug-stack", cl::desc("debug stack"),
  369. cl::Hidden, cl::init(0));
  370. static cl::opt<std::string> ClDebugFunc("asan-debug-func", cl::Hidden,
  371. cl::desc("Debug func"));
  372. static cl::opt<int> ClDebugMin("asan-debug-min", cl::desc("Debug min inst"),
  373. cl::Hidden, cl::init(-1));
  374. static cl::opt<int> ClDebugMax("asan-debug-max", cl::desc("Debug max inst"),
  375. cl::Hidden, cl::init(-1));
  376. STATISTIC(NumInstrumentedReads, "Number of instrumented reads");
  377. STATISTIC(NumInstrumentedWrites, "Number of instrumented writes");
  378. STATISTIC(NumOptimizedAccessesToGlobalVar,
  379. "Number of optimized accesses to global vars");
  380. STATISTIC(NumOptimizedAccessesToStackVar,
  381. "Number of optimized accesses to stack vars");
  382. namespace {
  383. /// This struct defines the shadow mapping using the rule:
  384. /// shadow = (mem >> Scale) ADD-or-OR Offset.
  385. /// If InGlobal is true, then
  386. /// extern char __asan_shadow[];
  387. /// shadow = (mem >> Scale) + &__asan_shadow
  388. struct ShadowMapping {
  389. int Scale;
  390. uint64_t Offset;
  391. bool OrShadowOffset;
  392. bool InGlobal;
  393. };
  394. } // end anonymous namespace
  395. static ShadowMapping getShadowMapping(const Triple &TargetTriple, int LongSize,
  396. bool IsKasan) {
  397. bool IsAndroid = TargetTriple.isAndroid();
  398. bool IsIOS = TargetTriple.isiOS() || TargetTriple.isWatchOS();
  399. bool IsMacOS = TargetTriple.isMacOSX();
  400. bool IsFreeBSD = TargetTriple.isOSFreeBSD();
  401. bool IsNetBSD = TargetTriple.isOSNetBSD();
  402. bool IsPS4CPU = TargetTriple.isPS4CPU();
  403. bool IsLinux = TargetTriple.isOSLinux();
  404. bool IsPPC64 = TargetTriple.getArch() == Triple::ppc64 ||
  405. TargetTriple.getArch() == Triple::ppc64le;
  406. bool IsSystemZ = TargetTriple.getArch() == Triple::systemz;
  407. bool IsX86_64 = TargetTriple.getArch() == Triple::x86_64;
  408. bool IsMIPS32 = TargetTriple.isMIPS32();
  409. bool IsMIPS64 = TargetTriple.isMIPS64();
  410. bool IsArmOrThumb = TargetTriple.isARM() || TargetTriple.isThumb();
  411. bool IsAArch64 = TargetTriple.getArch() == Triple::aarch64;
  412. bool IsRISCV64 = TargetTriple.getArch() == Triple::riscv64;
  413. bool IsWindows = TargetTriple.isOSWindows();
  414. bool IsFuchsia = TargetTriple.isOSFuchsia();
  415. bool IsEmscripten = TargetTriple.isOSEmscripten();
  416. bool IsAMDGPU = TargetTriple.isAMDGPU();
  417. ShadowMapping Mapping;
  418. Mapping.Scale = kDefaultShadowScale;
  419. if (ClMappingScale.getNumOccurrences() > 0) {
  420. Mapping.Scale = ClMappingScale;
  421. }
  422. if (LongSize == 32) {
  423. if (IsAndroid)
  424. Mapping.Offset = kDynamicShadowSentinel;
  425. else if (IsMIPS32)
  426. Mapping.Offset = kMIPS32_ShadowOffset32;
  427. else if (IsFreeBSD)
  428. Mapping.Offset = kFreeBSD_ShadowOffset32;
  429. else if (IsNetBSD)
  430. Mapping.Offset = kNetBSD_ShadowOffset32;
  431. else if (IsIOS)
  432. Mapping.Offset = kDynamicShadowSentinel;
  433. else if (IsWindows)
  434. Mapping.Offset = kWindowsShadowOffset32;
  435. else if (IsEmscripten)
  436. Mapping.Offset = kEmscriptenShadowOffset;
  437. else
  438. Mapping.Offset = kDefaultShadowOffset32;
  439. } else { // LongSize == 64
  440. // Fuchsia is always PIE, which means that the beginning of the address
  441. // space is always available.
  442. if (IsFuchsia)
  443. Mapping.Offset = 0;
  444. else if (IsPPC64)
  445. Mapping.Offset = kPPC64_ShadowOffset64;
  446. else if (IsSystemZ)
  447. Mapping.Offset = kSystemZ_ShadowOffset64;
  448. else if (IsFreeBSD && !IsMIPS64) {
  449. if (IsKasan)
  450. Mapping.Offset = kFreeBSDKasan_ShadowOffset64;
  451. else
  452. Mapping.Offset = kFreeBSD_ShadowOffset64;
  453. } else if (IsNetBSD) {
  454. if (IsKasan)
  455. Mapping.Offset = kNetBSDKasan_ShadowOffset64;
  456. else
  457. Mapping.Offset = kNetBSD_ShadowOffset64;
  458. } else if (IsPS4CPU)
  459. Mapping.Offset = kPS4CPU_ShadowOffset64;
  460. else if (IsLinux && IsX86_64) {
  461. if (IsKasan)
  462. Mapping.Offset = kLinuxKasan_ShadowOffset64;
  463. else
  464. Mapping.Offset = (kSmallX86_64ShadowOffsetBase &
  465. (kSmallX86_64ShadowOffsetAlignMask << Mapping.Scale));
  466. } else if (IsWindows && IsX86_64) {
  467. Mapping.Offset = kWindowsShadowOffset64;
  468. } else if (IsMIPS64)
  469. Mapping.Offset = kMIPS64_ShadowOffset64;
  470. else if (IsIOS)
  471. Mapping.Offset = kDynamicShadowSentinel;
  472. else if (IsMacOS && IsAArch64)
  473. Mapping.Offset = kDynamicShadowSentinel;
  474. else if (IsAArch64)
  475. Mapping.Offset = kAArch64_ShadowOffset64;
  476. else if (IsRISCV64)
  477. Mapping.Offset = kRISCV64_ShadowOffset64;
  478. else if (IsAMDGPU)
  479. Mapping.Offset = (kSmallX86_64ShadowOffsetBase &
  480. (kSmallX86_64ShadowOffsetAlignMask << Mapping.Scale));
  481. else
  482. Mapping.Offset = kDefaultShadowOffset64;
  483. }
  484. if (ClForceDynamicShadow) {
  485. Mapping.Offset = kDynamicShadowSentinel;
  486. }
  487. if (ClMappingOffset.getNumOccurrences() > 0) {
  488. Mapping.Offset = ClMappingOffset;
  489. }
  490. // OR-ing the shadow offset is more efficient (at least on x86) if the offset
  491. // is a power of two, but on ppc64 we have to use add since the shadow
  492. // offset is not necessarily 1/8-th of the address space. On SystemZ,
  493. // we could OR the constant in a single instruction, but it's more
  494. // efficient to load it once and use indexed addressing.
  495. Mapping.OrShadowOffset = !IsAArch64 && !IsPPC64 && !IsSystemZ && !IsPS4CPU &&
  496. !IsRISCV64 &&
  497. !(Mapping.Offset & (Mapping.Offset - 1)) &&
  498. Mapping.Offset != kDynamicShadowSentinel;
  499. bool IsAndroidWithIfuncSupport =
  500. IsAndroid && !TargetTriple.isAndroidVersionLT(21);
  501. Mapping.InGlobal = ClWithIfunc && IsAndroidWithIfuncSupport && IsArmOrThumb;
  502. return Mapping;
  503. }
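// For example, on Linux x86_64 with the default Scale of 3 this yields
// Offset = kSmallX86_64ShadowOffsetBase & (kSmallX86_64ShadowOffsetAlignMask << 3)
//        = 0x7fff8000, giving the familiar mapping Shadow = (Mem >> 3) + 0x7fff8000;
// because 0x7fff8000 is not a power of two, the offset is added rather than OR-ed.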
  504. namespace llvm {
  505. void getAddressSanitizerParams(const Triple &TargetTriple, int LongSize,
  506. bool IsKasan, uint64_t *ShadowBase,
  507. int *MappingScale, bool *OrShadowOffset) {
  508. auto Mapping = getShadowMapping(TargetTriple, LongSize, IsKasan);
  509. *ShadowBase = Mapping.Offset;
  510. *MappingScale = Mapping.Scale;
  511. *OrShadowOffset = Mapping.OrShadowOffset;
  512. }
  513. ASanAccessInfo::ASanAccessInfo(int32_t Packed)
  514. : Packed(Packed),
  515. AccessSizeIndex((Packed >> kAccessSizeIndexShift) & kAccessSizeIndexMask),
  516. IsWrite((Packed >> kIsWriteShift) & kIsWriteMask),
  517. CompileKernel((Packed >> kCompileKernelShift) & kCompileKernelMask) {}
  518. ASanAccessInfo::ASanAccessInfo(bool IsWrite, bool CompileKernel,
  519. uint8_t AccessSizeIndex)
  520. : Packed((IsWrite << kIsWriteShift) +
  521. (CompileKernel << kCompileKernelShift) +
  522. (AccessSizeIndex << kAccessSizeIndexShift)),
  523. AccessSizeIndex(AccessSizeIndex), IsWrite(IsWrite),
  524. CompileKernel(CompileKernel) {}
  525. } // namespace llvm
  526. static uint64_t getRedzoneSizeForScale(int MappingScale) {
  527. // Redzone used for stack and globals is at least 32 bytes.
  528. // For scales 6 and 7, the redzone has to be 64 and 128 bytes respectively.
  529. return std::max(32U, 1U << MappingScale);
  530. }
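// E.g. the default Scale of 3 gives max(32, 1 << 3) = 32-byte redzones, while
// Scale 7 gives max(32, 1 << 7) = 128.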
  531. static uint64_t GetCtorAndDtorPriority(Triple &TargetTriple) {
  532. if (TargetTriple.isOSEmscripten()) {
  533. return kAsanEmscriptenCtorAndDtorPriority;
  534. } else {
  535. return kAsanCtorAndDtorPriority;
  536. }
  537. }
  538. namespace {
  539. /// Module analysis for getting various metadata about the module.
  540. class ASanGlobalsMetadataWrapperPass : public ModulePass {
  541. public:
  542. static char ID;
  543. ASanGlobalsMetadataWrapperPass() : ModulePass(ID) {
  544. initializeASanGlobalsMetadataWrapperPassPass(
  545. *PassRegistry::getPassRegistry());
  546. }
  547. bool runOnModule(Module &M) override {
  548. GlobalsMD = GlobalsMetadata(M);
  549. return false;
  550. }
  551. StringRef getPassName() const override {
  552. return "ASanGlobalsMetadataWrapperPass";
  553. }
  554. void getAnalysisUsage(AnalysisUsage &AU) const override {
  555. AU.setPreservesAll();
  556. }
  557. GlobalsMetadata &getGlobalsMD() { return GlobalsMD; }
  558. private:
  559. GlobalsMetadata GlobalsMD;
  560. };
  561. char ASanGlobalsMetadataWrapperPass::ID = 0;
  562. /// AddressSanitizer: instrument the code in module to find memory bugs.
  563. struct AddressSanitizer {
  564. AddressSanitizer(Module &M, const GlobalsMetadata *GlobalsMD,
  565. const StackSafetyGlobalInfo *SSGI,
  566. bool CompileKernel = false, bool Recover = false,
  567. bool UseAfterScope = false,
  568. AsanDetectStackUseAfterReturnMode UseAfterReturn =
  569. AsanDetectStackUseAfterReturnMode::Runtime)
  570. : CompileKernel(ClEnableKasan.getNumOccurrences() > 0 ? ClEnableKasan
  571. : CompileKernel),
  572. Recover(ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover),
  573. UseAfterScope(UseAfterScope || ClUseAfterScope),
  574. UseAfterReturn(ClUseAfterReturn.getNumOccurrences() ? ClUseAfterReturn
  575. : UseAfterReturn),
  576. GlobalsMD(*GlobalsMD), SSGI(SSGI) {
  577. C = &(M.getContext());
  578. LongSize = M.getDataLayout().getPointerSizeInBits();
  579. IntptrTy = Type::getIntNTy(*C, LongSize);
  580. Int8PtrTy = Type::getInt8PtrTy(*C);
  581. Int32Ty = Type::getInt32Ty(*C);
  582. TargetTriple = Triple(M.getTargetTriple());
  583. Mapping = getShadowMapping(TargetTriple, LongSize, this->CompileKernel);
  584. assert(this->UseAfterReturn != AsanDetectStackUseAfterReturnMode::Invalid);
  585. }
  586. uint64_t getAllocaSizeInBytes(const AllocaInst &AI) const {
  587. uint64_t ArraySize = 1;
  588. if (AI.isArrayAllocation()) {
  589. const ConstantInt *CI = dyn_cast<ConstantInt>(AI.getArraySize());
  590. assert(CI && "non-constant array size");
  591. ArraySize = CI->getZExtValue();
  592. }
  593. Type *Ty = AI.getAllocatedType();
  594. uint64_t SizeInBytes =
  595. AI.getModule()->getDataLayout().getTypeAllocSize(Ty);
  596. return SizeInBytes * ArraySize;
  597. }
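// For instance, "alloca [10 x i32]" occupies 40 bytes, and with a typical data
// layout the array allocation "alloca i64, i32 8" occupies 8 * 8 = 64 bytes.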
  598. /// Check if we want (and can) handle this alloca.
  599. bool isInterestingAlloca(const AllocaInst &AI);
  600. bool ignoreAccess(Instruction *Inst, Value *Ptr);
  601. void getInterestingMemoryOperands(
  602. Instruction *I, SmallVectorImpl<InterestingMemoryOperand> &Interesting);
  603. void instrumentMop(ObjectSizeOffsetVisitor &ObjSizeVis,
  604. InterestingMemoryOperand &O, bool UseCalls,
  605. const DataLayout &DL);
  606. void instrumentPointerComparisonOrSubtraction(Instruction *I);
  607. void instrumentAddress(Instruction *OrigIns, Instruction *InsertBefore,
  608. Value *Addr, uint32_t TypeSize, bool IsWrite,
  609. Value *SizeArgument, bool UseCalls, uint32_t Exp);
  610. Instruction *instrumentAMDGPUAddress(Instruction *OrigIns,
  611. Instruction *InsertBefore, Value *Addr,
  612. uint32_t TypeSize, bool IsWrite,
  613. Value *SizeArgument);
  614. void instrumentUnusualSizeOrAlignment(Instruction *I,
  615. Instruction *InsertBefore, Value *Addr,
  616. uint32_t TypeSize, bool IsWrite,
  617. Value *SizeArgument, bool UseCalls,
  618. uint32_t Exp);
  619. Value *createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
  620. Value *ShadowValue, uint32_t TypeSize);
  621. Instruction *generateCrashCode(Instruction *InsertBefore, Value *Addr,
  622. bool IsWrite, size_t AccessSizeIndex,
  623. Value *SizeArgument, uint32_t Exp);
  624. void instrumentMemIntrinsic(MemIntrinsic *MI);
  625. Value *memToShadow(Value *Shadow, IRBuilder<> &IRB);
  626. bool suppressInstrumentationSiteForDebug(int &Instrumented);
  627. bool instrumentFunction(Function &F, const TargetLibraryInfo *TLI);
  628. bool maybeInsertAsanInitAtFunctionEntry(Function &F);
  629. bool maybeInsertDynamicShadowAtFunctionEntry(Function &F);
  630. void markEscapedLocalAllocas(Function &F);
  631. private:
  632. friend struct FunctionStackPoisoner;
  633. void initializeCallbacks(Module &M);
  634. bool LooksLikeCodeInBug11395(Instruction *I);
  635. bool GlobalIsLinkerInitialized(GlobalVariable *G);
  636. bool isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis, Value *Addr,
  637. uint64_t TypeSize) const;
  638. /// Helper to clean up per-function state.
  639. struct FunctionStateRAII {
  640. AddressSanitizer *Pass;
  641. FunctionStateRAII(AddressSanitizer *Pass) : Pass(Pass) {
  642. assert(Pass->ProcessedAllocas.empty() &&
  643. "last pass forgot to clear cache");
  644. assert(!Pass->LocalDynamicShadow);
  645. }
  646. ~FunctionStateRAII() {
  647. Pass->LocalDynamicShadow = nullptr;
  648. Pass->ProcessedAllocas.clear();
  649. }
  650. };
  651. LLVMContext *C;
  652. Triple TargetTriple;
  653. int LongSize;
  654. bool CompileKernel;
  655. bool Recover;
  656. bool UseAfterScope;
  657. AsanDetectStackUseAfterReturnMode UseAfterReturn;
  658. Type *IntptrTy;
  659. Type *Int8PtrTy;
  660. Type *Int32Ty;
  661. ShadowMapping Mapping;
  662. FunctionCallee AsanHandleNoReturnFunc;
  663. FunctionCallee AsanPtrCmpFunction, AsanPtrSubFunction;
  664. Constant *AsanShadowGlobal;
  665. // These arrays are indexed by AccessIsWrite, Experiment and log2(AccessSize).
  666. FunctionCallee AsanErrorCallback[2][2][kNumberOfAccessSizes];
  667. FunctionCallee AsanMemoryAccessCallback[2][2][kNumberOfAccessSizes];
  668. // These arrays are indexed by AccessIsWrite and Experiment.
  669. FunctionCallee AsanErrorCallbackSized[2][2];
  670. FunctionCallee AsanMemoryAccessCallbackSized[2][2];
  671. FunctionCallee AsanMemmove, AsanMemcpy, AsanMemset;
  672. Value *LocalDynamicShadow = nullptr;
  673. const GlobalsMetadata &GlobalsMD;
  674. const StackSafetyGlobalInfo *SSGI;
  675. DenseMap<const AllocaInst *, bool> ProcessedAllocas;
  676. FunctionCallee AMDGPUAddressShared;
  677. FunctionCallee AMDGPUAddressPrivate;
  678. };
  679. class AddressSanitizerLegacyPass : public FunctionPass {
  680. public:
  681. static char ID;
  682. explicit AddressSanitizerLegacyPass(
  683. bool CompileKernel = false, bool Recover = false,
  684. bool UseAfterScope = false,
  685. AsanDetectStackUseAfterReturnMode UseAfterReturn =
  686. AsanDetectStackUseAfterReturnMode::Runtime)
  687. : FunctionPass(ID), CompileKernel(CompileKernel), Recover(Recover),
  688. UseAfterScope(UseAfterScope), UseAfterReturn(UseAfterReturn) {
  689. initializeAddressSanitizerLegacyPassPass(*PassRegistry::getPassRegistry());
  690. }
  691. StringRef getPassName() const override {
  692. return "AddressSanitizerFunctionPass";
  693. }
  694. void getAnalysisUsage(AnalysisUsage &AU) const override {
  695. AU.addRequired<ASanGlobalsMetadataWrapperPass>();
  696. if (ClUseStackSafety)
  697. AU.addRequired<StackSafetyGlobalInfoWrapperPass>();
  698. AU.addRequired<TargetLibraryInfoWrapperPass>();
  699. }
  700. bool runOnFunction(Function &F) override {
  701. GlobalsMetadata &GlobalsMD =
  702. getAnalysis<ASanGlobalsMetadataWrapperPass>().getGlobalsMD();
  703. const StackSafetyGlobalInfo *const SSGI =
  704. ClUseStackSafety
  705. ? &getAnalysis<StackSafetyGlobalInfoWrapperPass>().getResult()
  706. : nullptr;
  707. const TargetLibraryInfo *TLI =
  708. &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  709. AddressSanitizer ASan(*F.getParent(), &GlobalsMD, SSGI, CompileKernel,
  710. Recover, UseAfterScope, UseAfterReturn);
  711. return ASan.instrumentFunction(F, TLI);
  712. }
  713. private:
  714. bool CompileKernel;
  715. bool Recover;
  716. bool UseAfterScope;
  717. AsanDetectStackUseAfterReturnMode UseAfterReturn;
  718. };
  719. class ModuleAddressSanitizer {
  720. public:
  721. ModuleAddressSanitizer(Module &M, const GlobalsMetadata *GlobalsMD,
  722. bool CompileKernel = false, bool Recover = false,
  723. bool UseGlobalsGC = true, bool UseOdrIndicator = false,
  724. AsanDtorKind DestructorKind = AsanDtorKind::Global)
  725. : GlobalsMD(*GlobalsMD),
  726. CompileKernel(ClEnableKasan.getNumOccurrences() > 0 ? ClEnableKasan
  727. : CompileKernel),
  728. Recover(ClRecover.getNumOccurrences() > 0 ? ClRecover : Recover),
  729. UseGlobalsGC(UseGlobalsGC && ClUseGlobalsGC && !this->CompileKernel),
  730. // Enable aliases as they should have no downside with ODR indicators.
  731. UsePrivateAlias(UseOdrIndicator || ClUsePrivateAlias),
  732. UseOdrIndicator(UseOdrIndicator || ClUseOdrIndicator),
  733. // Not a typo: ClWithComdat is almost completely pointless without
  734. // ClUseGlobalsGC (because then it only works on modules without
  735. // globals, which are rare); it is a prerequisite for ClUseGlobalsGC;
  736. // and both suffer from gold PR19002, for which the UseGlobalsGC constructor
  737. // argument is designed as a workaround. Therefore, disable both
  738. // ClWithComdat and ClUseGlobalsGC unless the frontend says it's ok to
  739. // do globals-gc.
  740. UseCtorComdat(UseGlobalsGC && ClWithComdat && !this->CompileKernel),
  741. DestructorKind(DestructorKind) {
  742. C = &(M.getContext());
  743. int LongSize = M.getDataLayout().getPointerSizeInBits();
  744. IntptrTy = Type::getIntNTy(*C, LongSize);
  745. TargetTriple = Triple(M.getTargetTriple());
  746. Mapping = getShadowMapping(TargetTriple, LongSize, this->CompileKernel);
  747. if (ClOverrideDestructorKind != AsanDtorKind::Invalid)
  748. this->DestructorKind = ClOverrideDestructorKind;
  749. assert(this->DestructorKind != AsanDtorKind::Invalid);
  750. }
  751. bool instrumentModule(Module &);
  752. private:
  753. void initializeCallbacks(Module &M);
  754. bool InstrumentGlobals(IRBuilder<> &IRB, Module &M, bool *CtorComdat);
  755. void InstrumentGlobalsCOFF(IRBuilder<> &IRB, Module &M,
  756. ArrayRef<GlobalVariable *> ExtendedGlobals,
  757. ArrayRef<Constant *> MetadataInitializers);
  758. void InstrumentGlobalsELF(IRBuilder<> &IRB, Module &M,
  759. ArrayRef<GlobalVariable *> ExtendedGlobals,
  760. ArrayRef<Constant *> MetadataInitializers,
  761. const std::string &UniqueModuleId);
  762. void InstrumentGlobalsMachO(IRBuilder<> &IRB, Module &M,
  763. ArrayRef<GlobalVariable *> ExtendedGlobals,
  764. ArrayRef<Constant *> MetadataInitializers);
  765. void
  766. InstrumentGlobalsWithMetadataArray(IRBuilder<> &IRB, Module &M,
  767. ArrayRef<GlobalVariable *> ExtendedGlobals,
  768. ArrayRef<Constant *> MetadataInitializers);
  769. GlobalVariable *CreateMetadataGlobal(Module &M, Constant *Initializer,
  770. StringRef OriginalName);
  771. void SetComdatForGlobalMetadata(GlobalVariable *G, GlobalVariable *Metadata,
  772. StringRef InternalSuffix);
  773. Instruction *CreateAsanModuleDtor(Module &M);
  774. const GlobalVariable *getExcludedAliasedGlobal(const GlobalAlias &GA) const;
  775. bool shouldInstrumentGlobal(GlobalVariable *G) const;
  776. bool ShouldUseMachOGlobalsSection() const;
  777. StringRef getGlobalMetadataSection() const;
  778. void poisonOneInitializer(Function &GlobalInit, GlobalValue *ModuleName);
  779. void createInitializerPoisonCalls(Module &M, GlobalValue *ModuleName);
  780. uint64_t getMinRedzoneSizeForGlobal() const {
  781. return getRedzoneSizeForScale(Mapping.Scale);
  782. }
  783. uint64_t getRedzoneSizeForGlobal(uint64_t SizeInBytes) const;
  784. int GetAsanVersion(const Module &M) const;
  785. const GlobalsMetadata &GlobalsMD;
  786. bool CompileKernel;
  787. bool Recover;
  788. bool UseGlobalsGC;
  789. bool UsePrivateAlias;
  790. bool UseOdrIndicator;
  791. bool UseCtorComdat;
  792. AsanDtorKind DestructorKind;
  793. Type *IntptrTy;
  794. LLVMContext *C;
  795. Triple TargetTriple;
  796. ShadowMapping Mapping;
  797. FunctionCallee AsanPoisonGlobals;
  798. FunctionCallee AsanUnpoisonGlobals;
  799. FunctionCallee AsanRegisterGlobals;
  800. FunctionCallee AsanUnregisterGlobals;
  801. FunctionCallee AsanRegisterImageGlobals;
  802. FunctionCallee AsanUnregisterImageGlobals;
  803. FunctionCallee AsanRegisterElfGlobals;
  804. FunctionCallee AsanUnregisterElfGlobals;
  805. Function *AsanCtorFunction = nullptr;
  806. Function *AsanDtorFunction = nullptr;
  807. };
  808. class ModuleAddressSanitizerLegacyPass : public ModulePass {
  809. public:
  810. static char ID;
  811. explicit ModuleAddressSanitizerLegacyPass(
  812. bool CompileKernel = false, bool Recover = false, bool UseGlobalGC = true,
  813. bool UseOdrIndicator = false,
  814. AsanDtorKind DestructorKind = AsanDtorKind::Global)
  815. : ModulePass(ID), CompileKernel(CompileKernel), Recover(Recover),
  816. UseGlobalGC(UseGlobalGC), UseOdrIndicator(UseOdrIndicator),
  817. DestructorKind(DestructorKind) {
  818. initializeModuleAddressSanitizerLegacyPassPass(
  819. *PassRegistry::getPassRegistry());
  820. }
  821. StringRef getPassName() const override { return "ModuleAddressSanitizer"; }
  822. void getAnalysisUsage(AnalysisUsage &AU) const override {
  823. AU.addRequired<ASanGlobalsMetadataWrapperPass>();
  824. }
  825. bool runOnModule(Module &M) override {
  826. GlobalsMetadata &GlobalsMD =
  827. getAnalysis<ASanGlobalsMetadataWrapperPass>().getGlobalsMD();
  828. ModuleAddressSanitizer ASanModule(M, &GlobalsMD, CompileKernel, Recover,
  829. UseGlobalGC, UseOdrIndicator,
  830. DestructorKind);
  831. return ASanModule.instrumentModule(M);
  832. }
  833. private:
  834. bool CompileKernel;
  835. bool Recover;
  836. bool UseGlobalGC;
  837. bool UseOdrIndicator;
  838. AsanDtorKind DestructorKind;
  839. };
  840. // Stack poisoning does not play well with exception handling.
  841. // When an exception is thrown, we essentially bypass the code
  842. // that unpoisons the stack. This is why the run-time library has
  843. // to intercept __cxa_throw (as well as longjmp, etc) and unpoison the entire
  844. // stack in the interceptor. This, however, does not work inside the
  845. // actual function which catches the exception, most likely because the
  846. // compiler hoists the load of the shadow value somewhere too high.
  847. // This causes asan to report a non-existent bug on 453.povray.
  848. // It sounds like an LLVM bug.
  849. struct FunctionStackPoisoner : public InstVisitor<FunctionStackPoisoner> {
  850. Function &F;
  851. AddressSanitizer &ASan;
  852. DIBuilder DIB;
  853. LLVMContext *C;
  854. Type *IntptrTy;
  855. Type *IntptrPtrTy;
  856. ShadowMapping Mapping;
  857. SmallVector<AllocaInst *, 16> AllocaVec;
  858. SmallVector<AllocaInst *, 16> StaticAllocasToMoveUp;
  859. SmallVector<Instruction *, 8> RetVec;
  860. FunctionCallee AsanStackMallocFunc[kMaxAsanStackMallocSizeClass + 1],
  861. AsanStackFreeFunc[kMaxAsanStackMallocSizeClass + 1];
  862. FunctionCallee AsanSetShadowFunc[0x100] = {};
  863. FunctionCallee AsanPoisonStackMemoryFunc, AsanUnpoisonStackMemoryFunc;
  864. FunctionCallee AsanAllocaPoisonFunc, AsanAllocasUnpoisonFunc;
  865. // Stores the location and arguments of a poisoning/unpoisoning call for an alloca.
  866. struct AllocaPoisonCall {
  867. IntrinsicInst *InsBefore;
  868. AllocaInst *AI;
  869. uint64_t Size;
  870. bool DoPoison;
  871. };
  872. SmallVector<AllocaPoisonCall, 8> DynamicAllocaPoisonCallVec;
  873. SmallVector<AllocaPoisonCall, 8> StaticAllocaPoisonCallVec;
  874. bool HasUntracedLifetimeIntrinsic = false;
  875. SmallVector<AllocaInst *, 1> DynamicAllocaVec;
  876. SmallVector<IntrinsicInst *, 1> StackRestoreVec;
  877. AllocaInst *DynamicAllocaLayout = nullptr;
  878. IntrinsicInst *LocalEscapeCall = nullptr;
  879. bool HasInlineAsm = false;
  880. bool HasReturnsTwiceCall = false;
  881. bool PoisonStack;
  882. FunctionStackPoisoner(Function &F, AddressSanitizer &ASan)
  883. : F(F), ASan(ASan), DIB(*F.getParent(), /*AllowUnresolved*/ false),
  884. C(ASan.C), IntptrTy(ASan.IntptrTy),
  885. IntptrPtrTy(PointerType::get(IntptrTy, 0)), Mapping(ASan.Mapping),
  886. PoisonStack(ClStack &&
  887. !Triple(F.getParent()->getTargetTriple()).isAMDGPU()) {}
  888. bool runOnFunction() {
  889. if (!PoisonStack)
  890. return false;
  891. if (ClRedzoneByvalArgs)
  892. copyArgsPassedByValToAllocas();
  893. // Collect alloca, ret, lifetime instructions etc.
  894. for (BasicBlock *BB : depth_first(&F.getEntryBlock())) visit(*BB);
  895. if (AllocaVec.empty() && DynamicAllocaVec.empty()) return false;
  896. initializeCallbacks(*F.getParent());
  897. if (HasUntracedLifetimeIntrinsic) {
  898. // If there are lifetime intrinsics which couldn't be traced back to an
  899. // alloca, we may not know exactly when a variable enters scope, and
  900. // therefore should "fail safe" by not poisoning it.
  901. StaticAllocaPoisonCallVec.clear();
  902. DynamicAllocaPoisonCallVec.clear();
  903. }
  904. processDynamicAllocas();
  905. processStaticAllocas();
  906. if (ClDebugStack) {
  907. LLVM_DEBUG(dbgs() << F);
  908. }
  909. return true;
  910. }
  911. // Arguments marked with the "byval" attribute are implicitly copied without
  912. // using an alloca instruction. To produce redzones for those arguments, we
  913. // copy them a second time into memory allocated with an alloca instruction.
  914. void copyArgsPassedByValToAllocas();
  915. // Finds all Alloca instructions and puts
  916. // poisoned red zones around all of them.
  917. // Then unpoison everything back before the function returns.
  918. void processStaticAllocas();
  919. void processDynamicAllocas();
  920. void createDynamicAllocasInitStorage();
  921. // ----------------------- Visitors.
  922. /// Collect all Ret instructions, or the musttail call instruction if it
  923. /// precedes the return instruction.
  924. void visitReturnInst(ReturnInst &RI) {
  925. if (CallInst *CI = RI.getParent()->getTerminatingMustTailCall())
  926. RetVec.push_back(CI);
  927. else
  928. RetVec.push_back(&RI);
  929. }
  930. /// Collect all Resume instructions.
  931. void visitResumeInst(ResumeInst &RI) { RetVec.push_back(&RI); }
932. /// Collect all CleanupReturnInst instructions.
  933. void visitCleanupReturnInst(CleanupReturnInst &CRI) { RetVec.push_back(&CRI); }
  934. void unpoisonDynamicAllocasBeforeInst(Instruction *InstBefore,
  935. Value *SavedStack) {
  936. IRBuilder<> IRB(InstBefore);
  937. Value *DynamicAreaPtr = IRB.CreatePtrToInt(SavedStack, IntptrTy);
  938. // When we insert _asan_allocas_unpoison before @llvm.stackrestore, we
  939. // need to adjust extracted SP to compute the address of the most recent
  940. // alloca. We have a special @llvm.get.dynamic.area.offset intrinsic for
  941. // this purpose.
  942. if (!isa<ReturnInst>(InstBefore)) {
  943. Function *DynamicAreaOffsetFunc = Intrinsic::getDeclaration(
  944. InstBefore->getModule(), Intrinsic::get_dynamic_area_offset,
  945. {IntptrTy});
  946. Value *DynamicAreaOffset = IRB.CreateCall(DynamicAreaOffsetFunc, {});
  947. DynamicAreaPtr = IRB.CreateAdd(IRB.CreatePtrToInt(SavedStack, IntptrTy),
  948. DynamicAreaOffset);
  949. }
  950. IRB.CreateCall(
  951. AsanAllocasUnpoisonFunc,
  952. {IRB.CreateLoad(IntptrTy, DynamicAllocaLayout), DynamicAreaPtr});
  953. }
  954. // Unpoison dynamic allocas redzones.
  955. void unpoisonDynamicAllocas() {
  956. for (Instruction *Ret : RetVec)
  957. unpoisonDynamicAllocasBeforeInst(Ret, DynamicAllocaLayout);
  958. for (Instruction *StackRestoreInst : StackRestoreVec)
  959. unpoisonDynamicAllocasBeforeInst(StackRestoreInst,
  960. StackRestoreInst->getOperand(0));
  961. }
962. // Deploy and poison redzones around a dynamic alloca call. To do this, we
963. // should replace this call with another one with changed parameters and
964. // replace all of its uses with the new address, so that
  965. // addr = alloca type, old_size, align
  966. // is replaced by
  967. // new_size = (old_size + additional_size) * sizeof(type)
  968. // tmp = alloca i8, new_size, max(align, 32)
  969. // addr = tmp + 32 (first 32 bytes are for the left redzone).
970. // Additional_size is added so that the new allocation contains not only the
971. // requested memory, but also the left, partial, and right redzones.
  972. void handleDynamicAllocaCall(AllocaInst *AI);
  973. /// Collect Alloca instructions we want (and can) handle.
  974. void visitAllocaInst(AllocaInst &AI) {
  975. if (!ASan.isInterestingAlloca(AI)) {
  976. if (AI.isStaticAlloca()) {
  977. // Skip over allocas that are present *before* the first instrumented
978. // alloca; we don't want to move those around.
  979. if (AllocaVec.empty())
  980. return;
  981. StaticAllocasToMoveUp.push_back(&AI);
  982. }
  983. return;
  984. }
  985. if (!AI.isStaticAlloca())
  986. DynamicAllocaVec.push_back(&AI);
  987. else
  988. AllocaVec.push_back(&AI);
  989. }
  990. /// Collect lifetime intrinsic calls to check for use-after-scope
  991. /// errors.
  992. void visitIntrinsicInst(IntrinsicInst &II) {
  993. Intrinsic::ID ID = II.getIntrinsicID();
  994. if (ID == Intrinsic::stackrestore) StackRestoreVec.push_back(&II);
  995. if (ID == Intrinsic::localescape) LocalEscapeCall = &II;
  996. if (!ASan.UseAfterScope)
  997. return;
  998. if (!II.isLifetimeStartOrEnd())
  999. return;
  1000. // Found lifetime intrinsic, add ASan instrumentation if necessary.
  1001. auto *Size = cast<ConstantInt>(II.getArgOperand(0));
  1002. // If size argument is undefined, don't do anything.
  1003. if (Size->isMinusOne()) return;
  1004. // Check that size doesn't saturate uint64_t and can
  1005. // be stored in IntptrTy.
  1006. const uint64_t SizeValue = Size->getValue().getLimitedValue();
  1007. if (SizeValue == ~0ULL ||
  1008. !ConstantInt::isValueValidForType(IntptrTy, SizeValue))
  1009. return;
  1010. // Find alloca instruction that corresponds to llvm.lifetime argument.
  1011. // Currently we can only handle lifetime markers pointing to the
  1012. // beginning of the alloca.
  1013. AllocaInst *AI = findAllocaForValue(II.getArgOperand(1), true);
  1014. if (!AI) {
  1015. HasUntracedLifetimeIntrinsic = true;
  1016. return;
  1017. }
  1018. // We're interested only in allocas we can handle.
  1019. if (!ASan.isInterestingAlloca(*AI))
  1020. return;
  1021. bool DoPoison = (ID == Intrinsic::lifetime_end);
  1022. AllocaPoisonCall APC = {&II, AI, SizeValue, DoPoison};
  1023. if (AI->isStaticAlloca())
  1024. StaticAllocaPoisonCallVec.push_back(APC);
  1025. else if (ClInstrumentDynamicAllocas)
  1026. DynamicAllocaPoisonCallVec.push_back(APC);
  1027. }
  1028. void visitCallBase(CallBase &CB) {
  1029. if (CallInst *CI = dyn_cast<CallInst>(&CB)) {
  1030. HasInlineAsm |= CI->isInlineAsm() && &CB != ASan.LocalDynamicShadow;
  1031. HasReturnsTwiceCall |= CI->canReturnTwice();
  1032. }
  1033. }
  1034. // ---------------------- Helpers.
  1035. void initializeCallbacks(Module &M);
  1036. // Copies bytes from ShadowBytes into shadow memory for indexes where
  1037. // ShadowMask is not zero. If ShadowMask[i] is zero, we assume that
  1038. // ShadowBytes[i] is constantly zero and doesn't need to be overwritten.
  1039. void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
  1040. IRBuilder<> &IRB, Value *ShadowBase);
  1041. void copyToShadow(ArrayRef<uint8_t> ShadowMask, ArrayRef<uint8_t> ShadowBytes,
  1042. size_t Begin, size_t End, IRBuilder<> &IRB,
  1043. Value *ShadowBase);
  1044. void copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
  1045. ArrayRef<uint8_t> ShadowBytes, size_t Begin,
  1046. size_t End, IRBuilder<> &IRB, Value *ShadowBase);
  1047. void poisonAlloca(Value *V, uint64_t Size, IRBuilder<> &IRB, bool DoPoison);
  1048. Value *createAllocaForLayout(IRBuilder<> &IRB, const ASanStackFrameLayout &L,
  1049. bool Dynamic);
  1050. PHINode *createPHI(IRBuilder<> &IRB, Value *Cond, Value *ValueIfTrue,
  1051. Instruction *ThenTerm, Value *ValueIfFalse);
  1052. };
  1053. } // end anonymous namespace
  1054. void LocationMetadata::parse(MDNode *MDN) {
  1055. assert(MDN->getNumOperands() == 3);
  1056. MDString *DIFilename = cast<MDString>(MDN->getOperand(0));
  1057. Filename = DIFilename->getString();
  1058. LineNo = mdconst::extract<ConstantInt>(MDN->getOperand(1))->getLimitedValue();
  1059. ColumnNo =
  1060. mdconst::extract<ConstantInt>(MDN->getOperand(2))->getLimitedValue();
  1061. }
1062. // FIXME: It would be cleaner to attach the relevant metadata directly to the
1063. // globals we want to sanitize and read it on each pass over a function,
1064. // instead of reading module-level metadata up front.
  1065. GlobalsMetadata::GlobalsMetadata(Module &M) {
  1066. NamedMDNode *Globals = M.getNamedMetadata("llvm.asan.globals");
  1067. if (!Globals)
  1068. return;
  1069. for (auto MDN : Globals->operands()) {
  1070. // Metadata node contains the global and the fields of "Entry".
  1071. assert(MDN->getNumOperands() == 5);
  1072. auto *V = mdconst::extract_or_null<Constant>(MDN->getOperand(0));
  1073. // The optimizer may optimize away a global entirely.
  1074. if (!V)
  1075. continue;
  1076. auto *StrippedV = V->stripPointerCasts();
  1077. auto *GV = dyn_cast<GlobalVariable>(StrippedV);
  1078. if (!GV)
  1079. continue;
  1080. // We can already have an entry for GV if it was merged with another
  1081. // global.
  1082. Entry &E = Entries[GV];
  1083. if (auto *Loc = cast_or_null<MDNode>(MDN->getOperand(1)))
  1084. E.SourceLoc.parse(Loc);
  1085. if (auto *Name = cast_or_null<MDString>(MDN->getOperand(2)))
  1086. E.Name = Name->getString();
  1087. ConstantInt *IsDynInit = mdconst::extract<ConstantInt>(MDN->getOperand(3));
  1088. E.IsDynInit |= IsDynInit->isOne();
  1089. ConstantInt *IsExcluded =
  1090. mdconst::extract<ConstantInt>(MDN->getOperand(4));
  1091. E.IsExcluded |= IsExcluded->isOne();
  1092. }
  1093. }
  1094. AnalysisKey ASanGlobalsMetadataAnalysis::Key;
  1095. GlobalsMetadata ASanGlobalsMetadataAnalysis::run(Module &M,
  1096. ModuleAnalysisManager &AM) {
  1097. return GlobalsMetadata(M);
  1098. }
  1099. PreservedAnalyses AddressSanitizerPass::run(Function &F,
  1100. AnalysisManager<Function> &AM) {
  1101. auto &MAMProxy = AM.getResult<ModuleAnalysisManagerFunctionProxy>(F);
  1102. Module &M = *F.getParent();
  1103. if (auto *R = MAMProxy.getCachedResult<ASanGlobalsMetadataAnalysis>(M)) {
  1104. const TargetLibraryInfo *TLI = &AM.getResult<TargetLibraryAnalysis>(F);
  1105. AddressSanitizer Sanitizer(M, R, nullptr, Options.CompileKernel,
  1106. Options.Recover, Options.UseAfterScope,
  1107. Options.UseAfterReturn);
  1108. if (Sanitizer.instrumentFunction(F, TLI))
  1109. return PreservedAnalyses::none();
  1110. return PreservedAnalyses::all();
  1111. }
  1112. report_fatal_error(
  1113. "The ASanGlobalsMetadataAnalysis is required to run before "
  1114. "AddressSanitizer can run");
  1115. return PreservedAnalyses::all();
  1116. }
  1117. void AddressSanitizerPass::printPipeline(
  1118. raw_ostream &OS, function_ref<StringRef(StringRef)> MapClassName2PassName) {
  1119. static_cast<PassInfoMixin<AddressSanitizerPass> *>(this)->printPipeline(
  1120. OS, MapClassName2PassName);
  1121. OS << "<";
  1122. if (Options.CompileKernel)
  1123. OS << "kernel";
  1124. OS << ">";
  1125. }
  1126. void ModuleAddressSanitizerPass::printPipeline(
  1127. raw_ostream &OS, function_ref<StringRef(StringRef)> MapClassName2PassName) {
  1128. static_cast<PassInfoMixin<ModuleAddressSanitizerPass> *>(this)->printPipeline(
  1129. OS, MapClassName2PassName);
  1130. OS << "<";
  1131. if (Options.CompileKernel)
  1132. OS << "kernel";
  1133. OS << ">";
  1134. }
  1135. ModuleAddressSanitizerPass::ModuleAddressSanitizerPass(
  1136. const AddressSanitizerOptions &Options, bool UseGlobalGC,
  1137. bool UseOdrIndicator, AsanDtorKind DestructorKind)
  1138. : Options(Options), UseGlobalGC(UseGlobalGC),
  1139. UseOdrIndicator(UseOdrIndicator), DestructorKind(DestructorKind) {}
  1140. PreservedAnalyses ModuleAddressSanitizerPass::run(Module &M,
  1141. ModuleAnalysisManager &MAM) {
  1142. GlobalsMetadata &GlobalsMD = MAM.getResult<ASanGlobalsMetadataAnalysis>(M);
  1143. ModuleAddressSanitizer ModuleSanitizer(M, &GlobalsMD, Options.CompileKernel,
  1144. Options.Recover, UseGlobalGC,
  1145. UseOdrIndicator, DestructorKind);
  1146. bool Modified = false;
  1147. auto &FAM = MAM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
  1148. const StackSafetyGlobalInfo *const SSGI =
  1149. ClUseStackSafety ? &MAM.getResult<StackSafetyGlobalAnalysis>(M) : nullptr;
  1150. for (Function &F : M) {
  1151. AddressSanitizer FunctionSanitizer(
  1152. M, &GlobalsMD, SSGI, Options.CompileKernel, Options.Recover,
  1153. Options.UseAfterScope, Options.UseAfterReturn);
  1154. const TargetLibraryInfo &TLI = FAM.getResult<TargetLibraryAnalysis>(F);
  1155. Modified |= FunctionSanitizer.instrumentFunction(F, &TLI);
  1156. }
  1157. Modified |= ModuleSanitizer.instrumentModule(M);
  1158. return Modified ? PreservedAnalyses::none() : PreservedAnalyses::all();
  1159. }
  1160. INITIALIZE_PASS(ASanGlobalsMetadataWrapperPass, "asan-globals-md",
  1161. "Read metadata to mark which globals should be instrumented "
  1162. "when running ASan.",
  1163. false, true)
  1164. char AddressSanitizerLegacyPass::ID = 0;
  1165. INITIALIZE_PASS_BEGIN(
  1166. AddressSanitizerLegacyPass, "asan",
  1167. "AddressSanitizer: detects use-after-free and out-of-bounds bugs.", false,
  1168. false)
  1169. INITIALIZE_PASS_DEPENDENCY(ASanGlobalsMetadataWrapperPass)
  1170. INITIALIZE_PASS_DEPENDENCY(StackSafetyGlobalInfoWrapperPass)
  1171. INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
  1172. INITIALIZE_PASS_END(
  1173. AddressSanitizerLegacyPass, "asan",
  1174. "AddressSanitizer: detects use-after-free and out-of-bounds bugs.", false,
  1175. false)
  1176. FunctionPass *llvm::createAddressSanitizerFunctionPass(
  1177. bool CompileKernel, bool Recover, bool UseAfterScope,
  1178. AsanDetectStackUseAfterReturnMode UseAfterReturn) {
  1179. assert(!CompileKernel || Recover);
  1180. return new AddressSanitizerLegacyPass(CompileKernel, Recover, UseAfterScope,
  1181. UseAfterReturn);
  1182. }
  1183. char ModuleAddressSanitizerLegacyPass::ID = 0;
  1184. INITIALIZE_PASS(
  1185. ModuleAddressSanitizerLegacyPass, "asan-module",
  1186. "AddressSanitizer: detects use-after-free and out-of-bounds bugs."
  1187. "ModulePass",
  1188. false, false)
  1189. ModulePass *llvm::createModuleAddressSanitizerLegacyPassPass(
  1190. bool CompileKernel, bool Recover, bool UseGlobalsGC, bool UseOdrIndicator,
  1191. AsanDtorKind Destructor) {
  1192. assert(!CompileKernel || Recover);
  1193. return new ModuleAddressSanitizerLegacyPass(
  1194. CompileKernel, Recover, UseGlobalsGC, UseOdrIndicator, Destructor);
  1195. }
  1196. static size_t TypeSizeToSizeIndex(uint32_t TypeSize) {
  1197. size_t Res = countTrailingZeros(TypeSize / 8);
  1198. assert(Res < kNumberOfAccessSizes);
  1199. return Res;
  1200. }
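// Worked example (illustrative only): a 4-byte access has TypeSize == 32 bits,
// so TypeSize / 8 == 4 and countTrailingZeros(4) == 2, i.e. the access maps to
// size-class index 2. Indices 0..4 thus correspond to 1-, 2-, 4-, 8- and
// 16-byte accesses (kNumberOfAccessSizes == 5).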
  1201. /// Create a global describing a source location.
  1202. static GlobalVariable *createPrivateGlobalForSourceLoc(Module &M,
  1203. LocationMetadata MD) {
  1204. Constant *LocData[] = {
  1205. createPrivateGlobalForString(M, MD.Filename, true, kAsanGenPrefix),
  1206. ConstantInt::get(Type::getInt32Ty(M.getContext()), MD.LineNo),
  1207. ConstantInt::get(Type::getInt32Ty(M.getContext()), MD.ColumnNo),
  1208. };
  1209. auto LocStruct = ConstantStruct::getAnon(LocData);
  1210. auto GV = new GlobalVariable(M, LocStruct->getType(), true,
  1211. GlobalValue::PrivateLinkage, LocStruct,
  1212. kAsanGenPrefix);
  1213. GV->setUnnamedAddr(GlobalValue::UnnamedAddr::Global);
  1214. return GV;
  1215. }
  1216. /// Check if \p G has been created by a trusted compiler pass.
  1217. static bool GlobalWasGeneratedByCompiler(GlobalVariable *G) {
  1218. // Do not instrument @llvm.global_ctors, @llvm.used, etc.
  1219. if (G->getName().startswith("llvm."))
  1220. return true;
  1221. // Do not instrument asan globals.
  1222. if (G->getName().startswith(kAsanGenPrefix) ||
  1223. G->getName().startswith(kSanCovGenPrefix) ||
  1224. G->getName().startswith(kODRGenPrefix))
  1225. return true;
  1226. // Do not instrument gcov counter arrays.
  1227. if (G->getName() == "__llvm_gcov_ctr")
  1228. return true;
  1229. return false;
  1230. }
  1231. static bool isUnsupportedAMDGPUAddrspace(Value *Addr) {
  1232. Type *PtrTy = cast<PointerType>(Addr->getType()->getScalarType());
  1233. unsigned int AddrSpace = PtrTy->getPointerAddressSpace();
  1234. if (AddrSpace == 3 || AddrSpace == 5)
  1235. return true;
  1236. return false;
  1237. }
  1238. Value *AddressSanitizer::memToShadow(Value *Shadow, IRBuilder<> &IRB) {
  1239. // Shadow >> scale
  1240. Shadow = IRB.CreateLShr(Shadow, Mapping.Scale);
  1241. if (Mapping.Offset == 0) return Shadow;
  1242. // (Shadow >> scale) | offset
  1243. Value *ShadowBase;
  1244. if (LocalDynamicShadow)
  1245. ShadowBase = LocalDynamicShadow;
  1246. else
  1247. ShadowBase = ConstantInt::get(IntptrTy, Mapping.Offset);
  1248. if (Mapping.OrShadowOffset)
  1249. return IRB.CreateOr(Shadow, ShadowBase);
  1250. else
  1251. return IRB.CreateAdd(Shadow, ShadowBase);
  1252. }
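// Illustrative sketch only: with the common 1:8 shadow mapping used on
// x86-64 Linux (Mapping.Scale == 3, Mapping.Offset == 0x7fff8000 and
// OrShadowOffset == false), the code above produces roughly
//   %s      = lshr i64 %addr, 3
//   %shadow = add  i64 %s, 2147450880   ; 0x7fff8000
// i.e. Shadow(Addr) = (Addr >> 3) + Offset. When a dynamic shadow base is in
// use, the constant offset is replaced by the LocalDynamicShadow value.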
  1253. // Instrument memset/memmove/memcpy
  1254. void AddressSanitizer::instrumentMemIntrinsic(MemIntrinsic *MI) {
  1255. IRBuilder<> IRB(MI);
  1256. if (isa<MemTransferInst>(MI)) {
  1257. IRB.CreateCall(
  1258. isa<MemMoveInst>(MI) ? AsanMemmove : AsanMemcpy,
  1259. {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
  1260. IRB.CreatePointerCast(MI->getOperand(1), IRB.getInt8PtrTy()),
  1261. IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  1262. } else if (isa<MemSetInst>(MI)) {
  1263. IRB.CreateCall(
  1264. AsanMemset,
  1265. {IRB.CreatePointerCast(MI->getOperand(0), IRB.getInt8PtrTy()),
  1266. IRB.CreateIntCast(MI->getOperand(1), IRB.getInt32Ty(), false),
  1267. IRB.CreateIntCast(MI->getOperand(2), IntptrTy, false)});
  1268. }
  1269. MI->eraseFromParent();
  1270. }
  1271. /// Check if we want (and can) handle this alloca.
  1272. bool AddressSanitizer::isInterestingAlloca(const AllocaInst &AI) {
  1273. auto PreviouslySeenAllocaInfo = ProcessedAllocas.find(&AI);
  1274. if (PreviouslySeenAllocaInfo != ProcessedAllocas.end())
  1275. return PreviouslySeenAllocaInfo->getSecond();
  1276. bool IsInteresting =
  1277. (AI.getAllocatedType()->isSized() &&
  1278. // alloca() may be called with 0 size, ignore it.
  1279. ((!AI.isStaticAlloca()) || getAllocaSizeInBytes(AI) > 0) &&
  1280. // We are only interested in allocas not promotable to registers.
  1281. // Promotable allocas are common under -O0.
  1282. (!ClSkipPromotableAllocas || !isAllocaPromotable(&AI)) &&
  1283. // inalloca allocas are not treated as static, and we don't want
  1284. // dynamic alloca instrumentation for them as well.
  1285. !AI.isUsedWithInAlloca() &&
  1286. // swifterror allocas are register promoted by ISel
  1287. !AI.isSwiftError());
  1288. ProcessedAllocas[&AI] = IsInteresting;
  1289. return IsInteresting;
  1290. }
  1291. bool AddressSanitizer::ignoreAccess(Instruction *Inst, Value *Ptr) {
1292. // Instrument accesses from different address spaces only for AMDGPU.
  1293. Type *PtrTy = cast<PointerType>(Ptr->getType()->getScalarType());
  1294. if (PtrTy->getPointerAddressSpace() != 0 &&
  1295. !(TargetTriple.isAMDGPU() && !isUnsupportedAMDGPUAddrspace(Ptr)))
  1296. return true;
  1297. // Ignore swifterror addresses.
  1298. // swifterror memory addresses are mem2reg promoted by instruction
  1299. // selection. As such they cannot have regular uses like an instrumentation
  1300. // function and it makes no sense to track them as memory.
  1301. if (Ptr->isSwiftError())
  1302. return true;
  1303. // Treat memory accesses to promotable allocas as non-interesting since they
  1304. // will not cause memory violations. This greatly speeds up the instrumented
  1305. // executable at -O0.
  1306. if (auto AI = dyn_cast_or_null<AllocaInst>(Ptr))
  1307. if (ClSkipPromotableAllocas && !isInterestingAlloca(*AI))
  1308. return true;
  1309. if (SSGI != nullptr && SSGI->stackAccessIsSafe(*Inst) &&
  1310. findAllocaForValue(Ptr))
  1311. return true;
  1312. return false;
  1313. }
  1314. void AddressSanitizer::getInterestingMemoryOperands(
  1315. Instruction *I, SmallVectorImpl<InterestingMemoryOperand> &Interesting) {
  1316. // Skip memory accesses inserted by another instrumentation.
  1317. if (I->hasMetadata("nosanitize"))
  1318. return;
  1319. // Do not instrument the load fetching the dynamic shadow address.
  1320. if (LocalDynamicShadow == I)
  1321. return;
  1322. if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
  1323. if (!ClInstrumentReads || ignoreAccess(I, LI->getPointerOperand()))
  1324. return;
  1325. Interesting.emplace_back(I, LI->getPointerOperandIndex(), false,
  1326. LI->getType(), LI->getAlign());
  1327. } else if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
  1328. if (!ClInstrumentWrites || ignoreAccess(I, SI->getPointerOperand()))
  1329. return;
  1330. Interesting.emplace_back(I, SI->getPointerOperandIndex(), true,
  1331. SI->getValueOperand()->getType(), SI->getAlign());
  1332. } else if (AtomicRMWInst *RMW = dyn_cast<AtomicRMWInst>(I)) {
  1333. if (!ClInstrumentAtomics || ignoreAccess(I, RMW->getPointerOperand()))
  1334. return;
  1335. Interesting.emplace_back(I, RMW->getPointerOperandIndex(), true,
  1336. RMW->getValOperand()->getType(), None);
  1337. } else if (AtomicCmpXchgInst *XCHG = dyn_cast<AtomicCmpXchgInst>(I)) {
  1338. if (!ClInstrumentAtomics || ignoreAccess(I, XCHG->getPointerOperand()))
  1339. return;
  1340. Interesting.emplace_back(I, XCHG->getPointerOperandIndex(), true,
  1341. XCHG->getCompareOperand()->getType(), None);
  1342. } else if (auto CI = dyn_cast<CallInst>(I)) {
  1343. if (CI->getIntrinsicID() == Intrinsic::masked_load ||
  1344. CI->getIntrinsicID() == Intrinsic::masked_store) {
  1345. bool IsWrite = CI->getIntrinsicID() == Intrinsic::masked_store;
  1346. // Masked store has an initial operand for the value.
  1347. unsigned OpOffset = IsWrite ? 1 : 0;
  1348. if (IsWrite ? !ClInstrumentWrites : !ClInstrumentReads)
  1349. return;
  1350. auto BasePtr = CI->getOperand(OpOffset);
  1351. if (ignoreAccess(I, BasePtr))
  1352. return;
  1353. Type *Ty = IsWrite ? CI->getArgOperand(0)->getType() : CI->getType();
  1354. MaybeAlign Alignment = Align(1);
  1355. // Otherwise no alignment guarantees. We probably got Undef.
  1356. if (auto *Op = dyn_cast<ConstantInt>(CI->getOperand(1 + OpOffset)))
  1357. Alignment = Op->getMaybeAlignValue();
  1358. Value *Mask = CI->getOperand(2 + OpOffset);
  1359. Interesting.emplace_back(I, OpOffset, IsWrite, Ty, Alignment, Mask);
  1360. } else {
  1361. for (unsigned ArgNo = 0; ArgNo < CI->arg_size(); ArgNo++) {
  1362. if (!ClInstrumentByval || !CI->isByValArgument(ArgNo) ||
  1363. ignoreAccess(I, CI->getArgOperand(ArgNo)))
  1364. continue;
  1365. Type *Ty = CI->getParamByValType(ArgNo);
  1366. Interesting.emplace_back(I, ArgNo, false, Ty, Align(1));
  1367. }
  1368. }
  1369. }
  1370. }
  1371. static bool isPointerOperand(Value *V) {
  1372. return V->getType()->isPointerTy() || isa<PtrToIntInst>(V);
  1373. }
  1374. // This is a rough heuristic; it may cause both false positives and
  1375. // false negatives. The proper implementation requires cooperation with
  1376. // the frontend.
  1377. static bool isInterestingPointerComparison(Instruction *I) {
  1378. if (ICmpInst *Cmp = dyn_cast<ICmpInst>(I)) {
  1379. if (!Cmp->isRelational())
  1380. return false;
  1381. } else {
  1382. return false;
  1383. }
  1384. return isPointerOperand(I->getOperand(0)) &&
  1385. isPointerOperand(I->getOperand(1));
  1386. }
  1387. // This is a rough heuristic; it may cause both false positives and
  1388. // false negatives. The proper implementation requires cooperation with
  1389. // the frontend.
  1390. static bool isInterestingPointerSubtraction(Instruction *I) {
  1391. if (BinaryOperator *BO = dyn_cast<BinaryOperator>(I)) {
  1392. if (BO->getOpcode() != Instruction::Sub)
  1393. return false;
  1394. } else {
  1395. return false;
  1396. }
  1397. return isPointerOperand(I->getOperand(0)) &&
  1398. isPointerOperand(I->getOperand(1));
  1399. }
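// Illustrative C-level examples of what these two heuristics are intended to
// match (a sketch, not an exhaustive definition):
//   if (p < q) { ... }     // relational icmp with two pointer operands
//   size_t d = p - q;      // 'sub' whose operands are pointers / ptrtoints
// Matching instructions are handed to
// instrumentPointerComparisonOrSubtraction() below, which forwards both
// operands to the runtime via AsanPtrCmpFunction / AsanPtrSubFunction.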
  1400. bool AddressSanitizer::GlobalIsLinkerInitialized(GlobalVariable *G) {
1401. // If a global variable does not have dynamic initialization, we don't
1402. // have to instrument it. However, if a global does not have an initializer
1403. // at all, we assume it has a dynamic initializer (in another TU).
1404. //
1405. // FIXME: Metadata should be attached directly to the global instead
  1406. // of being added to llvm.asan.globals.
  1407. return G->hasInitializer() && !GlobalsMD.get(G).IsDynInit;
  1408. }
  1409. void AddressSanitizer::instrumentPointerComparisonOrSubtraction(
  1410. Instruction *I) {
  1411. IRBuilder<> IRB(I);
  1412. FunctionCallee F = isa<ICmpInst>(I) ? AsanPtrCmpFunction : AsanPtrSubFunction;
  1413. Value *Param[2] = {I->getOperand(0), I->getOperand(1)};
  1414. for (Value *&i : Param) {
  1415. if (i->getType()->isPointerTy())
  1416. i = IRB.CreatePointerCast(i, IntptrTy);
  1417. }
  1418. IRB.CreateCall(F, Param);
  1419. }
  1420. static void doInstrumentAddress(AddressSanitizer *Pass, Instruction *I,
  1421. Instruction *InsertBefore, Value *Addr,
  1422. MaybeAlign Alignment, unsigned Granularity,
  1423. uint32_t TypeSize, bool IsWrite,
  1424. Value *SizeArgument, bool UseCalls,
  1425. uint32_t Exp) {
  1426. // Instrument a 1-, 2-, 4-, 8-, or 16- byte access with one check
  1427. // if the data is properly aligned.
  1428. if ((TypeSize == 8 || TypeSize == 16 || TypeSize == 32 || TypeSize == 64 ||
  1429. TypeSize == 128) &&
  1430. (!Alignment || *Alignment >= Granularity || *Alignment >= TypeSize / 8))
  1431. return Pass->instrumentAddress(I, InsertBefore, Addr, TypeSize, IsWrite,
  1432. nullptr, UseCalls, Exp);
  1433. Pass->instrumentUnusualSizeOrAlignment(I, InsertBefore, Addr, TypeSize,
  1434. IsWrite, nullptr, UseCalls, Exp);
  1435. }
  1436. static void instrumentMaskedLoadOrStore(AddressSanitizer *Pass,
  1437. const DataLayout &DL, Type *IntptrTy,
  1438. Value *Mask, Instruction *I,
  1439. Value *Addr, MaybeAlign Alignment,
  1440. unsigned Granularity, Type *OpType,
  1441. bool IsWrite, Value *SizeArgument,
  1442. bool UseCalls, uint32_t Exp) {
  1443. auto *VTy = cast<FixedVectorType>(OpType);
  1444. uint64_t ElemTypeSize = DL.getTypeStoreSizeInBits(VTy->getScalarType());
  1445. unsigned Num = VTy->getNumElements();
  1446. auto Zero = ConstantInt::get(IntptrTy, 0);
  1447. for (unsigned Idx = 0; Idx < Num; ++Idx) {
  1448. Value *InstrumentedAddress = nullptr;
  1449. Instruction *InsertBefore = I;
  1450. if (auto *Vector = dyn_cast<ConstantVector>(Mask)) {
  1451. // dyn_cast as we might get UndefValue
  1452. if (auto *Masked = dyn_cast<ConstantInt>(Vector->getOperand(Idx))) {
  1453. if (Masked->isZero())
  1454. // Mask is constant false, so no instrumentation needed.
  1455. continue;
  1456. // If we have a true or undef value, fall through to doInstrumentAddress
  1457. // with InsertBefore == I
  1458. }
  1459. } else {
  1460. IRBuilder<> IRB(I);
  1461. Value *MaskElem = IRB.CreateExtractElement(Mask, Idx);
  1462. Instruction *ThenTerm = SplitBlockAndInsertIfThen(MaskElem, I, false);
  1463. InsertBefore = ThenTerm;
  1464. }
  1465. IRBuilder<> IRB(InsertBefore);
  1466. InstrumentedAddress =
  1467. IRB.CreateGEP(VTy, Addr, {Zero, ConstantInt::get(IntptrTy, Idx)});
  1468. doInstrumentAddress(Pass, I, InsertBefore, InstrumentedAddress, Alignment,
  1469. Granularity, ElemTypeSize, IsWrite, SizeArgument,
  1470. UseCalls, Exp);
  1471. }
  1472. }
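// Rough sketch of the per-lane expansion above for a masked operation on
// <4 x i32> with a non-constant mask (illustrative only). For each lane Idx:
//   %m = extractelement <4 x i1> %mask, i64 Idx
//   br i1 %m, ...                        ; SplitBlockAndInsertIfThen
// and, in the taken branch, the lane address produced by the GEP
// (&Addr[0][Idx]) is instrumented like an ordinary 4-byte access. Lanes whose
// mask bit is a constant false are skipped entirely.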
  1473. void AddressSanitizer::instrumentMop(ObjectSizeOffsetVisitor &ObjSizeVis,
  1474. InterestingMemoryOperand &O, bool UseCalls,
  1475. const DataLayout &DL) {
  1476. Value *Addr = O.getPtr();
  1477. // Optimization experiments.
  1478. // The experiments can be used to evaluate potential optimizations that remove
  1479. // instrumentation (assess false negatives). Instead of completely removing
  1480. // some instrumentation, you set Exp to a non-zero value (mask of optimization
  1481. // experiments that want to remove instrumentation of this instruction).
1482. // If Exp is non-zero, this pass will emit special calls into the runtime
1483. // (e.g. __asan_report_exp_load1 instead of __asan_report_load1). These calls
1484. // make the runtime terminate the program in a special way (with a different
  1485. // exit status). Then you run the new compiler on a buggy corpus, collect
  1486. // the special terminations (ideally, you don't see them at all -- no false
  1487. // negatives) and make the decision on the optimization.
  1488. uint32_t Exp = ClForceExperiment;
  1489. if (ClOpt && ClOptGlobals) {
  1490. // If initialization order checking is disabled, a simple access to a
  1491. // dynamically initialized global is always valid.
  1492. GlobalVariable *G = dyn_cast<GlobalVariable>(getUnderlyingObject(Addr));
  1493. if (G && (!ClInitializers || GlobalIsLinkerInitialized(G)) &&
  1494. isSafeAccess(ObjSizeVis, Addr, O.TypeSize)) {
  1495. NumOptimizedAccessesToGlobalVar++;
  1496. return;
  1497. }
  1498. }
  1499. if (ClOpt && ClOptStack) {
  1500. // A direct inbounds access to a stack variable is always valid.
  1501. if (isa<AllocaInst>(getUnderlyingObject(Addr)) &&
  1502. isSafeAccess(ObjSizeVis, Addr, O.TypeSize)) {
  1503. NumOptimizedAccessesToStackVar++;
  1504. return;
  1505. }
  1506. }
  1507. if (O.IsWrite)
  1508. NumInstrumentedWrites++;
  1509. else
  1510. NumInstrumentedReads++;
  1511. unsigned Granularity = 1 << Mapping.Scale;
  1512. if (O.MaybeMask) {
  1513. instrumentMaskedLoadOrStore(this, DL, IntptrTy, O.MaybeMask, O.getInsn(),
  1514. Addr, O.Alignment, Granularity, O.OpType,
  1515. O.IsWrite, nullptr, UseCalls, Exp);
  1516. } else {
  1517. doInstrumentAddress(this, O.getInsn(), O.getInsn(), Addr, O.Alignment,
  1518. Granularity, O.TypeSize, O.IsWrite, nullptr, UseCalls,
  1519. Exp);
  1520. }
  1521. }
  1522. Instruction *AddressSanitizer::generateCrashCode(Instruction *InsertBefore,
  1523. Value *Addr, bool IsWrite,
  1524. size_t AccessSizeIndex,
  1525. Value *SizeArgument,
  1526. uint32_t Exp) {
  1527. IRBuilder<> IRB(InsertBefore);
  1528. Value *ExpVal = Exp == 0 ? nullptr : ConstantInt::get(IRB.getInt32Ty(), Exp);
  1529. CallInst *Call = nullptr;
  1530. if (SizeArgument) {
  1531. if (Exp == 0)
  1532. Call = IRB.CreateCall(AsanErrorCallbackSized[IsWrite][0],
  1533. {Addr, SizeArgument});
  1534. else
  1535. Call = IRB.CreateCall(AsanErrorCallbackSized[IsWrite][1],
  1536. {Addr, SizeArgument, ExpVal});
  1537. } else {
  1538. if (Exp == 0)
  1539. Call =
  1540. IRB.CreateCall(AsanErrorCallback[IsWrite][0][AccessSizeIndex], Addr);
  1541. else
  1542. Call = IRB.CreateCall(AsanErrorCallback[IsWrite][1][AccessSizeIndex],
  1543. {Addr, ExpVal});
  1544. }
  1545. Call->setCannotMerge();
  1546. return Call;
  1547. }
  1548. Value *AddressSanitizer::createSlowPathCmp(IRBuilder<> &IRB, Value *AddrLong,
  1549. Value *ShadowValue,
  1550. uint32_t TypeSize) {
  1551. size_t Granularity = static_cast<size_t>(1) << Mapping.Scale;
  1552. // Addr & (Granularity - 1)
  1553. Value *LastAccessedByte =
  1554. IRB.CreateAnd(AddrLong, ConstantInt::get(IntptrTy, Granularity - 1));
  1555. // (Addr & (Granularity - 1)) + size - 1
  1556. if (TypeSize / 8 > 1)
  1557. LastAccessedByte = IRB.CreateAdd(
  1558. LastAccessedByte, ConstantInt::get(IntptrTy, TypeSize / 8 - 1));
  1559. // (uint8_t) ((Addr & (Granularity-1)) + size - 1)
  1560. LastAccessedByte =
  1561. IRB.CreateIntCast(LastAccessedByte, ShadowValue->getType(), false);
  1562. // ((uint8_t) ((Addr & (Granularity-1)) + size - 1)) >= ShadowValue
  1563. return IRB.CreateICmpSGE(LastAccessedByte, ShadowValue);
  1564. }
  1565. Instruction *AddressSanitizer::instrumentAMDGPUAddress(
  1566. Instruction *OrigIns, Instruction *InsertBefore, Value *Addr,
  1567. uint32_t TypeSize, bool IsWrite, Value *SizeArgument) {
  1568. // Do not instrument unsupported addrspaces.
  1569. if (isUnsupportedAMDGPUAddrspace(Addr))
  1570. return nullptr;
  1571. Type *PtrTy = cast<PointerType>(Addr->getType()->getScalarType());
  1572. // Follow host instrumentation for global and constant addresses.
  1573. if (PtrTy->getPointerAddressSpace() != 0)
  1574. return InsertBefore;
1575. // Instrument generic addresses in supported address spaces.
  1576. IRBuilder<> IRB(InsertBefore);
  1577. Value *AddrLong = IRB.CreatePointerCast(Addr, IRB.getInt8PtrTy());
  1578. Value *IsShared = IRB.CreateCall(AMDGPUAddressShared, {AddrLong});
  1579. Value *IsPrivate = IRB.CreateCall(AMDGPUAddressPrivate, {AddrLong});
  1580. Value *IsSharedOrPrivate = IRB.CreateOr(IsShared, IsPrivate);
  1581. Value *Cmp = IRB.CreateICmpNE(IRB.getTrue(), IsSharedOrPrivate);
  1582. Value *AddrSpaceZeroLanding =
  1583. SplitBlockAndInsertIfThen(Cmp, InsertBefore, false);
  1584. InsertBefore = cast<Instruction>(AddrSpaceZeroLanding);
  1585. return InsertBefore;
  1586. }
  1587. void AddressSanitizer::instrumentAddress(Instruction *OrigIns,
  1588. Instruction *InsertBefore, Value *Addr,
  1589. uint32_t TypeSize, bool IsWrite,
  1590. Value *SizeArgument, bool UseCalls,
  1591. uint32_t Exp) {
  1592. if (TargetTriple.isAMDGPU()) {
  1593. InsertBefore = instrumentAMDGPUAddress(OrigIns, InsertBefore, Addr,
  1594. TypeSize, IsWrite, SizeArgument);
  1595. if (!InsertBefore)
  1596. return;
  1597. }
  1598. IRBuilder<> IRB(InsertBefore);
  1599. size_t AccessSizeIndex = TypeSizeToSizeIndex(TypeSize);
  1600. const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
  1601. if (UseCalls && ClOptimizeCallbacks) {
  1602. const ASanAccessInfo AccessInfo(IsWrite, CompileKernel, AccessSizeIndex);
  1603. Module *M = IRB.GetInsertBlock()->getParent()->getParent();
  1604. IRB.CreateCall(
  1605. Intrinsic::getDeclaration(M, Intrinsic::asan_check_memaccess),
  1606. {IRB.CreatePointerCast(Addr, Int8PtrTy),
  1607. ConstantInt::get(Int32Ty, AccessInfo.Packed)});
  1608. return;
  1609. }
  1610. Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  1611. if (UseCalls) {
  1612. if (Exp == 0)
  1613. IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][0][AccessSizeIndex],
  1614. AddrLong);
  1615. else
  1616. IRB.CreateCall(AsanMemoryAccessCallback[IsWrite][1][AccessSizeIndex],
  1617. {AddrLong, ConstantInt::get(IRB.getInt32Ty(), Exp)});
  1618. return;
  1619. }
  1620. Type *ShadowTy =
  1621. IntegerType::get(*C, std::max(8U, TypeSize >> Mapping.Scale));
  1622. Type *ShadowPtrTy = PointerType::get(ShadowTy, 0);
  1623. Value *ShadowPtr = memToShadow(AddrLong, IRB);
  1624. Value *CmpVal = Constant::getNullValue(ShadowTy);
  1625. Value *ShadowValue =
  1626. IRB.CreateLoad(ShadowTy, IRB.CreateIntToPtr(ShadowPtr, ShadowPtrTy));
  1627. Value *Cmp = IRB.CreateICmpNE(ShadowValue, CmpVal);
  1628. size_t Granularity = 1ULL << Mapping.Scale;
  1629. Instruction *CrashTerm = nullptr;
  1630. if (ClAlwaysSlowPath || (TypeSize < 8 * Granularity)) {
  1631. // We use branch weights for the slow path check, to indicate that the slow
  1632. // path is rarely taken. This seems to be the case for SPEC benchmarks.
  1633. Instruction *CheckTerm = SplitBlockAndInsertIfThen(
  1634. Cmp, InsertBefore, false, MDBuilder(*C).createBranchWeights(1, 100000));
  1635. assert(cast<BranchInst>(CheckTerm)->isUnconditional());
  1636. BasicBlock *NextBB = CheckTerm->getSuccessor(0);
  1637. IRB.SetInsertPoint(CheckTerm);
  1638. Value *Cmp2 = createSlowPathCmp(IRB, AddrLong, ShadowValue, TypeSize);
  1639. if (Recover) {
  1640. CrashTerm = SplitBlockAndInsertIfThen(Cmp2, CheckTerm, false);
  1641. } else {
  1642. BasicBlock *CrashBlock =
  1643. BasicBlock::Create(*C, "", NextBB->getParent(), NextBB);
  1644. CrashTerm = new UnreachableInst(*C, CrashBlock);
  1645. BranchInst *NewTerm = BranchInst::Create(CrashBlock, NextBB, Cmp2);
  1646. ReplaceInstWithInst(CheckTerm, NewTerm);
  1647. }
  1648. } else {
  1649. CrashTerm = SplitBlockAndInsertIfThen(Cmp, InsertBefore, !Recover);
  1650. }
  1651. Instruction *Crash = generateCrashCode(CrashTerm, AddrLong, IsWrite,
  1652. AccessSizeIndex, SizeArgument, Exp);
  1653. Crash->setDebugLoc(OrigIns->getDebugLoc());
  1654. }
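// For illustration only: for an 8-byte store with the default 1:8 mapping the
// inline fast path emitted above is roughly equivalent to
//   int8_t k = *(int8_t *)memToShadow(Addr);
//   if (k != 0)
//     __asan_report_store8(Addr);        // Exp/Recover select other variants
// Accesses narrower than the shadow granularity additionally take the slow
// path from createSlowPathCmp(), which tests whether the last accessed byte
// lies beyond the addressable prefix encoded in the shadow byte.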
  1655. // Instrument unusual size or unusual alignment.
1656. // We cannot do it with a single check, so we do a 1-byte check for the first
  1657. // and the last bytes. We call __asan_report_*_n(addr, real_size) to be able
  1658. // to report the actual access size.
  1659. void AddressSanitizer::instrumentUnusualSizeOrAlignment(
  1660. Instruction *I, Instruction *InsertBefore, Value *Addr, uint32_t TypeSize,
  1661. bool IsWrite, Value *SizeArgument, bool UseCalls, uint32_t Exp) {
  1662. IRBuilder<> IRB(InsertBefore);
  1663. Value *Size = ConstantInt::get(IntptrTy, TypeSize / 8);
  1664. Value *AddrLong = IRB.CreatePointerCast(Addr, IntptrTy);
  1665. if (UseCalls) {
  1666. if (Exp == 0)
  1667. IRB.CreateCall(AsanMemoryAccessCallbackSized[IsWrite][0],
  1668. {AddrLong, Size});
  1669. else
  1670. IRB.CreateCall(AsanMemoryAccessCallbackSized[IsWrite][1],
  1671. {AddrLong, Size, ConstantInt::get(IRB.getInt32Ty(), Exp)});
  1672. } else {
  1673. Value *LastByte = IRB.CreateIntToPtr(
  1674. IRB.CreateAdd(AddrLong, ConstantInt::get(IntptrTy, TypeSize / 8 - 1)),
  1675. Addr->getType());
  1676. instrumentAddress(I, InsertBefore, Addr, 8, IsWrite, Size, false, Exp);
  1677. instrumentAddress(I, InsertBefore, LastByte, 8, IsWrite, Size, false, Exp);
  1678. }
  1679. }
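// Illustrative example only: for a 10-byte access to %p on the non-call path,
// the code above boils down to
//   <1-byte check of %p>        // first byte
//   <1-byte check of %p + 9>    // last byte
// with Size passed through, so a failure is reported via
// __asan_report_load_n / __asan_report_store_n with the real access size (10).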
  1680. void ModuleAddressSanitizer::poisonOneInitializer(Function &GlobalInit,
  1681. GlobalValue *ModuleName) {
  1682. // Set up the arguments to our poison/unpoison functions.
  1683. IRBuilder<> IRB(&GlobalInit.front(),
  1684. GlobalInit.front().getFirstInsertionPt());
  1685. // Add a call to poison all external globals before the given function starts.
  1686. Value *ModuleNameAddr = ConstantExpr::getPointerCast(ModuleName, IntptrTy);
  1687. IRB.CreateCall(AsanPoisonGlobals, ModuleNameAddr);
  1688. // Add calls to unpoison all globals before each return instruction.
  1689. for (auto &BB : GlobalInit.getBasicBlockList())
  1690. if (ReturnInst *RI = dyn_cast<ReturnInst>(BB.getTerminator()))
  1691. CallInst::Create(AsanUnpoisonGlobals, "", RI);
  1692. }
  1693. void ModuleAddressSanitizer::createInitializerPoisonCalls(
  1694. Module &M, GlobalValue *ModuleName) {
  1695. GlobalVariable *GV = M.getGlobalVariable("llvm.global_ctors");
  1696. if (!GV)
  1697. return;
  1698. ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer());
  1699. if (!CA)
  1700. return;
  1701. for (Use &OP : CA->operands()) {
  1702. if (isa<ConstantAggregateZero>(OP)) continue;
  1703. ConstantStruct *CS = cast<ConstantStruct>(OP);
  1704. // Must have a function or null ptr.
  1705. if (Function *F = dyn_cast<Function>(CS->getOperand(1))) {
  1706. if (F->getName() == kAsanModuleCtorName) continue;
  1707. auto *Priority = cast<ConstantInt>(CS->getOperand(0));
  1708. // Don't instrument CTORs that will run before asan.module_ctor.
  1709. if (Priority->getLimitedValue() <= GetCtorAndDtorPriority(TargetTriple))
  1710. continue;
  1711. poisonOneInitializer(*F, ModuleName);
  1712. }
  1713. }
  1714. }
  1715. const GlobalVariable *
  1716. ModuleAddressSanitizer::getExcludedAliasedGlobal(const GlobalAlias &GA) const {
  1717. // In case this function should be expanded to include rules that do not just
  1718. // apply when CompileKernel is true, either guard all existing rules with an
  1719. // 'if (CompileKernel) { ... }' or be absolutely sure that all these rules
  1720. // should also apply to user space.
  1721. assert(CompileKernel && "Only expecting to be called when compiling kernel");
  1722. const Constant *C = GA.getAliasee();
  1723. // When compiling the kernel, globals that are aliased by symbols prefixed
  1724. // by "__" are special and cannot be padded with a redzone.
  1725. if (GA.getName().startswith("__"))
  1726. return dyn_cast<GlobalVariable>(C->stripPointerCastsAndAliases());
  1727. return nullptr;
  1728. }
  1729. bool ModuleAddressSanitizer::shouldInstrumentGlobal(GlobalVariable *G) const {
  1730. Type *Ty = G->getValueType();
  1731. LLVM_DEBUG(dbgs() << "GLOBAL: " << *G << "\n");
1732. // FIXME: Metadata should be attached directly to the global instead
  1733. // of being added to llvm.asan.globals.
  1734. if (GlobalsMD.get(G).IsExcluded) return false;
  1735. if (!Ty->isSized()) return false;
  1736. if (!G->hasInitializer()) return false;
1737. // Globals in address spaces 1 and 4 are supported for AMDGPU.
  1738. if (G->getAddressSpace() &&
  1739. !(TargetTriple.isAMDGPU() && !isUnsupportedAMDGPUAddrspace(G)))
  1740. return false;
  1741. if (GlobalWasGeneratedByCompiler(G)) return false; // Our own globals.
  1742. // Two problems with thread-locals:
  1743. // - The address of the main thread's copy can't be computed at link-time.
  1744. // - Need to poison all copies, not just the main thread's one.
  1745. if (G->isThreadLocal()) return false;
  1746. // For now, just ignore this Global if the alignment is large.
  1747. if (G->getAlignment() > getMinRedzoneSizeForGlobal()) return false;
  1748. // For non-COFF targets, only instrument globals known to be defined by this
  1749. // TU.
  1750. // FIXME: We can instrument comdat globals on ELF if we are using the
  1751. // GC-friendly metadata scheme.
  1752. if (!TargetTriple.isOSBinFormatCOFF()) {
  1753. if (!G->hasExactDefinition() || G->hasComdat())
  1754. return false;
  1755. } else {
  1756. // On COFF, don't instrument non-ODR linkages.
  1757. if (G->isInterposable())
  1758. return false;
  1759. }
  1760. // If a comdat is present, it must have a selection kind that implies ODR
  1761. // semantics: no duplicates, any, or exact match.
  1762. if (Comdat *C = G->getComdat()) {
  1763. switch (C->getSelectionKind()) {
  1764. case Comdat::Any:
  1765. case Comdat::ExactMatch:
  1766. case Comdat::NoDeduplicate:
  1767. break;
  1768. case Comdat::Largest:
  1769. case Comdat::SameSize:
  1770. return false;
  1771. }
  1772. }
  1773. if (G->hasSection()) {
  1774. // The kernel uses explicit sections for mostly special global variables
  1775. // that we should not instrument. E.g. the kernel may rely on their layout
  1776. // without redzones, or remove them at link time ("discard.*"), etc.
  1777. if (CompileKernel)
  1778. return false;
  1779. StringRef Section = G->getSection();
  1780. // Globals from llvm.metadata aren't emitted, do not instrument them.
  1781. if (Section == "llvm.metadata") return false;
  1782. // Do not instrument globals from special LLVM sections.
  1783. if (Section.contains("__llvm") || Section.contains("__LLVM"))
  1784. return false;
  1785. // Do not instrument function pointers to initialization and termination
  1786. // routines: dynamic linker will not properly handle redzones.
  1787. if (Section.startswith(".preinit_array") ||
  1788. Section.startswith(".init_array") ||
  1789. Section.startswith(".fini_array")) {
  1790. return false;
  1791. }
  1792. // Do not instrument user-defined sections (with names resembling
  1793. // valid C identifiers)
  1794. if (TargetTriple.isOSBinFormatELF()) {
  1795. if (llvm::all_of(Section,
  1796. [](char c) { return llvm::isAlnum(c) || c == '_'; }))
  1797. return false;
  1798. }
  1799. // On COFF, if the section name contains '$', it is highly likely that the
  1800. // user is using section sorting to create an array of globals similar to
  1801. // the way initialization callbacks are registered in .init_array and
  1802. // .CRT$XCU. The ATL also registers things in .ATL$__[azm]. Adding redzones
  1803. // to such globals is counterproductive, because the intent is that they
  1804. // will form an array, and out-of-bounds accesses are expected.
  1805. // See https://github.com/google/sanitizers/issues/305
  1806. // and http://msdn.microsoft.com/en-US/en-en/library/bb918180(v=vs.120).aspx
  1807. if (TargetTriple.isOSBinFormatCOFF() && Section.contains('$')) {
  1808. LLVM_DEBUG(dbgs() << "Ignoring global in sorted section (contains '$'): "
  1809. << *G << "\n");
  1810. return false;
  1811. }
  1812. if (TargetTriple.isOSBinFormatMachO()) {
  1813. StringRef ParsedSegment, ParsedSection;
  1814. unsigned TAA = 0, StubSize = 0;
  1815. bool TAAParsed;
  1816. cantFail(MCSectionMachO::ParseSectionSpecifier(
  1817. Section, ParsedSegment, ParsedSection, TAA, TAAParsed, StubSize));
  1818. // Ignore the globals from the __OBJC section. The ObjC runtime assumes
  1819. // those conform to /usr/lib/objc/runtime.h, so we can't add redzones to
  1820. // them.
  1821. if (ParsedSegment == "__OBJC" ||
  1822. (ParsedSegment == "__DATA" && ParsedSection.startswith("__objc_"))) {
  1823. LLVM_DEBUG(dbgs() << "Ignoring ObjC runtime global: " << *G << "\n");
  1824. return false;
  1825. }
  1826. // See https://github.com/google/sanitizers/issues/32
  1827. // Constant CFString instances are compiled in the following way:
  1828. // -- the string buffer is emitted into
  1829. // __TEXT,__cstring,cstring_literals
  1830. // -- the constant NSConstantString structure referencing that buffer
  1831. // is placed into __DATA,__cfstring
  1832. // Therefore there's no point in placing redzones into __DATA,__cfstring.
  1833. // Moreover, it causes the linker to crash on OS X 10.7
  1834. if (ParsedSegment == "__DATA" && ParsedSection == "__cfstring") {
  1835. LLVM_DEBUG(dbgs() << "Ignoring CFString: " << *G << "\n");
  1836. return false;
  1837. }
  1838. // The linker merges the contents of cstring_literals and removes the
  1839. // trailing zeroes.
  1840. if (ParsedSegment == "__TEXT" && (TAA & MachO::S_CSTRING_LITERALS)) {
  1841. LLVM_DEBUG(dbgs() << "Ignoring a cstring literal: " << *G << "\n");
  1842. return false;
  1843. }
  1844. }
  1845. }
  1846. if (CompileKernel) {
1847. // Globals that are prefixed by "__" are special and cannot be padded with a
  1848. // redzone.
  1849. if (G->getName().startswith("__"))
  1850. return false;
  1851. }
  1852. return true;
  1853. }
  1854. // On Mach-O platforms, we emit global metadata in a separate section of the
  1855. // binary in order to allow the linker to properly dead strip. This is only
  1856. // supported on recent versions of ld64.
  1857. bool ModuleAddressSanitizer::ShouldUseMachOGlobalsSection() const {
  1858. if (!TargetTriple.isOSBinFormatMachO())
  1859. return false;
  1860. if (TargetTriple.isMacOSX() && !TargetTriple.isMacOSXVersionLT(10, 11))
  1861. return true;
  1862. if (TargetTriple.isiOS() /* or tvOS */ && !TargetTriple.isOSVersionLT(9))
  1863. return true;
  1864. if (TargetTriple.isWatchOS() && !TargetTriple.isOSVersionLT(2))
  1865. return true;
  1866. return false;
  1867. }
  1868. StringRef ModuleAddressSanitizer::getGlobalMetadataSection() const {
  1869. switch (TargetTriple.getObjectFormat()) {
  1870. case Triple::COFF: return ".ASAN$GL";
  1871. case Triple::ELF: return "asan_globals";
  1872. case Triple::MachO: return "__DATA,__asan_globals,regular";
  1873. case Triple::Wasm:
  1874. case Triple::GOFF:
  1875. case Triple::XCOFF:
  1876. report_fatal_error(
  1877. "ModuleAddressSanitizer not implemented for object file format");
  1878. case Triple::UnknownObjectFormat:
  1879. break;
  1880. }
  1881. llvm_unreachable("unsupported object format");
  1882. }
  1883. void ModuleAddressSanitizer::initializeCallbacks(Module &M) {
  1884. IRBuilder<> IRB(*C);
  1885. // Declare our poisoning and unpoisoning functions.
  1886. AsanPoisonGlobals =
  1887. M.getOrInsertFunction(kAsanPoisonGlobalsName, IRB.getVoidTy(), IntptrTy);
  1888. AsanUnpoisonGlobals =
  1889. M.getOrInsertFunction(kAsanUnpoisonGlobalsName, IRB.getVoidTy());
  1890. // Declare functions that register/unregister globals.
  1891. AsanRegisterGlobals = M.getOrInsertFunction(
  1892. kAsanRegisterGlobalsName, IRB.getVoidTy(), IntptrTy, IntptrTy);
  1893. AsanUnregisterGlobals = M.getOrInsertFunction(
  1894. kAsanUnregisterGlobalsName, IRB.getVoidTy(), IntptrTy, IntptrTy);
  1895. // Declare the functions that find globals in a shared object and then invoke
  1896. // the (un)register function on them.
  1897. AsanRegisterImageGlobals = M.getOrInsertFunction(
  1898. kAsanRegisterImageGlobalsName, IRB.getVoidTy(), IntptrTy);
  1899. AsanUnregisterImageGlobals = M.getOrInsertFunction(
  1900. kAsanUnregisterImageGlobalsName, IRB.getVoidTy(), IntptrTy);
  1901. AsanRegisterElfGlobals =
  1902. M.getOrInsertFunction(kAsanRegisterElfGlobalsName, IRB.getVoidTy(),
  1903. IntptrTy, IntptrTy, IntptrTy);
  1904. AsanUnregisterElfGlobals =
  1905. M.getOrInsertFunction(kAsanUnregisterElfGlobalsName, IRB.getVoidTy(),
  1906. IntptrTy, IntptrTy, IntptrTy);
  1907. }
  1908. // Put the metadata and the instrumented global in the same group. This ensures
  1909. // that the metadata is discarded if the instrumented global is discarded.
  1910. void ModuleAddressSanitizer::SetComdatForGlobalMetadata(
  1911. GlobalVariable *G, GlobalVariable *Metadata, StringRef InternalSuffix) {
  1912. Module &M = *G->getParent();
  1913. Comdat *C = G->getComdat();
  1914. if (!C) {
  1915. if (!G->hasName()) {
  1916. // If G is unnamed, it must be internal. Give it an artificial name
  1917. // so we can put it in a comdat.
  1918. assert(G->hasLocalLinkage());
  1919. G->setName(Twine(kAsanGenPrefix) + "_anon_global");
  1920. }
  1921. if (!InternalSuffix.empty() && G->hasLocalLinkage()) {
  1922. std::string Name = std::string(G->getName());
  1923. Name += InternalSuffix;
  1924. C = M.getOrInsertComdat(Name);
  1925. } else {
  1926. C = M.getOrInsertComdat(G->getName());
  1927. }
  1928. // Make this IMAGE_COMDAT_SELECT_NODUPLICATES on COFF. Also upgrade private
  1929. // linkage to internal linkage so that a symbol table entry is emitted. This
  1930. // is necessary in order to create the comdat group.
  1931. if (TargetTriple.isOSBinFormatCOFF()) {
  1932. C->setSelectionKind(Comdat::NoDeduplicate);
  1933. if (G->hasPrivateLinkage())
  1934. G->setLinkage(GlobalValue::InternalLinkage);
  1935. }
  1936. G->setComdat(C);
  1937. }
  1938. assert(G->hasComdat());
  1939. Metadata->setComdat(G->getComdat());
  1940. }
  1941. // Create a separate metadata global and put it in the appropriate ASan
  1942. // global registration section.
  1943. GlobalVariable *
  1944. ModuleAddressSanitizer::CreateMetadataGlobal(Module &M, Constant *Initializer,
  1945. StringRef OriginalName) {
  1946. auto Linkage = TargetTriple.isOSBinFormatMachO()
  1947. ? GlobalVariable::InternalLinkage
  1948. : GlobalVariable::PrivateLinkage;
  1949. GlobalVariable *Metadata = new GlobalVariable(
  1950. M, Initializer->getType(), false, Linkage, Initializer,
  1951. Twine("__asan_global_") + GlobalValue::dropLLVMManglingEscape(OriginalName));
  1952. Metadata->setSection(getGlobalMetadataSection());
  1953. return Metadata;
  1954. }
  1955. Instruction *ModuleAddressSanitizer::CreateAsanModuleDtor(Module &M) {
  1956. AsanDtorFunction = Function::createWithDefaultAttr(
  1957. FunctionType::get(Type::getVoidTy(*C), false),
  1958. GlobalValue::InternalLinkage, 0, kAsanModuleDtorName, &M);
  1959. AsanDtorFunction->addFnAttr(Attribute::NoUnwind);
  1960. // Ensure Dtor cannot be discarded, even if in a comdat.
  1961. appendToUsed(M, {AsanDtorFunction});
  1962. BasicBlock *AsanDtorBB = BasicBlock::Create(*C, "", AsanDtorFunction);
  1963. return ReturnInst::Create(*C, AsanDtorBB);
  1964. }
  1965. void ModuleAddressSanitizer::InstrumentGlobalsCOFF(
  1966. IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
  1967. ArrayRef<Constant *> MetadataInitializers) {
  1968. assert(ExtendedGlobals.size() == MetadataInitializers.size());
  1969. auto &DL = M.getDataLayout();
  1970. SmallVector<GlobalValue *, 16> MetadataGlobals(ExtendedGlobals.size());
  1971. for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
  1972. Constant *Initializer = MetadataInitializers[i];
  1973. GlobalVariable *G = ExtendedGlobals[i];
  1974. GlobalVariable *Metadata =
  1975. CreateMetadataGlobal(M, Initializer, G->getName());
  1976. MDNode *MD = MDNode::get(M.getContext(), ValueAsMetadata::get(G));
  1977. Metadata->setMetadata(LLVMContext::MD_associated, MD);
  1978. MetadataGlobals[i] = Metadata;
  1979. // The MSVC linker always inserts padding when linking incrementally. We
  1980. // cope with that by aligning each struct to its size, which must be a power
  1981. // of two.
  1982. unsigned SizeOfGlobalStruct = DL.getTypeAllocSize(Initializer->getType());
  1983. assert(isPowerOf2_32(SizeOfGlobalStruct) &&
  1984. "global metadata will not be padded appropriately");
  1985. Metadata->setAlignment(assumeAligned(SizeOfGlobalStruct));
  1986. SetComdatForGlobalMetadata(G, Metadata, "");
  1987. }
  1988. // Update llvm.compiler.used, adding the new metadata globals. This is
  1989. // needed so that during LTO these variables stay alive.
  1990. if (!MetadataGlobals.empty())
  1991. appendToCompilerUsed(M, MetadataGlobals);
  1992. }
  1993. void ModuleAddressSanitizer::InstrumentGlobalsELF(
  1994. IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
  1995. ArrayRef<Constant *> MetadataInitializers,
  1996. const std::string &UniqueModuleId) {
  1997. assert(ExtendedGlobals.size() == MetadataInitializers.size());
1998. // Putting globals in a comdat changes the semantics and can potentially cause
1999. // false-negative ODR violations at link time. If ODR indicators are used, we
2000. // keep the comdat sections, as link-time ODR violations will be detected on
2001. // the ODR indicator symbols.
  2002. bool UseComdatForGlobalsGC = UseOdrIndicator;
  2003. SmallVector<GlobalValue *, 16> MetadataGlobals(ExtendedGlobals.size());
  2004. for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
  2005. GlobalVariable *G = ExtendedGlobals[i];
  2006. GlobalVariable *Metadata =
  2007. CreateMetadataGlobal(M, MetadataInitializers[i], G->getName());
  2008. MDNode *MD = MDNode::get(M.getContext(), ValueAsMetadata::get(G));
  2009. Metadata->setMetadata(LLVMContext::MD_associated, MD);
  2010. MetadataGlobals[i] = Metadata;
  2011. if (UseComdatForGlobalsGC)
  2012. SetComdatForGlobalMetadata(G, Metadata, UniqueModuleId);
  2013. }
  2014. // Update llvm.compiler.used, adding the new metadata globals. This is
  2015. // needed so that during LTO these variables stay alive.
  2016. if (!MetadataGlobals.empty())
  2017. appendToCompilerUsed(M, MetadataGlobals);
  2018. // RegisteredFlag serves two purposes. First, we can pass it to dladdr()
  2019. // to look up the loaded image that contains it. Second, we can store in it
  2020. // whether registration has already occurred, to prevent duplicate
  2021. // registration.
  2022. //
  2023. // Common linkage ensures that there is only one global per shared library.
  2024. GlobalVariable *RegisteredFlag = new GlobalVariable(
  2025. M, IntptrTy, false, GlobalVariable::CommonLinkage,
  2026. ConstantInt::get(IntptrTy, 0), kAsanGlobalsRegisteredFlagName);
  2027. RegisteredFlag->setVisibility(GlobalVariable::HiddenVisibility);
  2028. // Create start and stop symbols.
  2029. GlobalVariable *StartELFMetadata = new GlobalVariable(
  2030. M, IntptrTy, false, GlobalVariable::ExternalWeakLinkage, nullptr,
  2031. "__start_" + getGlobalMetadataSection());
  2032. StartELFMetadata->setVisibility(GlobalVariable::HiddenVisibility);
  2033. GlobalVariable *StopELFMetadata = new GlobalVariable(
  2034. M, IntptrTy, false, GlobalVariable::ExternalWeakLinkage, nullptr,
  2035. "__stop_" + getGlobalMetadataSection());
  2036. StopELFMetadata->setVisibility(GlobalVariable::HiddenVisibility);
  2037. // Create a call to register the globals with the runtime.
  2038. IRB.CreateCall(AsanRegisterElfGlobals,
  2039. {IRB.CreatePointerCast(RegisteredFlag, IntptrTy),
  2040. IRB.CreatePointerCast(StartELFMetadata, IntptrTy),
  2041. IRB.CreatePointerCast(StopELFMetadata, IntptrTy)});
  2042. // We also need to unregister globals at the end, e.g., when a shared library
  2043. // gets closed.
  2044. if (DestructorKind != AsanDtorKind::None) {
  2045. IRBuilder<> IrbDtor(CreateAsanModuleDtor(M));
  2046. IrbDtor.CreateCall(AsanUnregisterElfGlobals,
  2047. {IRB.CreatePointerCast(RegisteredFlag, IntptrTy),
  2048. IRB.CreatePointerCast(StartELFMetadata, IntptrTy),
  2049. IRB.CreatePointerCast(StopELFMetadata, IntptrTy)});
  2050. }
  2051. }
  2052. void ModuleAddressSanitizer::InstrumentGlobalsMachO(
  2053. IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
  2054. ArrayRef<Constant *> MetadataInitializers) {
  2055. assert(ExtendedGlobals.size() == MetadataInitializers.size());
2056. // On recent Mach-O platforms, use a structure that binds the liveness of
2057. // the global variable to the metadata struct. Keep the list of "Liveness"
2058. // GVs created so they can later be added to llvm.compiler.used.
  2059. StructType *LivenessTy = StructType::get(IntptrTy, IntptrTy);
  2060. SmallVector<GlobalValue *, 16> LivenessGlobals(ExtendedGlobals.size());
  2061. for (size_t i = 0; i < ExtendedGlobals.size(); i++) {
  2062. Constant *Initializer = MetadataInitializers[i];
  2063. GlobalVariable *G = ExtendedGlobals[i];
  2064. GlobalVariable *Metadata =
  2065. CreateMetadataGlobal(M, Initializer, G->getName());
  2066. // On recent Mach-O platforms, we emit the global metadata in a way that
  2067. // allows the linker to properly strip dead globals.
  2068. auto LivenessBinder =
  2069. ConstantStruct::get(LivenessTy, Initializer->getAggregateElement(0u),
  2070. ConstantExpr::getPointerCast(Metadata, IntptrTy));
  2071. GlobalVariable *Liveness = new GlobalVariable(
  2072. M, LivenessTy, false, GlobalVariable::InternalLinkage, LivenessBinder,
  2073. Twine("__asan_binder_") + G->getName());
  2074. Liveness->setSection("__DATA,__asan_liveness,regular,live_support");
  2075. LivenessGlobals[i] = Liveness;
  2076. }
  2077. // Update llvm.compiler.used, adding the new liveness globals. This is
  2078. // needed so that during LTO these variables stay alive. The alternative
2079. // would be to have the linker handle the LTO symbols, but libLTO's
2080. // current API does not expose access to the section for each symbol.
  2081. if (!LivenessGlobals.empty())
  2082. appendToCompilerUsed(M, LivenessGlobals);
  2083. // RegisteredFlag serves two purposes. First, we can pass it to dladdr()
  2084. // to look up the loaded image that contains it. Second, we can store in it
  2085. // whether registration has already occurred, to prevent duplicate
  2086. // registration.
  2087. //
2088. // Common linkage ensures that there is only one global per shared library.
  2089. GlobalVariable *RegisteredFlag = new GlobalVariable(
  2090. M, IntptrTy, false, GlobalVariable::CommonLinkage,
  2091. ConstantInt::get(IntptrTy, 0), kAsanGlobalsRegisteredFlagName);
  2092. RegisteredFlag->setVisibility(GlobalVariable::HiddenVisibility);
  2093. IRB.CreateCall(AsanRegisterImageGlobals,
  2094. {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)});
  2095. // We also need to unregister globals at the end, e.g., when a shared library
  2096. // gets closed.
  2097. if (DestructorKind != AsanDtorKind::None) {
  2098. IRBuilder<> IrbDtor(CreateAsanModuleDtor(M));
  2099. IrbDtor.CreateCall(AsanUnregisterImageGlobals,
  2100. {IRB.CreatePointerCast(RegisteredFlag, IntptrTy)});
  2101. }
  2102. }
  2103. void ModuleAddressSanitizer::InstrumentGlobalsWithMetadataArray(
  2104. IRBuilder<> &IRB, Module &M, ArrayRef<GlobalVariable *> ExtendedGlobals,
  2105. ArrayRef<Constant *> MetadataInitializers) {
  2106. assert(ExtendedGlobals.size() == MetadataInitializers.size());
  2107. unsigned N = ExtendedGlobals.size();
  2108. assert(N > 0);
  2109. // On platforms that don't have a custom metadata section, we emit an array
  2110. // of global metadata structures.
  2111. ArrayType *ArrayOfGlobalStructTy =
  2112. ArrayType::get(MetadataInitializers[0]->getType(), N);
  2113. auto AllGlobals = new GlobalVariable(
  2114. M, ArrayOfGlobalStructTy, false, GlobalVariable::InternalLinkage,
  2115. ConstantArray::get(ArrayOfGlobalStructTy, MetadataInitializers), "");
  2116. if (Mapping.Scale > 3)
  2117. AllGlobals->setAlignment(Align(1ULL << Mapping.Scale));
  2118. IRB.CreateCall(AsanRegisterGlobals,
  2119. {IRB.CreatePointerCast(AllGlobals, IntptrTy),
  2120. ConstantInt::get(IntptrTy, N)});
  2121. // We also need to unregister globals at the end, e.g., when a shared library
  2122. // gets closed.
  2123. if (DestructorKind != AsanDtorKind::None) {
  2124. IRBuilder<> IrbDtor(CreateAsanModuleDtor(M));
  2125. IrbDtor.CreateCall(AsanUnregisterGlobals,
  2126. {IRB.CreatePointerCast(AllGlobals, IntptrTy),
  2127. ConstantInt::get(IntptrTy, N)});
  2128. }
  2129. }
  2130. // This function replaces all global variables with new variables that have
  2131. // trailing redzones. It also creates a function that poisons
  2132. // redzones and inserts this function into llvm.global_ctors.
  2133. // Sets *CtorComdat to true if the global registration code emitted into the
  2134. // asan constructor is comdat-compatible.
  2135. bool ModuleAddressSanitizer::InstrumentGlobals(IRBuilder<> &IRB, Module &M,
  2136. bool *CtorComdat) {
  2137. *CtorComdat = false;
  2138. // Build set of globals that are aliased by some GA, where
  2139. // getExcludedAliasedGlobal(GA) returns the relevant GlobalVariable.
  2140. SmallPtrSet<const GlobalVariable *, 16> AliasedGlobalExclusions;
  2141. if (CompileKernel) {
  2142. for (auto &GA : M.aliases()) {
  2143. if (const GlobalVariable *GV = getExcludedAliasedGlobal(GA))
  2144. AliasedGlobalExclusions.insert(GV);
  2145. }
  2146. }
  2147. SmallVector<GlobalVariable *, 16> GlobalsToChange;
  2148. for (auto &G : M.globals()) {
  2149. if (!AliasedGlobalExclusions.count(&G) && shouldInstrumentGlobal(&G))
  2150. GlobalsToChange.push_back(&G);
  2151. }
  2152. size_t n = GlobalsToChange.size();
  2153. if (n == 0) {
  2154. *CtorComdat = true;
  2155. return false;
  2156. }
  2157. auto &DL = M.getDataLayout();
  2158. // A global is described by a structure
  2159. // size_t beg;
  2160. // size_t size;
  2161. // size_t size_with_redzone;
  2162. // const char *name;
  2163. // const char *module_name;
  2164. // size_t has_dynamic_init;
  2165. // void *source_location;
  2166. // size_t odr_indicator;
  2167. // We initialize an array of such structures and pass it to a run-time call.
  2168. StructType *GlobalStructTy =
  2169. StructType::get(IntptrTy, IntptrTy, IntptrTy, IntptrTy, IntptrTy,
  2170. IntptrTy, IntptrTy, IntptrTy);
  2171. SmallVector<GlobalVariable *, 16> NewGlobals(n);
  2172. SmallVector<Constant *, 16> Initializers(n);
  2173. bool HasDynamicallyInitializedGlobals = false;
2174. // We shouldn't merge identical module-name strings, as this string serves
2175. // as a unique module ID at runtime.
  2176. GlobalVariable *ModuleName = createPrivateGlobalForString(
  2177. M, M.getModuleIdentifier(), /*AllowMerging*/ false, kAsanGenPrefix);
  2178. for (size_t i = 0; i < n; i++) {
  2179. GlobalVariable *G = GlobalsToChange[i];
2180. // FIXME: Metadata should be attached directly to the global instead
2181. // of being added to llvm.asan.globals.
  2182. auto MD = GlobalsMD.get(G);
  2183. StringRef NameForGlobal = G->getName();
2184. // Create a string holding the global name (use the name from metadata
2185. // if it's available, otherwise just use the name of the global variable).
  2186. GlobalVariable *Name = createPrivateGlobalForString(
  2187. M, MD.Name.empty() ? NameForGlobal : MD.Name,
  2188. /*AllowMerging*/ true, kAsanGenPrefix);
  2189. Type *Ty = G->getValueType();
  2190. const uint64_t SizeInBytes = DL.getTypeAllocSize(Ty);
  2191. const uint64_t RightRedzoneSize = getRedzoneSizeForGlobal(SizeInBytes);
  2192. Type *RightRedZoneTy = ArrayType::get(IRB.getInt8Ty(), RightRedzoneSize);
  2193. StructType *NewTy = StructType::get(Ty, RightRedZoneTy);
  2194. Constant *NewInitializer = ConstantStruct::get(
  2195. NewTy, G->getInitializer(), Constant::getNullValue(RightRedZoneTy));
  2196. // Create a new global variable with enough space for a redzone.
  2197. GlobalValue::LinkageTypes Linkage = G->getLinkage();
  2198. if (G->isConstant() && Linkage == GlobalValue::PrivateLinkage)
  2199. Linkage = GlobalValue::InternalLinkage;
  2200. GlobalVariable *NewGlobal = new GlobalVariable(
  2201. M, NewTy, G->isConstant(), Linkage, NewInitializer, "", G,
  2202. G->getThreadLocalMode(), G->getAddressSpace());
  2203. NewGlobal->copyAttributesFrom(G);
  2204. NewGlobal->setComdat(G->getComdat());
  2205. NewGlobal->setAlignment(MaybeAlign(getMinRedzoneSizeForGlobal()));
2206. // Don't fold globals with redzones. The ODR violation detector and redzone
2207. // poisoning implicitly create a dependence on the global's address, so it
2208. // is no longer valid for it to be marked unnamed_addr.
  2209. NewGlobal->setUnnamedAddr(GlobalValue::UnnamedAddr::None);
  2210. // Move null-terminated C strings to "__asan_cstring" section on Darwin.
  2211. if (TargetTriple.isOSBinFormatMachO() && !G->hasSection() &&
  2212. G->isConstant()) {
  2213. auto Seq = dyn_cast<ConstantDataSequential>(G->getInitializer());
  2214. if (Seq && Seq->isCString())
  2215. NewGlobal->setSection("__TEXT,__asan_cstring,regular");
  2216. }
  2217. // Transfer the debug info and type metadata. The payload starts at offset
  2218. // zero so we can copy the metadata over as is.
  2219. NewGlobal->copyMetadata(G, 0);
  2220. Value *Indices2[2];
  2221. Indices2[0] = IRB.getInt32(0);
  2222. Indices2[1] = IRB.getInt32(0);
  2223. G->replaceAllUsesWith(
  2224. ConstantExpr::getGetElementPtr(NewTy, NewGlobal, Indices2, true));
  2225. NewGlobal->takeName(G);
  2226. G->eraseFromParent();
  2227. NewGlobals[i] = NewGlobal;
  2228. Constant *SourceLoc;
  2229. if (!MD.SourceLoc.empty()) {
  2230. auto SourceLocGlobal = createPrivateGlobalForSourceLoc(M, MD.SourceLoc);
  2231. SourceLoc = ConstantExpr::getPointerCast(SourceLocGlobal, IntptrTy);
  2232. } else {
  2233. SourceLoc = ConstantInt::get(IntptrTy, 0);
  2234. }
  2235. Constant *ODRIndicator = ConstantExpr::getNullValue(IRB.getInt8PtrTy());
  2236. GlobalValue *InstrumentedGlobal = NewGlobal;
  2237. bool CanUsePrivateAliases =
  2238. TargetTriple.isOSBinFormatELF() || TargetTriple.isOSBinFormatMachO() ||
  2239. TargetTriple.isOSBinFormatWasm();
  2240. if (CanUsePrivateAliases && UsePrivateAlias) {
  2241. // Create local alias for NewGlobal to avoid crash on ODR between
  2242. // instrumented and non-instrumented libraries.
  2243. InstrumentedGlobal =
  2244. GlobalAlias::create(GlobalValue::PrivateLinkage, "", NewGlobal);
  2245. }
  2246. // ODR should not happen for local linkage.
  2247. if (NewGlobal->hasLocalLinkage()) {
  2248. ODRIndicator = ConstantExpr::getIntToPtr(ConstantInt::get(IntptrTy, -1),
  2249. IRB.getInt8PtrTy());
  2250. } else if (UseOdrIndicator) {
  2251. // With local aliases, we need to provide another externally visible
  2252. // symbol __odr_asan_XXX to detect ODR violation.
  2253. auto *ODRIndicatorSym =
  2254. new GlobalVariable(M, IRB.getInt8Ty(), false, Linkage,
  2255. Constant::getNullValue(IRB.getInt8Ty()),
  2256. kODRGenPrefix + NameForGlobal, nullptr,
  2257. NewGlobal->getThreadLocalMode());
  2258. // Set meaningful attributes for indicator symbol.
  2259. ODRIndicatorSym->setVisibility(NewGlobal->getVisibility());
  2260. ODRIndicatorSym->setDLLStorageClass(NewGlobal->getDLLStorageClass());
  2261. ODRIndicatorSym->setAlignment(Align(1));
  2262. ODRIndicator = ODRIndicatorSym;
  2263. }
  2264. Constant *Initializer = ConstantStruct::get(
  2265. GlobalStructTy,
  2266. ConstantExpr::getPointerCast(InstrumentedGlobal, IntptrTy),
  2267. ConstantInt::get(IntptrTy, SizeInBytes),
  2268. ConstantInt::get(IntptrTy, SizeInBytes + RightRedzoneSize),
  2269. ConstantExpr::getPointerCast(Name, IntptrTy),
  2270. ConstantExpr::getPointerCast(ModuleName, IntptrTy),
  2271. ConstantInt::get(IntptrTy, MD.IsDynInit), SourceLoc,
  2272. ConstantExpr::getPointerCast(ODRIndicator, IntptrTy));
  2273. if (ClInitializers && MD.IsDynInit) HasDynamicallyInitializedGlobals = true;
  2274. LLVM_DEBUG(dbgs() << "NEW GLOBAL: " << *NewGlobal << "\n");
  2275. Initializers[i] = Initializer;
  2276. }
  2277. // Add instrumented globals to llvm.compiler.used list to avoid LTO from
  2278. // ConstantMerge'ing them.
  2279. SmallVector<GlobalValue *, 16> GlobalsToAddToUsedList;
  2280. for (size_t i = 0; i < n; i++) {
  2281. GlobalVariable *G = NewGlobals[i];
  2282. if (G->getName().empty()) continue;
  2283. GlobalsToAddToUsedList.push_back(G);
  2284. }
  2285. appendToCompilerUsed(M, ArrayRef<GlobalValue *>(GlobalsToAddToUsedList));
  2286. std::string ELFUniqueModuleId =
  2287. (UseGlobalsGC && TargetTriple.isOSBinFormatELF()) ? getUniqueModuleId(&M)
  2288. : "";
  2289. if (!ELFUniqueModuleId.empty()) {
  2290. InstrumentGlobalsELF(IRB, M, NewGlobals, Initializers, ELFUniqueModuleId);
  2291. *CtorComdat = true;
  2292. } else if (UseGlobalsGC && TargetTriple.isOSBinFormatCOFF()) {
  2293. InstrumentGlobalsCOFF(IRB, M, NewGlobals, Initializers);
  2294. } else if (UseGlobalsGC && ShouldUseMachOGlobalsSection()) {
  2295. InstrumentGlobalsMachO(IRB, M, NewGlobals, Initializers);
  2296. } else {
  2297. InstrumentGlobalsWithMetadataArray(IRB, M, NewGlobals, Initializers);
  2298. }
  2299. // Create calls for poisoning before initializers run and unpoisoning after.
  2300. if (HasDynamicallyInitializedGlobals)
  2301. createInitializerPoisonCalls(M, ModuleName);
  2302. LLVM_DEBUG(dbgs() << M);
  2303. return true;
  2304. }
  2305. uint64_t
  2306. ModuleAddressSanitizer::getRedzoneSizeForGlobal(uint64_t SizeInBytes) const {
  2307. constexpr uint64_t kMaxRZ = 1 << 18;
  2308. const uint64_t MinRZ = getMinRedzoneSizeForGlobal();
  2309. uint64_t RZ = 0;
  2310. if (SizeInBytes <= MinRZ / 2) {
2311. // Reduce the redzone size for small objects, e.g. int, char[1]. MinRZ is
2312. // at least 32 bytes, so optimize when SizeInBytes is less than or equal to
2313. // half of MinRZ.
  2314. RZ = MinRZ - SizeInBytes;
  2315. } else {
  2316. // Calculate RZ, where MinRZ <= RZ <= MaxRZ, and RZ ~ 1/4 * SizeInBytes.
  2317. RZ = std::max(MinRZ, std::min(kMaxRZ, (SizeInBytes / MinRZ / 4) * MinRZ));
  2318. // Round up to multiple of MinRZ.
  2319. if (SizeInBytes % MinRZ)
  2320. RZ += MinRZ - (SizeInBytes % MinRZ);
  2321. }
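// Illustrative example (a sketch, assuming getMinRedzoneSizeForGlobal() returns
// 32): a 100-byte global gets RZ = max(32, (100 / 32 / 4) * 32) = 32, and the
// round-up step adds 32 - (100 % 32) = 28, so RZ = 60 and RZ + SizeInBytes =
// 160 is a multiple of MinRZ. A 4096-byte global gets RZ = 1024, i.e. roughly
// a quarter of its size.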
  2322. assert((RZ + SizeInBytes) % MinRZ == 0);
  2323. return RZ;
  2324. }
  2325. int ModuleAddressSanitizer::GetAsanVersion(const Module &M) const {
  2326. int LongSize = M.getDataLayout().getPointerSizeInBits();
  2327. bool isAndroid = Triple(M.getTargetTriple()).isAndroid();
  2328. int Version = 8;
  2329. // 32-bit Android is one version ahead because of the switch to dynamic
  2330. // shadow.
  2331. Version += (LongSize == 32 && isAndroid);
  2332. return Version;
  2333. }
  2334. bool ModuleAddressSanitizer::instrumentModule(Module &M) {
  2335. initializeCallbacks(M);
2336. // Create a module constructor. A destructor is created lazily because not
2337. // all platforms or modules need it.
  2338. if (CompileKernel) {
  2339. // The kernel always builds with its own runtime, and therefore does not
  2340. // need the init and version check calls.
  2341. AsanCtorFunction = createSanitizerCtor(M, kAsanModuleCtorName);
  2342. } else {
  2343. std::string AsanVersion = std::to_string(GetAsanVersion(M));
  2344. std::string VersionCheckName =
  2345. ClInsertVersionCheck ? (kAsanVersionCheckNamePrefix + AsanVersion) : "";
  2346. std::tie(AsanCtorFunction, std::ignore) =
  2347. createSanitizerCtorAndInitFunctions(M, kAsanModuleCtorName,
  2348. kAsanInitName, /*InitArgTypes=*/{},
  2349. /*InitArgs=*/{}, VersionCheckName);
  2350. }
  2351. bool CtorComdat = true;
  2352. if (ClGlobals) {
  2353. IRBuilder<> IRB(AsanCtorFunction->getEntryBlock().getTerminator());
  2354. InstrumentGlobals(IRB, M, &CtorComdat);
  2355. }
  2356. const uint64_t Priority = GetCtorAndDtorPriority(TargetTriple);
2357. // Put the constructor and destructor in comdat if both
2358. // (1) global instrumentation is not TU-specific and
2359. // (2) the target is ELF.
  2360. if (UseCtorComdat && TargetTriple.isOSBinFormatELF() && CtorComdat) {
  2361. AsanCtorFunction->setComdat(M.getOrInsertComdat(kAsanModuleCtorName));
  2362. appendToGlobalCtors(M, AsanCtorFunction, Priority, AsanCtorFunction);
  2363. if (AsanDtorFunction) {
  2364. AsanDtorFunction->setComdat(M.getOrInsertComdat(kAsanModuleDtorName));
  2365. appendToGlobalDtors(M, AsanDtorFunction, Priority, AsanDtorFunction);
  2366. }
  2367. } else {
  2368. appendToGlobalCtors(M, AsanCtorFunction, Priority);
  2369. if (AsanDtorFunction)
  2370. appendToGlobalDtors(M, AsanDtorFunction, Priority);
  2371. }
  2372. return true;
  2373. }
  2374. void AddressSanitizer::initializeCallbacks(Module &M) {
  2375. IRBuilder<> IRB(*C);
  2376. // Create __asan_report* callbacks.
  2377. // IsWrite, TypeSize and Exp are encoded in the function name.
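// For example (illustrative, assuming the default "__asan_" callback prefix),
// an 8-byte load with Exp == 0 and Recover == false resolves to the
// "__asan_report_load8" error callback and the "__asan_load8" access callback.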
  2378. for (int Exp = 0; Exp < 2; Exp++) {
  2379. for (size_t AccessIsWrite = 0; AccessIsWrite <= 1; AccessIsWrite++) {
  2380. const std::string TypeStr = AccessIsWrite ? "store" : "load";
  2381. const std::string ExpStr = Exp ? "exp_" : "";
  2382. const std::string EndingStr = Recover ? "_noabort" : "";
  2383. SmallVector<Type *, 3> Args2 = {IntptrTy, IntptrTy};
  2384. SmallVector<Type *, 2> Args1{1, IntptrTy};
  2385. if (Exp) {
  2386. Type *ExpType = Type::getInt32Ty(*C);
  2387. Args2.push_back(ExpType);
  2388. Args1.push_back(ExpType);
  2389. }
  2390. AsanErrorCallbackSized[AccessIsWrite][Exp] = M.getOrInsertFunction(
  2391. kAsanReportErrorTemplate + ExpStr + TypeStr + "_n" + EndingStr,
  2392. FunctionType::get(IRB.getVoidTy(), Args2, false));
  2393. AsanMemoryAccessCallbackSized[AccessIsWrite][Exp] = M.getOrInsertFunction(
  2394. ClMemoryAccessCallbackPrefix + ExpStr + TypeStr + "N" + EndingStr,
  2395. FunctionType::get(IRB.getVoidTy(), Args2, false));
  2396. for (size_t AccessSizeIndex = 0; AccessSizeIndex < kNumberOfAccessSizes;
  2397. AccessSizeIndex++) {
  2398. const std::string Suffix = TypeStr + itostr(1ULL << AccessSizeIndex);
  2399. AsanErrorCallback[AccessIsWrite][Exp][AccessSizeIndex] =
  2400. M.getOrInsertFunction(
  2401. kAsanReportErrorTemplate + ExpStr + Suffix + EndingStr,
  2402. FunctionType::get(IRB.getVoidTy(), Args1, false));
  2403. AsanMemoryAccessCallback[AccessIsWrite][Exp][AccessSizeIndex] =
  2404. M.getOrInsertFunction(
  2405. ClMemoryAccessCallbackPrefix + ExpStr + Suffix + EndingStr,
  2406. FunctionType::get(IRB.getVoidTy(), Args1, false));
  2407. }
  2408. }
  2409. }
  2410. const std::string MemIntrinCallbackPrefix =
  2411. CompileKernel ? std::string("") : ClMemoryAccessCallbackPrefix;
  2412. AsanMemmove = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memmove",
  2413. IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
  2414. IRB.getInt8PtrTy(), IntptrTy);
  2415. AsanMemcpy = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memcpy",
  2416. IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
  2417. IRB.getInt8PtrTy(), IntptrTy);
  2418. AsanMemset = M.getOrInsertFunction(MemIntrinCallbackPrefix + "memset",
  2419. IRB.getInt8PtrTy(), IRB.getInt8PtrTy(),
  2420. IRB.getInt32Ty(), IntptrTy);
  2421. AsanHandleNoReturnFunc =
  2422. M.getOrInsertFunction(kAsanHandleNoReturnName, IRB.getVoidTy());
  2423. AsanPtrCmpFunction =
  2424. M.getOrInsertFunction(kAsanPtrCmp, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2425. AsanPtrSubFunction =
  2426. M.getOrInsertFunction(kAsanPtrSub, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2427. if (Mapping.InGlobal)
  2428. AsanShadowGlobal = M.getOrInsertGlobal("__asan_shadow",
  2429. ArrayType::get(IRB.getInt8Ty(), 0));
  2430. AMDGPUAddressShared = M.getOrInsertFunction(
  2431. kAMDGPUAddressSharedName, IRB.getInt1Ty(), IRB.getInt8PtrTy());
  2432. AMDGPUAddressPrivate = M.getOrInsertFunction(
  2433. kAMDGPUAddressPrivateName, IRB.getInt1Ty(), IRB.getInt8PtrTy());
  2434. }
  2435. bool AddressSanitizer::maybeInsertAsanInitAtFunctionEntry(Function &F) {
  2436. // For each NSObject descendant having a +load method, this method is invoked
  2437. // by the ObjC runtime before any of the static constructors is called.
  2438. // Therefore we need to instrument such methods with a call to __asan_init
  2439. // at the beginning in order to initialize our runtime before any access to
  2440. // the shadow memory.
  2441. // We cannot just ignore these methods, because they may call other
  2442. // instrumented functions.
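// (Illustrative: the IR function emitted for an Objective-C method such as
// "+[MyClass load]" carries that selector in its name, so the " load]"
// substring check below matches it.)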
  2443. if (F.getName().find(" load]") != std::string::npos) {
  2444. FunctionCallee AsanInitFunction =
  2445. declareSanitizerInitFunction(*F.getParent(), kAsanInitName, {});
  2446. IRBuilder<> IRB(&F.front(), F.front().begin());
  2447. IRB.CreateCall(AsanInitFunction, {});
  2448. return true;
  2449. }
  2450. return false;
  2451. }
  2452. bool AddressSanitizer::maybeInsertDynamicShadowAtFunctionEntry(Function &F) {
  2453. // Generate code only when dynamic addressing is needed.
  2454. if (Mapping.Offset != kDynamicShadowSentinel)
  2455. return false;
  2456. IRBuilder<> IRB(&F.front().front());
  2457. if (Mapping.InGlobal) {
  2458. if (ClWithIfuncSuppressRemat) {
  2459. // An empty inline asm with input reg == output reg.
  2460. // An opaque pointer-to-int cast, basically.
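// Roughly (illustrative IR):
//   %.asan.shadow = call i64 asm "", "=r,0"(ptr @__asan_shadow)
// The empty asm forces the shadow base through a register and keeps later
// passes from rematerializing the address computation.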
  2461. InlineAsm *Asm = InlineAsm::get(
  2462. FunctionType::get(IntptrTy, {AsanShadowGlobal->getType()}, false),
  2463. StringRef(""), StringRef("=r,0"),
  2464. /*hasSideEffects=*/false);
  2465. LocalDynamicShadow =
  2466. IRB.CreateCall(Asm, {AsanShadowGlobal}, ".asan.shadow");
  2467. } else {
  2468. LocalDynamicShadow =
  2469. IRB.CreatePointerCast(AsanShadowGlobal, IntptrTy, ".asan.shadow");
  2470. }
  2471. } else {
  2472. Value *GlobalDynamicAddress = F.getParent()->getOrInsertGlobal(
  2473. kAsanShadowMemoryDynamicAddress, IntptrTy);
  2474. LocalDynamicShadow = IRB.CreateLoad(IntptrTy, GlobalDynamicAddress);
  2475. }
  2476. return true;
  2477. }
  2478. void AddressSanitizer::markEscapedLocalAllocas(Function &F) {
  2479. // Find the one possible call to llvm.localescape and pre-mark allocas passed
  2480. // to it as uninteresting. This assumes we haven't started processing allocas
  2481. // yet. This check is done up front because iterating the use list in
  2482. // isInterestingAlloca would be algorithmically slower.
  2483. assert(ProcessedAllocas.empty() && "must process localescape before allocas");
  2484. // Try to get the declaration of llvm.localescape. If it's not in the module,
  2485. // we can exit early.
  2486. if (!F.getParent()->getFunction("llvm.localescape")) return;
2487. // Look for a call to llvm.localescape in the entry block. It can't be in
2488. // any other block.
  2489. for (Instruction &I : F.getEntryBlock()) {
  2490. IntrinsicInst *II = dyn_cast<IntrinsicInst>(&I);
  2491. if (II && II->getIntrinsicID() == Intrinsic::localescape) {
  2492. // We found a call. Mark all the allocas passed in as uninteresting.
  2493. for (Value *Arg : II->args()) {
  2494. AllocaInst *AI = dyn_cast<AllocaInst>(Arg->stripPointerCasts());
  2495. assert(AI && AI->isStaticAlloca() &&
  2496. "non-static alloca arg to localescape");
  2497. ProcessedAllocas[AI] = false;
  2498. }
  2499. break;
  2500. }
  2501. }
  2502. }
  2503. bool AddressSanitizer::suppressInstrumentationSiteForDebug(int &Instrumented) {
  2504. bool ShouldInstrument =
  2505. ClDebugMin < 0 || ClDebugMax < 0 ||
  2506. (Instrumented >= ClDebugMin && Instrumented <= ClDebugMax);
  2507. Instrumented++;
  2508. return !ShouldInstrument;
  2509. }
  2510. bool AddressSanitizer::instrumentFunction(Function &F,
  2511. const TargetLibraryInfo *TLI) {
  2512. if (F.empty())
  2513. return false;
  2514. if (F.getLinkage() == GlobalValue::AvailableExternallyLinkage) return false;
  2515. if (!ClDebugFunc.empty() && ClDebugFunc == F.getName()) return false;
  2516. if (F.getName().startswith("__asan_")) return false;
  2517. bool FunctionModified = false;
  2518. // If needed, insert __asan_init before checking for SanitizeAddress attr.
  2519. // This function needs to be called even if the function body is not
  2520. // instrumented.
  2521. if (maybeInsertAsanInitAtFunctionEntry(F))
  2522. FunctionModified = true;
  2523. // Leave if the function doesn't need instrumentation.
  2524. if (!F.hasFnAttribute(Attribute::SanitizeAddress)) return FunctionModified;
  2525. LLVM_DEBUG(dbgs() << "ASAN instrumenting:\n" << F << "\n");
  2526. initializeCallbacks(*F.getParent());
  2527. FunctionStateRAII CleanupObj(this);
  2528. FunctionModified |= maybeInsertDynamicShadowAtFunctionEntry(F);
  2529. // We can't instrument allocas used with llvm.localescape. Only static allocas
  2530. // can be passed to that intrinsic.
  2531. markEscapedLocalAllocas(F);
  2532. // We want to instrument every address only once per basic block (unless there
  2533. // are calls between uses).
  2534. SmallPtrSet<Value *, 16> TempsToInstrument;
  2535. SmallVector<InterestingMemoryOperand, 16> OperandsToInstrument;
  2536. SmallVector<MemIntrinsic *, 16> IntrinToInstrument;
  2537. SmallVector<Instruction *, 8> NoReturnCalls;
  2538. SmallVector<BasicBlock *, 16> AllBlocks;
  2539. SmallVector<Instruction *, 16> PointerComparisonsOrSubtracts;
  2540. int NumAllocas = 0;
  2541. // Fill the set of memory operations to instrument.
  2542. for (auto &BB : F) {
  2543. AllBlocks.push_back(&BB);
  2544. TempsToInstrument.clear();
  2545. int NumInsnsPerBB = 0;
  2546. for (auto &Inst : BB) {
  2547. if (LooksLikeCodeInBug11395(&Inst)) return false;
  2548. SmallVector<InterestingMemoryOperand, 1> InterestingOperands;
  2549. getInterestingMemoryOperands(&Inst, InterestingOperands);
  2550. if (!InterestingOperands.empty()) {
  2551. for (auto &Operand : InterestingOperands) {
  2552. if (ClOpt && ClOptSameTemp) {
  2553. Value *Ptr = Operand.getPtr();
  2554. // If we have a mask, skip instrumentation if we've already
  2555. // instrumented the full object. But don't add to TempsToInstrument
  2556. // because we might get another load/store with a different mask.
  2557. if (Operand.MaybeMask) {
  2558. if (TempsToInstrument.count(Ptr))
  2559. continue; // We've seen this (whole) temp in the current BB.
  2560. } else {
  2561. if (!TempsToInstrument.insert(Ptr).second)
  2562. continue; // We've seen this temp in the current BB.
  2563. }
  2564. }
  2565. OperandsToInstrument.push_back(Operand);
  2566. NumInsnsPerBB++;
  2567. }
  2568. } else if (((ClInvalidPointerPairs || ClInvalidPointerCmp) &&
  2569. isInterestingPointerComparison(&Inst)) ||
  2570. ((ClInvalidPointerPairs || ClInvalidPointerSub) &&
  2571. isInterestingPointerSubtraction(&Inst))) {
  2572. PointerComparisonsOrSubtracts.push_back(&Inst);
  2573. } else if (MemIntrinsic *MI = dyn_cast<MemIntrinsic>(&Inst)) {
  2574. // ok, take it.
  2575. IntrinToInstrument.push_back(MI);
  2576. NumInsnsPerBB++;
  2577. } else {
  2578. if (isa<AllocaInst>(Inst)) NumAllocas++;
  2579. if (auto *CB = dyn_cast<CallBase>(&Inst)) {
  2580. // A call inside BB.
  2581. TempsToInstrument.clear();
  2582. if (CB->doesNotReturn() && !CB->hasMetadata("nosanitize"))
  2583. NoReturnCalls.push_back(CB);
  2584. }
  2585. if (CallInst *CI = dyn_cast<CallInst>(&Inst))
  2586. maybeMarkSanitizerLibraryCallNoBuiltin(CI, TLI);
  2587. }
  2588. if (NumInsnsPerBB >= ClMaxInsnsToInstrumentPerBB) break;
  2589. }
  2590. }
  2591. bool UseCalls = (ClInstrumentationWithCallsThreshold >= 0 &&
  2592. OperandsToInstrument.size() + IntrinToInstrument.size() >
  2593. (unsigned)ClInstrumentationWithCallsThreshold);
  2594. const DataLayout &DL = F.getParent()->getDataLayout();
  2595. ObjectSizeOpts ObjSizeOpts;
  2596. ObjSizeOpts.RoundToAlign = true;
  2597. ObjectSizeOffsetVisitor ObjSizeVis(DL, TLI, F.getContext(), ObjSizeOpts);
  2598. // Instrument.
  2599. int NumInstrumented = 0;
  2600. for (auto &Operand : OperandsToInstrument) {
  2601. if (!suppressInstrumentationSiteForDebug(NumInstrumented))
  2602. instrumentMop(ObjSizeVis, Operand, UseCalls,
  2603. F.getParent()->getDataLayout());
  2604. FunctionModified = true;
  2605. }
  2606. for (auto Inst : IntrinToInstrument) {
  2607. if (!suppressInstrumentationSiteForDebug(NumInstrumented))
  2608. instrumentMemIntrinsic(Inst);
  2609. FunctionModified = true;
  2610. }
  2611. FunctionStackPoisoner FSP(F, *this);
  2612. bool ChangedStack = FSP.runOnFunction();
  2613. // We must unpoison the stack before NoReturn calls (throw, _exit, etc).
  2614. // See e.g. https://github.com/google/sanitizers/issues/37
  2615. for (auto CI : NoReturnCalls) {
  2616. IRBuilder<> IRB(CI);
  2617. IRB.CreateCall(AsanHandleNoReturnFunc, {});
  2618. }
  2619. for (auto Inst : PointerComparisonsOrSubtracts) {
  2620. instrumentPointerComparisonOrSubtraction(Inst);
  2621. FunctionModified = true;
  2622. }
  2623. if (ChangedStack || !NoReturnCalls.empty())
  2624. FunctionModified = true;
  2625. LLVM_DEBUG(dbgs() << "ASAN done instrumenting: " << FunctionModified << " "
  2626. << F << "\n");
  2627. return FunctionModified;
  2628. }
  2629. // Workaround for bug 11395: we don't want to instrument stack in functions
  2630. // with large assembly blobs (32-bit only), otherwise reg alloc may crash.
  2631. // FIXME: remove once the bug 11395 is fixed.
  2632. bool AddressSanitizer::LooksLikeCodeInBug11395(Instruction *I) {
  2633. if (LongSize != 32) return false;
  2634. CallInst *CI = dyn_cast<CallInst>(I);
  2635. if (!CI || !CI->isInlineAsm()) return false;
  2636. if (CI->arg_size() <= 5)
  2637. return false;
  2638. // We have inline assembly with quite a few arguments.
  2639. return true;
  2640. }
  2641. void FunctionStackPoisoner::initializeCallbacks(Module &M) {
  2642. IRBuilder<> IRB(*C);
  2643. if (ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Always ||
  2644. ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Runtime) {
  2645. const char *MallocNameTemplate =
  2646. ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Always
  2647. ? kAsanStackMallocAlwaysNameTemplate
  2648. : kAsanStackMallocNameTemplate;
  2649. for (int Index = 0; Index <= kMaxAsanStackMallocSizeClass; Index++) {
  2650. std::string Suffix = itostr(Index);
  2651. AsanStackMallocFunc[Index] = M.getOrInsertFunction(
  2652. MallocNameTemplate + Suffix, IntptrTy, IntptrTy);
  2653. AsanStackFreeFunc[Index] =
  2654. M.getOrInsertFunction(kAsanStackFreeNameTemplate + Suffix,
  2655. IRB.getVoidTy(), IntptrTy, IntptrTy);
  2656. }
  2657. }
  2658. if (ASan.UseAfterScope) {
  2659. AsanPoisonStackMemoryFunc = M.getOrInsertFunction(
  2660. kAsanPoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2661. AsanUnpoisonStackMemoryFunc = M.getOrInsertFunction(
  2662. kAsanUnpoisonStackMemoryName, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2663. }
  2664. for (size_t Val : {0x00, 0xf1, 0xf2, 0xf3, 0xf5, 0xf8}) {
  2665. std::ostringstream Name;
  2666. Name << kAsanSetShadowPrefix;
  2667. Name << std::setw(2) << std::setfill('0') << std::hex << Val;
  2668. AsanSetShadowFunc[Val] =
  2669. M.getOrInsertFunction(Name.str(), IRB.getVoidTy(), IntptrTy, IntptrTy);
  2670. }
  2671. AsanAllocaPoisonFunc = M.getOrInsertFunction(
  2672. kAsanAllocaPoison, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2673. AsanAllocasUnpoisonFunc = M.getOrInsertFunction(
  2674. kAsanAllocasUnpoison, IRB.getVoidTy(), IntptrTy, IntptrTy);
  2675. }
  2676. void FunctionStackPoisoner::copyToShadowInline(ArrayRef<uint8_t> ShadowMask,
  2677. ArrayRef<uint8_t> ShadowBytes,
  2678. size_t Begin, size_t End,
  2679. IRBuilder<> &IRB,
  2680. Value *ShadowBase) {
  2681. if (Begin >= End)
  2682. return;
  2683. const size_t LargestStoreSizeInBytes =
  2684. std::min<size_t>(sizeof(uint64_t), ASan.LongSize / 8);
  2685. const bool IsLittleEndian = F.getParent()->getDataLayout().isLittleEndian();
2686. // Poison the given range in shadow using the largest store size, skipping
2687. // leading and trailing zeros in ShadowMask. Zeros never change, so they need
2688. // neither poisoning nor un-poisoning. Still, we don't mind if some of them
2689. // end up in the middle of a store.
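// For example (illustrative), on a little-endian 64-bit target, eight
// consecutive masked shadow bytes {0xf1,0xf2,0xf2,0xf2,0xf3,0xf3,0x00,0x00}
// are emitted as a single unaligned 8-byte store of 0x0000f3f3f2f2f2f1.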
  2690. for (size_t i = Begin; i < End;) {
  2691. if (!ShadowMask[i]) {
  2692. assert(!ShadowBytes[i]);
  2693. ++i;
  2694. continue;
  2695. }
  2696. size_t StoreSizeInBytes = LargestStoreSizeInBytes;
  2697. // Fit store size into the range.
  2698. while (StoreSizeInBytes > End - i)
  2699. StoreSizeInBytes /= 2;
  2700. // Minimize store size by trimming trailing zeros.
  2701. for (size_t j = StoreSizeInBytes - 1; j && !ShadowMask[i + j]; --j) {
  2702. while (j <= StoreSizeInBytes / 2)
  2703. StoreSizeInBytes /= 2;
  2704. }
  2705. uint64_t Val = 0;
  2706. for (size_t j = 0; j < StoreSizeInBytes; j++) {
  2707. if (IsLittleEndian)
  2708. Val |= (uint64_t)ShadowBytes[i + j] << (8 * j);
  2709. else
  2710. Val = (Val << 8) | ShadowBytes[i + j];
  2711. }
  2712. Value *Ptr = IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i));
  2713. Value *Poison = IRB.getIntN(StoreSizeInBytes * 8, Val);
  2714. IRB.CreateAlignedStore(
  2715. Poison, IRB.CreateIntToPtr(Ptr, Poison->getType()->getPointerTo()),
  2716. Align(1));
  2717. i += StoreSizeInBytes;
  2718. }
  2719. }
  2720. void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
  2721. ArrayRef<uint8_t> ShadowBytes,
  2722. IRBuilder<> &IRB, Value *ShadowBase) {
  2723. copyToShadow(ShadowMask, ShadowBytes, 0, ShadowMask.size(), IRB, ShadowBase);
  2724. }
  2725. void FunctionStackPoisoner::copyToShadow(ArrayRef<uint8_t> ShadowMask,
  2726. ArrayRef<uint8_t> ShadowBytes,
  2727. size_t Begin, size_t End,
  2728. IRBuilder<> &IRB, Value *ShadowBase) {
  2729. assert(ShadowMask.size() == ShadowBytes.size());
  2730. size_t Done = Begin;
  2731. for (size_t i = Begin, j = Begin + 1; i < End; i = j++) {
  2732. if (!ShadowMask[i]) {
  2733. assert(!ShadowBytes[i]);
  2734. continue;
  2735. }
  2736. uint8_t Val = ShadowBytes[i];
  2737. if (!AsanSetShadowFunc[Val])
  2738. continue;
  2739. // Skip same values.
  2740. for (; j < End && ShadowMask[j] && Val == ShadowBytes[j]; ++j) {
  2741. }
  2742. if (j - i >= ClMaxInlinePoisoningSize) {
  2743. copyToShadowInline(ShadowMask, ShadowBytes, Done, i, IRB, ShadowBase);
  2744. IRB.CreateCall(AsanSetShadowFunc[Val],
  2745. {IRB.CreateAdd(ShadowBase, ConstantInt::get(IntptrTy, i)),
  2746. ConstantInt::get(IntptrTy, j - i)});
  2747. Done = j;
  2748. }
  2749. }
  2750. copyToShadowInline(ShadowMask, ShadowBytes, Done, End, IRB, ShadowBase);
  2751. }
2752. // The fake stack allocator (asan_fake_stack.h) has 11 size classes, one for
2753. // every power of 2 from kMinStackMallocSize up to kMaxAsanStackMallocSizeClass.
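// Illustrative mapping (a sketch, assuming kMinStackMallocSize == 64):
//   LocalStackSize <= 64  -> class 0
//   LocalStackSize <= 128 -> class 1
//   LocalStackSize <= 256 -> class 2, and so on.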
  2754. static int StackMallocSizeClass(uint64_t LocalStackSize) {
  2755. assert(LocalStackSize <= kMaxStackMallocSize);
  2756. uint64_t MaxSize = kMinStackMallocSize;
  2757. for (int i = 0;; i++, MaxSize *= 2)
  2758. if (LocalStackSize <= MaxSize) return i;
  2759. llvm_unreachable("impossible LocalStackSize");
  2760. }
  2761. void FunctionStackPoisoner::copyArgsPassedByValToAllocas() {
  2762. Instruction *CopyInsertPoint = &F.front().front();
  2763. if (CopyInsertPoint == ASan.LocalDynamicShadow) {
  2764. // Insert after the dynamic shadow location is determined
  2765. CopyInsertPoint = CopyInsertPoint->getNextNode();
  2766. assert(CopyInsertPoint);
  2767. }
  2768. IRBuilder<> IRB(CopyInsertPoint);
  2769. const DataLayout &DL = F.getParent()->getDataLayout();
  2770. for (Argument &Arg : F.args()) {
  2771. if (Arg.hasByValAttr()) {
  2772. Type *Ty = Arg.getParamByValType();
  2773. const Align Alignment =
  2774. DL.getValueOrABITypeAlignment(Arg.getParamAlign(), Ty);
  2775. AllocaInst *AI = IRB.CreateAlloca(
  2776. Ty, nullptr,
  2777. (Arg.hasName() ? Arg.getName() : "Arg" + Twine(Arg.getArgNo())) +
  2778. ".byval");
  2779. AI->setAlignment(Alignment);
  2780. Arg.replaceAllUsesWith(AI);
  2781. uint64_t AllocSize = DL.getTypeAllocSize(Ty);
  2782. IRB.CreateMemCpy(AI, Alignment, &Arg, Alignment, AllocSize);
  2783. }
  2784. }
  2785. }
  2786. PHINode *FunctionStackPoisoner::createPHI(IRBuilder<> &IRB, Value *Cond,
  2787. Value *ValueIfTrue,
  2788. Instruction *ThenTerm,
  2789. Value *ValueIfFalse) {
  2790. PHINode *PHI = IRB.CreatePHI(IntptrTy, 2);
  2791. BasicBlock *CondBlock = cast<Instruction>(Cond)->getParent();
  2792. PHI->addIncoming(ValueIfFalse, CondBlock);
  2793. BasicBlock *ThenBlock = ThenTerm->getParent();
  2794. PHI->addIncoming(ValueIfTrue, ThenBlock);
  2795. return PHI;
  2796. }
  2797. Value *FunctionStackPoisoner::createAllocaForLayout(
  2798. IRBuilder<> &IRB, const ASanStackFrameLayout &L, bool Dynamic) {
  2799. AllocaInst *Alloca;
  2800. if (Dynamic) {
  2801. Alloca = IRB.CreateAlloca(IRB.getInt8Ty(),
  2802. ConstantInt::get(IRB.getInt64Ty(), L.FrameSize),
  2803. "MyAlloca");
  2804. } else {
  2805. Alloca = IRB.CreateAlloca(ArrayType::get(IRB.getInt8Ty(), L.FrameSize),
  2806. nullptr, "MyAlloca");
  2807. assert(Alloca->isStaticAlloca());
  2808. }
  2809. assert((ClRealignStack & (ClRealignStack - 1)) == 0);
  2810. uint64_t FrameAlignment = std::max(L.FrameAlignment, uint64_t(ClRealignStack));
  2811. Alloca->setAlignment(Align(FrameAlignment));
  2812. return IRB.CreatePointerCast(Alloca, IntptrTy);
  2813. }
  2814. void FunctionStackPoisoner::createDynamicAllocasInitStorage() {
  2815. BasicBlock &FirstBB = *F.begin();
  2816. IRBuilder<> IRB(dyn_cast<Instruction>(FirstBB.begin()));
  2817. DynamicAllocaLayout = IRB.CreateAlloca(IntptrTy, nullptr);
  2818. IRB.CreateStore(Constant::getNullValue(IntptrTy), DynamicAllocaLayout);
  2819. DynamicAllocaLayout->setAlignment(Align(32));
  2820. }
  2821. void FunctionStackPoisoner::processDynamicAllocas() {
  2822. if (!ClInstrumentDynamicAllocas || DynamicAllocaVec.empty()) {
  2823. assert(DynamicAllocaPoisonCallVec.empty());
  2824. return;
  2825. }
  2826. // Insert poison calls for lifetime intrinsics for dynamic allocas.
  2827. for (const auto &APC : DynamicAllocaPoisonCallVec) {
  2828. assert(APC.InsBefore);
  2829. assert(APC.AI);
  2830. assert(ASan.isInterestingAlloca(*APC.AI));
  2831. assert(!APC.AI->isStaticAlloca());
  2832. IRBuilder<> IRB(APC.InsBefore);
  2833. poisonAlloca(APC.AI, APC.Size, IRB, APC.DoPoison);
  2834. // Dynamic allocas will be unpoisoned unconditionally below in
  2835. // unpoisonDynamicAllocas.
2836. // Flag that we need to unpoison static allocas.
  2837. }
  2838. // Handle dynamic allocas.
  2839. createDynamicAllocasInitStorage();
  2840. for (auto &AI : DynamicAllocaVec)
  2841. handleDynamicAllocaCall(AI);
  2842. unpoisonDynamicAllocas();
  2843. }
  2844. /// Collect instructions in the entry block after \p InsBefore which initialize
  2845. /// permanent storage for a function argument. These instructions must remain in
  2846. /// the entry block so that uninitialized values do not appear in backtraces. An
  2847. /// added benefit is that this conserves spill slots. This does not move stores
  2848. /// before instrumented / "interesting" allocas.
  2849. static void findStoresToUninstrumentedArgAllocas(
  2850. AddressSanitizer &ASan, Instruction &InsBefore,
  2851. SmallVectorImpl<Instruction *> &InitInsts) {
  2852. Instruction *Start = InsBefore.getNextNonDebugInstruction();
  2853. for (Instruction *It = Start; It; It = It->getNextNonDebugInstruction()) {
  2854. // Argument initialization looks like:
  2855. // 1) store <Argument>, <Alloca> OR
  2856. // 2) <CastArgument> = cast <Argument> to ...
  2857. // store <CastArgument> to <Alloca>
  2858. // Do not consider any other kind of instruction.
  2859. //
  2860. // Note: This covers all known cases, but may not be exhaustive. An
  2861. // alternative to pattern-matching stores is to DFS over all Argument uses:
  2862. // this might be more general, but is probably much more complicated.
  2863. if (isa<AllocaInst>(It) || isa<CastInst>(It))
  2864. continue;
  2865. if (auto *Store = dyn_cast<StoreInst>(It)) {
  2866. // The store destination must be an alloca that isn't interesting for
  2867. // ASan to instrument. These are moved up before InsBefore, and they're
  2868. // not interesting because allocas for arguments can be mem2reg'd.
  2869. auto *Alloca = dyn_cast<AllocaInst>(Store->getPointerOperand());
  2870. if (!Alloca || ASan.isInterestingAlloca(*Alloca))
  2871. continue;
  2872. Value *Val = Store->getValueOperand();
  2873. bool IsDirectArgInit = isa<Argument>(Val);
  2874. bool IsArgInitViaCast =
  2875. isa<CastInst>(Val) &&
  2876. isa<Argument>(cast<CastInst>(Val)->getOperand(0)) &&
  2877. // Check that the cast appears directly before the store. Otherwise
  2878. // moving the cast before InsBefore may break the IR.
  2879. Val == It->getPrevNonDebugInstruction();
  2880. bool IsArgInit = IsDirectArgInit || IsArgInitViaCast;
  2881. if (!IsArgInit)
  2882. continue;
  2883. if (IsArgInitViaCast)
  2884. InitInsts.push_back(cast<Instruction>(Val));
  2885. InitInsts.push_back(Store);
  2886. continue;
  2887. }
  2888. // Do not reorder past unknown instructions: argument initialization should
  2889. // only involve casts and stores.
  2890. return;
  2891. }
  2892. }
  2893. void FunctionStackPoisoner::processStaticAllocas() {
  2894. if (AllocaVec.empty()) {
  2895. assert(StaticAllocaPoisonCallVec.empty());
  2896. return;
  2897. }
  2898. int StackMallocIdx = -1;
  2899. DebugLoc EntryDebugLocation;
  2900. if (auto SP = F.getSubprogram())
  2901. EntryDebugLocation =
  2902. DILocation::get(SP->getContext(), SP->getScopeLine(), 0, SP);
  2903. Instruction *InsBefore = AllocaVec[0];
  2904. IRBuilder<> IRB(InsBefore);
  2905. // Make sure non-instrumented allocas stay in the entry block. Otherwise,
  2906. // debug info is broken, because only entry-block allocas are treated as
  2907. // regular stack slots.
  2908. auto InsBeforeB = InsBefore->getParent();
  2909. assert(InsBeforeB == &F.getEntryBlock());
  2910. for (auto *AI : StaticAllocasToMoveUp)
  2911. if (AI->getParent() == InsBeforeB)
  2912. AI->moveBefore(InsBefore);
  2913. // Move stores of arguments into entry-block allocas as well. This prevents
  2914. // extra stack slots from being generated (to house the argument values until
  2915. // they can be stored into the allocas). This also prevents uninitialized
  2916. // values from being shown in backtraces.
  2917. SmallVector<Instruction *, 8> ArgInitInsts;
  2918. findStoresToUninstrumentedArgAllocas(ASan, *InsBefore, ArgInitInsts);
  2919. for (Instruction *ArgInitInst : ArgInitInsts)
  2920. ArgInitInst->moveBefore(InsBefore);
  2921. // If we have a call to llvm.localescape, keep it in the entry block.
  2922. if (LocalEscapeCall) LocalEscapeCall->moveBefore(InsBefore);
  2923. SmallVector<ASanStackVariableDescription, 16> SVD;
  2924. SVD.reserve(AllocaVec.size());
  2925. for (AllocaInst *AI : AllocaVec) {
  2926. ASanStackVariableDescription D = {AI->getName().data(),
  2927. ASan.getAllocaSizeInBytes(*AI),
  2928. 0,
  2929. AI->getAlignment(),
  2930. AI,
  2931. 0,
  2932. 0};
  2933. SVD.push_back(D);
  2934. }
  2935. // Minimal header size (left redzone) is 4 pointers,
2936. // i.e. 32 bytes on 64-bit platforms and 16 bytes on 32-bit platforms.
  2937. uint64_t Granularity = 1ULL << Mapping.Scale;
  2938. uint64_t MinHeaderSize = std::max((uint64_t)ASan.LongSize / 2, Granularity);
  2939. const ASanStackFrameLayout &L =
  2940. ComputeASanStackFrameLayout(SVD, Granularity, MinHeaderSize);
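// E.g. on a typical 64-bit target with Mapping.Scale == 3 (illustrative),
// Granularity == 8 and MinHeaderSize == max(64 / 2, 8) == 32 bytes.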
  2941. // Build AllocaToSVDMap for ASanStackVariableDescription lookup.
  2942. DenseMap<const AllocaInst *, ASanStackVariableDescription *> AllocaToSVDMap;
  2943. for (auto &Desc : SVD)
  2944. AllocaToSVDMap[Desc.AI] = &Desc;
  2945. // Update SVD with information from lifetime intrinsics.
  2946. for (const auto &APC : StaticAllocaPoisonCallVec) {
  2947. assert(APC.InsBefore);
  2948. assert(APC.AI);
  2949. assert(ASan.isInterestingAlloca(*APC.AI));
  2950. assert(APC.AI->isStaticAlloca());
  2951. ASanStackVariableDescription &Desc = *AllocaToSVDMap[APC.AI];
  2952. Desc.LifetimeSize = Desc.Size;
  2953. if (const DILocation *FnLoc = EntryDebugLocation.get()) {
  2954. if (const DILocation *LifetimeLoc = APC.InsBefore->getDebugLoc().get()) {
  2955. if (LifetimeLoc->getFile() == FnLoc->getFile())
  2956. if (unsigned Line = LifetimeLoc->getLine())
  2957. Desc.Line = std::min(Desc.Line ? Desc.Line : Line, Line);
  2958. }
  2959. }
  2960. }
  2961. auto DescriptionString = ComputeASanStackFrameDescription(SVD);
  2962. LLVM_DEBUG(dbgs() << DescriptionString << " --- " << L.FrameSize << "\n");
  2963. uint64_t LocalStackSize = L.FrameSize;
  2964. bool DoStackMalloc =
  2965. ASan.UseAfterReturn != AsanDetectStackUseAfterReturnMode::Never &&
  2966. !ASan.CompileKernel && LocalStackSize <= kMaxStackMallocSize;
  2967. bool DoDynamicAlloca = ClDynamicAllocaStack;
  2968. // Don't do dynamic alloca or stack malloc if:
  2969. // 1) There is inline asm: too often it makes assumptions on which registers
  2970. // are available.
  2971. // 2) There is a returns_twice call (typically setjmp), which is
  2972. // optimization-hostile, and doesn't play well with introduced indirect
  2973. // register-relative calculation of local variable addresses.
  2974. DoDynamicAlloca &= !HasInlineAsm && !HasReturnsTwiceCall;
  2975. DoStackMalloc &= !HasInlineAsm && !HasReturnsTwiceCall;
  2976. Value *StaticAlloca =
  2977. DoDynamicAlloca ? nullptr : createAllocaForLayout(IRB, L, false);
  2978. Value *FakeStack;
  2979. Value *LocalStackBase;
  2980. Value *LocalStackBaseAlloca;
  2981. uint8_t DIExprFlags = DIExpression::ApplyOffset;
  2982. if (DoStackMalloc) {
  2983. LocalStackBaseAlloca =
  2984. IRB.CreateAlloca(IntptrTy, nullptr, "asan_local_stack_base");
  2985. if (ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Runtime) {
  2986. // void *FakeStack = __asan_option_detect_stack_use_after_return
  2987. // ? __asan_stack_malloc_N(LocalStackSize)
  2988. // : nullptr;
  2989. // void *LocalStackBase = (FakeStack) ? FakeStack :
  2990. // alloca(LocalStackSize);
  2991. Constant *OptionDetectUseAfterReturn = F.getParent()->getOrInsertGlobal(
  2992. kAsanOptionDetectUseAfterReturn, IRB.getInt32Ty());
  2993. Value *UseAfterReturnIsEnabled = IRB.CreateICmpNE(
  2994. IRB.CreateLoad(IRB.getInt32Ty(), OptionDetectUseAfterReturn),
  2995. Constant::getNullValue(IRB.getInt32Ty()));
  2996. Instruction *Term =
  2997. SplitBlockAndInsertIfThen(UseAfterReturnIsEnabled, InsBefore, false);
  2998. IRBuilder<> IRBIf(Term);
  2999. StackMallocIdx = StackMallocSizeClass(LocalStackSize);
  3000. assert(StackMallocIdx <= kMaxAsanStackMallocSizeClass);
  3001. Value *FakeStackValue =
  3002. IRBIf.CreateCall(AsanStackMallocFunc[StackMallocIdx],
  3003. ConstantInt::get(IntptrTy, LocalStackSize));
  3004. IRB.SetInsertPoint(InsBefore);
  3005. FakeStack = createPHI(IRB, UseAfterReturnIsEnabled, FakeStackValue, Term,
  3006. ConstantInt::get(IntptrTy, 0));
  3007. } else {
3008. // assert(ASan.UseAfterReturn == AsanDetectStackUseAfterReturnMode::Always)
  3009. // void *FakeStack = __asan_stack_malloc_N(LocalStackSize);
  3010. // void *LocalStackBase = (FakeStack) ? FakeStack :
  3011. // alloca(LocalStackSize);
  3012. StackMallocIdx = StackMallocSizeClass(LocalStackSize);
  3013. FakeStack = IRB.CreateCall(AsanStackMallocFunc[StackMallocIdx],
  3014. ConstantInt::get(IntptrTy, LocalStackSize));
  3015. }
  3016. Value *NoFakeStack =
  3017. IRB.CreateICmpEQ(FakeStack, Constant::getNullValue(IntptrTy));
  3018. Instruction *Term =
  3019. SplitBlockAndInsertIfThen(NoFakeStack, InsBefore, false);
  3020. IRBuilder<> IRBIf(Term);
  3021. Value *AllocaValue =
  3022. DoDynamicAlloca ? createAllocaForLayout(IRBIf, L, true) : StaticAlloca;
  3023. IRB.SetInsertPoint(InsBefore);
  3024. LocalStackBase = createPHI(IRB, NoFakeStack, AllocaValue, Term, FakeStack);
  3025. IRB.CreateStore(LocalStackBase, LocalStackBaseAlloca);
  3026. DIExprFlags |= DIExpression::DerefBefore;
  3027. } else {
  3028. // void *FakeStack = nullptr;
  3029. // void *LocalStackBase = alloca(LocalStackSize);
  3030. FakeStack = ConstantInt::get(IntptrTy, 0);
  3031. LocalStackBase =
  3032. DoDynamicAlloca ? createAllocaForLayout(IRB, L, true) : StaticAlloca;
  3033. LocalStackBaseAlloca = LocalStackBase;
  3034. }
  3035. // It shouldn't matter whether we pass an `alloca` or a `ptrtoint` as the
3036. // dbg.declare address operand, but passing a `ptrtoint` seems to confuse
  3037. // later passes and can result in dropped variable coverage in debug info.
  3038. Value *LocalStackBaseAllocaPtr =
  3039. isa<PtrToIntInst>(LocalStackBaseAlloca)
  3040. ? cast<PtrToIntInst>(LocalStackBaseAlloca)->getPointerOperand()
  3041. : LocalStackBaseAlloca;
  3042. assert(isa<AllocaInst>(LocalStackBaseAllocaPtr) &&
  3043. "Variable descriptions relative to ASan stack base will be dropped");
  3044. // Replace Alloca instructions with base+offset.
  3045. for (const auto &Desc : SVD) {
  3046. AllocaInst *AI = Desc.AI;
  3047. replaceDbgDeclare(AI, LocalStackBaseAllocaPtr, DIB, DIExprFlags,
  3048. Desc.Offset);
  3049. Value *NewAllocaPtr = IRB.CreateIntToPtr(
  3050. IRB.CreateAdd(LocalStackBase, ConstantInt::get(IntptrTy, Desc.Offset)),
  3051. AI->getType());
  3052. AI->replaceAllUsesWith(NewAllocaPtr);
  3053. }
  3054. // The left-most redzone has enough space for at least 4 pointers.
  3055. // Write the Magic value to redzone[0].
  3056. Value *BasePlus0 = IRB.CreateIntToPtr(LocalStackBase, IntptrPtrTy);
  3057. IRB.CreateStore(ConstantInt::get(IntptrTy, kCurrentStackFrameMagic),
  3058. BasePlus0);
  3059. // Write the frame description constant to redzone[1].
  3060. Value *BasePlus1 = IRB.CreateIntToPtr(
  3061. IRB.CreateAdd(LocalStackBase,
  3062. ConstantInt::get(IntptrTy, ASan.LongSize / 8)),
  3063. IntptrPtrTy);
  3064. GlobalVariable *StackDescriptionGlobal =
  3065. createPrivateGlobalForString(*F.getParent(), DescriptionString,
  3066. /*AllowMerging*/ true, kAsanGenPrefix);
  3067. Value *Description = IRB.CreatePointerCast(StackDescriptionGlobal, IntptrTy);
  3068. IRB.CreateStore(Description, BasePlus1);
  3069. // Write the PC to redzone[2].
  3070. Value *BasePlus2 = IRB.CreateIntToPtr(
  3071. IRB.CreateAdd(LocalStackBase,
  3072. ConstantInt::get(IntptrTy, 2 * ASan.LongSize / 8)),
  3073. IntptrPtrTy);
  3074. IRB.CreateStore(IRB.CreatePointerCast(&F, IntptrTy), BasePlus2);
  3075. const auto &ShadowAfterScope = GetShadowBytesAfterScope(SVD, L);
  3076. // Poison the stack red zones at the entry.
  3077. Value *ShadowBase = ASan.memToShadow(LocalStackBase, IRB);
3078. // As the mask we must use the most poisoned case: redzones and after-scope.
3079. // As the bytes we can use either the same, or just the redzones.
  3080. copyToShadow(ShadowAfterScope, ShadowAfterScope, IRB, ShadowBase);
  3081. if (!StaticAllocaPoisonCallVec.empty()) {
  3082. const auto &ShadowInScope = GetShadowBytes(SVD, L);
  3083. // Poison static allocas near lifetime intrinsics.
  3084. for (const auto &APC : StaticAllocaPoisonCallVec) {
  3085. const ASanStackVariableDescription &Desc = *AllocaToSVDMap[APC.AI];
  3086. assert(Desc.Offset % L.Granularity == 0);
  3087. size_t Begin = Desc.Offset / L.Granularity;
  3088. size_t End = Begin + (APC.Size + L.Granularity - 1) / L.Granularity;
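// E.g. (illustrative) with Granularity == 8, a variable at frame offset 64
// with a 20-byte lifetime region maps to shadow bytes [8, 11).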
  3089. IRBuilder<> IRB(APC.InsBefore);
  3090. copyToShadow(ShadowAfterScope,
  3091. APC.DoPoison ? ShadowAfterScope : ShadowInScope, Begin, End,
  3092. IRB, ShadowBase);
  3093. }
  3094. }
  3095. SmallVector<uint8_t, 64> ShadowClean(ShadowAfterScope.size(), 0);
  3096. SmallVector<uint8_t, 64> ShadowAfterReturn;
  3097. // (Un)poison the stack before all ret instructions.
  3098. for (Instruction *Ret : RetVec) {
  3099. IRBuilder<> IRBRet(Ret);
  3100. // Mark the current frame as retired.
  3101. IRBRet.CreateStore(ConstantInt::get(IntptrTy, kRetiredStackFrameMagic),
  3102. BasePlus0);
  3103. if (DoStackMalloc) {
  3104. assert(StackMallocIdx >= 0);
  3105. // if FakeStack != 0 // LocalStackBase == FakeStack
  3106. // // In use-after-return mode, poison the whole stack frame.
  3107. // if StackMallocIdx <= 4
  3108. // // For small sizes inline the whole thing:
  3109. // memset(ShadowBase, kAsanStackAfterReturnMagic, ShadowSize);
  3110. // **SavedFlagPtr(FakeStack) = 0
  3111. // else
  3112. // __asan_stack_free_N(FakeStack, LocalStackSize)
  3113. // else
  3114. // <This is not a fake stack; unpoison the redzones>
  3115. Value *Cmp =
  3116. IRBRet.CreateICmpNE(FakeStack, Constant::getNullValue(IntptrTy));
  3117. Instruction *ThenTerm, *ElseTerm;
  3118. SplitBlockAndInsertIfThenElse(Cmp, Ret, &ThenTerm, &ElseTerm);
  3119. IRBuilder<> IRBPoison(ThenTerm);
  3120. if (StackMallocIdx <= 4) {
  3121. int ClassSize = kMinStackMallocSize << StackMallocIdx;
  3122. ShadowAfterReturn.resize(ClassSize / L.Granularity,
  3123. kAsanStackUseAfterReturnMagic);
  3124. copyToShadow(ShadowAfterReturn, ShadowAfterReturn, IRBPoison,
  3125. ShadowBase);
  3126. Value *SavedFlagPtrPtr = IRBPoison.CreateAdd(
  3127. FakeStack,
  3128. ConstantInt::get(IntptrTy, ClassSize - ASan.LongSize / 8));
  3129. Value *SavedFlagPtr = IRBPoison.CreateLoad(
  3130. IntptrTy, IRBPoison.CreateIntToPtr(SavedFlagPtrPtr, IntptrPtrTy));
  3131. IRBPoison.CreateStore(
  3132. Constant::getNullValue(IRBPoison.getInt8Ty()),
  3133. IRBPoison.CreateIntToPtr(SavedFlagPtr, IRBPoison.getInt8PtrTy()));
  3134. } else {
  3135. // For larger frames call __asan_stack_free_*.
  3136. IRBPoison.CreateCall(
  3137. AsanStackFreeFunc[StackMallocIdx],
  3138. {FakeStack, ConstantInt::get(IntptrTy, LocalStackSize)});
  3139. }
  3140. IRBuilder<> IRBElse(ElseTerm);
  3141. copyToShadow(ShadowAfterScope, ShadowClean, IRBElse, ShadowBase);
  3142. } else {
  3143. copyToShadow(ShadowAfterScope, ShadowClean, IRBRet, ShadowBase);
  3144. }
  3145. }
  3146. // We are done. Remove the old unused alloca instructions.
  3147. for (auto AI : AllocaVec) AI->eraseFromParent();
  3148. }
  3149. void FunctionStackPoisoner::poisonAlloca(Value *V, uint64_t Size,
  3150. IRBuilder<> &IRB, bool DoPoison) {
  3151. // For now just insert the call to ASan runtime.
  3152. Value *AddrArg = IRB.CreatePointerCast(V, IntptrTy);
  3153. Value *SizeArg = ConstantInt::get(IntptrTy, Size);
  3154. IRB.CreateCall(
  3155. DoPoison ? AsanPoisonStackMemoryFunc : AsanUnpoisonStackMemoryFunc,
  3156. {AddrArg, SizeArg});
  3157. }
  3158. // Handling llvm.lifetime intrinsics for a given %alloca:
  3159. // (1) collect all llvm.lifetime.xxx(%size, %value) describing the alloca.
  3160. // (2) if %size is constant, poison memory for llvm.lifetime.end (to detect
  3161. // invalid accesses) and unpoison it for llvm.lifetime.start (the memory
  3162. // could be poisoned by previous llvm.lifetime.end instruction, as the
  3163. // variable may go in and out of scope several times, e.g. in loops).
  3164. // (3) if we poisoned at least one %alloca in a function,
  3165. // unpoison the whole stack frame at function exit.
  3166. void FunctionStackPoisoner::handleDynamicAllocaCall(AllocaInst *AI) {
  3167. IRBuilder<> IRB(AI);
  3168. const uint64_t Alignment = std::max(kAllocaRzSize, AI->getAlignment());
  3169. const uint64_t AllocaRedzoneMask = kAllocaRzSize - 1;
  3170. Value *Zero = Constant::getNullValue(IntptrTy);
  3171. Value *AllocaRzSize = ConstantInt::get(IntptrTy, kAllocaRzSize);
  3172. Value *AllocaRzMask = ConstantInt::get(IntptrTy, AllocaRedzoneMask);
3173. // Since we need to extend the alloca with additional memory to hold the
3174. // redzones, and the alloca's array size is the number of allocated elements
3175. // of ElementSize bytes each, compute the allocated memory size in bytes as
3176. // ArraySize * ElementSize.
  3177. const unsigned ElementSize =
  3178. F.getParent()->getDataLayout().getTypeAllocSize(AI->getAllocatedType());
  3179. Value *OldSize =
  3180. IRB.CreateMul(IRB.CreateIntCast(AI->getArraySize(), IntptrTy, false),
  3181. ConstantInt::get(IntptrTy, ElementSize));
  3182. // PartialSize = OldSize % 32
  3183. Value *PartialSize = IRB.CreateAnd(OldSize, AllocaRzMask);
  3184. // Misalign = kAllocaRzSize - PartialSize;
  3185. Value *Misalign = IRB.CreateSub(AllocaRzSize, PartialSize);
  3186. // PartialPadding = Misalign != kAllocaRzSize ? Misalign : 0;
  3187. Value *Cond = IRB.CreateICmpNE(Misalign, AllocaRzSize);
  3188. Value *PartialPadding = IRB.CreateSelect(Cond, Misalign, Zero);
  3189. // AdditionalChunkSize = Alignment + PartialPadding + kAllocaRzSize
  3190. // Alignment is added to locate left redzone, PartialPadding for possible
  3191. // partial redzone and kAllocaRzSize for right redzone respectively.
  3192. Value *AdditionalChunkSize = IRB.CreateAdd(
  3193. ConstantInt::get(IntptrTy, Alignment + kAllocaRzSize), PartialPadding);
  3194. Value *NewSize = IRB.CreateAdd(OldSize, AdditionalChunkSize);
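// Worked example (a sketch, assuming kAllocaRzSize == 32 and Alignment == 32):
// OldSize == 100 gives PartialSize == 4, Misalign == 28, PartialPadding == 28,
// AdditionalChunkSize == 32 + 32 + 28 == 92, and NewSize == 192, leaving room
// for a 32-byte left redzone and a padded right redzone after the user data.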
  3195. // Insert new alloca with new NewSize and Alignment params.
  3196. AllocaInst *NewAlloca = IRB.CreateAlloca(IRB.getInt8Ty(), NewSize);
  3197. NewAlloca->setAlignment(Align(Alignment));
  3198. // NewAddress = Address + Alignment
  3199. Value *NewAddress = IRB.CreateAdd(IRB.CreatePtrToInt(NewAlloca, IntptrTy),
  3200. ConstantInt::get(IntptrTy, Alignment));
3201. // Insert an __asan_alloca_poison call for the newly created alloca.
  3202. IRB.CreateCall(AsanAllocaPoisonFunc, {NewAddress, OldSize});
3203. // Store the last alloca's address to DynamicAllocaLayout. We'll need it
3204. // later to unpoison the dynamic allocas.
  3205. IRB.CreateStore(IRB.CreatePtrToInt(NewAlloca, IntptrTy), DynamicAllocaLayout);
  3206. Value *NewAddressPtr = IRB.CreateIntToPtr(NewAddress, AI->getType());
3207. // Replace all uses of the address returned by the old alloca with NewAddressPtr.
  3208. AI->replaceAllUsesWith(NewAddressPtr);
  3209. // We are done. Erase old alloca from parent.
  3210. AI->eraseFromParent();
  3211. }
  3212. // isSafeAccess returns true if Addr is always inbounds with respect to its
  3213. // base object. For example, it is a field access or an array access with
  3214. // constant inbounds index.
  3215. bool AddressSanitizer::isSafeAccess(ObjectSizeOffsetVisitor &ObjSizeVis,
  3216. Value *Addr, uint64_t TypeSize) const {
  3217. SizeOffsetType SizeOffset = ObjSizeVis.compute(Addr);
  3218. if (!ObjSizeVis.bothKnown(SizeOffset)) return false;
  3219. uint64_t Size = SizeOffset.first.getZExtValue();
  3220. int64_t Offset = SizeOffset.second.getSExtValue();
  3221. // Three checks are required to ensure safety:
  3222. // . Offset >= 0 (since the offset is given from the base ptr)
  3223. // . Size >= Offset (unsigned)
  3224. // . Size - Offset >= NeededSize (unsigned)
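// For example (illustrative), a 4-byte load at constant offset 16 into a
// 64-byte object satisfies all three checks (16 >= 0, 64 >= 16, 48 >= 4),
// so isSafeAccess returns true.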
  3225. return Offset >= 0 && Size >= uint64_t(Offset) &&
  3226. Size - uint64_t(Offset) >= TypeSize / 8;
  3227. }