StackProtector.cpp

//===- StackProtector.cpp - Stack Protector Insertion ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass inserts stack protectors into functions which need them. A variable
// with a random value in it is stored onto the stack before the local variables
// are allocated. Upon exiting the block, the stored value is checked. If it's
// changed, then there was some sort of violation and the program aborts.
//
//===----------------------------------------------------------------------===//
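
// A rough sketch of what the pass emits for a protected function (the exact
// guard access is target-dependent; see CreatePrologue() and
// InsertStackProtectors() below for the precise IR):
//
//   entry:
//     %StackGuardSlot = alloca i8*
//     call void @llvm.stackprotector(<stack guard>, %StackGuardSlot)
//     ...
//   ; before each return (or checked noreturn call):
//     compare <stack guard> against the value saved in %StackGuardSlot
//     br i1 <mismatch>, label %CallStackCheckFailBlk, label %SP_return
//
//   CallStackCheckFailBlk:
//     call void @__stack_chk_fail()
//     unreachable
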
#include "llvm/CodeGen/StackProtector.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/Analysis/BranchProbabilityInfo.h"
#include "llvm/Analysis/EHPersonalities.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/OptimizationRemarkEmitter.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DataLayout.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instruction.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/IR/MDBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/User.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Target/TargetOptions.h"
#include "llvm/Transforms/Utils/BasicBlockUtils.h"
#include <optional>
#include <utility>

using namespace llvm;

#define DEBUG_TYPE "stack-protector"

STATISTIC(NumFunProtected, "Number of functions protected");
STATISTIC(NumAddrTaken, "Number of local variables that have their address"
                        " taken.");

static cl::opt<bool> EnableSelectionDAGSP("enable-selectiondag-sp",
                                          cl::init(true), cl::Hidden);
static cl::opt<bool> DisableCheckNoReturn("disable-check-noreturn-call",
                                          cl::init(false), cl::Hidden);

char StackProtector::ID = 0;

StackProtector::StackProtector() : FunctionPass(ID) {
  initializeStackProtectorPass(*PassRegistry::getPassRegistry());
}

INITIALIZE_PASS_BEGIN(StackProtector, DEBUG_TYPE,
                      "Insert stack protectors", false, true)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_DEPENDENCY(DominatorTreeWrapperPass)
INITIALIZE_PASS_END(StackProtector, DEBUG_TYPE,
                    "Insert stack protectors", false, true)

FunctionPass *llvm::createStackProtectorPass() { return new StackProtector(); }

void StackProtector::getAnalysisUsage(AnalysisUsage &AU) const {
  AU.addRequired<TargetPassConfig>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

bool StackProtector::runOnFunction(Function &Fn) {
  F = &Fn;
  M = F->getParent();
  if (auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>())
    DTU.emplace(DTWP->getDomTree(), DomTreeUpdater::UpdateStrategy::Lazy);
  TM = &getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
  Trip = TM->getTargetTriple();
  TLI = TM->getSubtargetImpl(Fn)->getTargetLowering();
  HasPrologue = false;
  HasIRCheck = false;

  SSPBufferSize = Fn.getFnAttributeAsParsedInteger(
      "stack-protector-buffer-size", DefaultSSPBufferSize);
  if (!RequiresStackProtector())
    return false;

  // TODO(etienneb): Functions with funclets are not correctly supported now.
  // Do nothing if this is a funclet-based personality.
  if (Fn.hasPersonalityFn()) {
    EHPersonality Personality = classifyEHPersonality(Fn.getPersonalityFn());
    if (isFuncletEHPersonality(Personality))
      return false;
  }

  ++NumFunProtected;
  bool Changed = InsertStackProtectors();
#ifdef EXPENSIVE_CHECKS
  assert((!DTU ||
          DTU->getDomTree().verify(DominatorTree::VerificationLevel::Full)) &&
         "Failed to maintain validity of domtree!");
#endif
  DTU.reset();
  return Changed;
}

/// \param [out] IsLarge is set to true if a protectable array is found and
/// it is "large" ( >= ssp-buffer-size). In the case of a structure with
/// multiple arrays, this gets set if any of them is large.
bool StackProtector::ContainsProtectableArray(Type *Ty, bool &IsLarge,
                                              bool Strong,
                                              bool InStruct) const {
  if (!Ty)
    return false;
  if (ArrayType *AT = dyn_cast<ArrayType>(Ty)) {
    if (!AT->getElementType()->isIntegerTy(8)) {
      // If we're on a non-Darwin platform or we're inside of a structure, don't
      // add stack protectors unless the array is a character array.
      // However, in strong mode any array, regardless of type and size,
      // triggers a protector.
      if (!Strong && (InStruct || !Trip.isOSDarwin()))
        return false;
    }

    // If an array has more than SSPBufferSize bytes of allocated space, then we
    // emit stack protectors.
    if (SSPBufferSize <= M->getDataLayout().getTypeAllocSize(AT)) {
      IsLarge = true;
      return true;
    }

    if (Strong)
      // Require a protector for all arrays in strong mode
      return true;
  }

  const StructType *ST = dyn_cast<StructType>(Ty);
  if (!ST)
    return false;

  bool NeedsProtector = false;
  for (Type *ET : ST->elements())
    if (ContainsProtectableArray(ET, IsLarge, Strong, true)) {
      // If the element is a protectable array and is large (>= SSPBufferSize)
      // then we are done. If the protectable array is not large, then
      // keep looking in case a subsequent element is a large array.
      if (IsLarge)
        return true;
      NeedsProtector = true;
    }

  return NeedsProtector;
}

bool StackProtector::HasAddressTaken(const Instruction *AI,
                                     TypeSize AllocSize) {
  const DataLayout &DL = M->getDataLayout();
  for (const User *U : AI->users()) {
    const auto *I = cast<Instruction>(U);
    // If this instruction accesses memory make sure it doesn't access beyond
    // the bounds of the allocated object.
    std::optional<MemoryLocation> MemLoc = MemoryLocation::getOrNone(I);
    if (MemLoc && MemLoc->Size.hasValue() &&
        !TypeSize::isKnownGE(AllocSize,
                             TypeSize::getFixed(MemLoc->Size.getValue())))
      return true;
    switch (I->getOpcode()) {
    case Instruction::Store:
      if (AI == cast<StoreInst>(I)->getValueOperand())
        return true;
      break;
    case Instruction::AtomicCmpXchg:
      // cmpxchg conceptually includes both a load and store from the same
      // location. So, like store, the value being stored is what matters.
      if (AI == cast<AtomicCmpXchgInst>(I)->getNewValOperand())
        return true;
      break;
    case Instruction::PtrToInt:
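      // Converting the alloca's address to an integer lets it escape in ways
      // that cannot be tracked here, so conservatively treat ptrtoint of the
      // address as taking it.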
      if (AI == cast<PtrToIntInst>(I)->getOperand(0))
        return true;
      break;
    case Instruction::Call: {
      // Ignore intrinsics that do not become real instructions.
      // TODO: Narrow this to intrinsics that have store-like effects.
      const auto *CI = cast<CallInst>(I);
      if (!CI->isDebugOrPseudoInst() && !CI->isLifetimeStartOrEnd())
        return true;
      break;
    }
    case Instruction::Invoke:
      return true;
    case Instruction::GetElementPtr: {
      // If the GEP offset is out-of-bounds, or is non-constant and so has to be
      // assumed to be potentially out-of-bounds, then any memory access that
      // would use it could also be out-of-bounds meaning stack protection is
      // required.
      const GetElementPtrInst *GEP = cast<GetElementPtrInst>(I);
      unsigned IndexSize = DL.getIndexTypeSizeInBits(I->getType());
      APInt Offset(IndexSize, 0);
      if (!GEP->accumulateConstantOffset(DL, Offset))
        return true;
      TypeSize OffsetSize = TypeSize::Fixed(Offset.getLimitedValue());
      if (!TypeSize::isKnownGT(AllocSize, OffsetSize))
        return true;
      // Adjust AllocSize to be the space remaining after this offset.
      // We can't subtract a fixed size from a scalable one, so in that case
      // assume the scalable value is of minimum size.
      TypeSize NewAllocSize =
          TypeSize::Fixed(AllocSize.getKnownMinValue()) - OffsetSize;
      if (HasAddressTaken(I, NewAllocSize))
        return true;
      break;
    }
    case Instruction::BitCast:
    case Instruction::Select:
    case Instruction::AddrSpaceCast:
      if (HasAddressTaken(I, AllocSize))
        return true;
      break;
    case Instruction::PHI: {
      // Keep track of what PHI nodes we have already visited to ensure
      // they are only visited once.
      const auto *PN = cast<PHINode>(I);
      if (VisitedPHIs.insert(PN).second)
        if (HasAddressTaken(PN, AllocSize))
          return true;
      break;
    }
    case Instruction::Load:
    case Instruction::AtomicRMW:
    case Instruction::Ret:
      // These instructions take an address operand, but have load-like or
      // other innocuous behavior that should not trigger a stack protector.
      // atomicrmw conceptually has both load and store semantics, but the
      // value being stored must be integer; so if a pointer is being stored,
      // we'll catch it in the PtrToInt case above.
      break;
    default:
      // Conservatively return true for any instruction that takes an address
      // operand, but is not handled above.
      return true;
    }
  }
  return false;
}

/// Search for the first call to the llvm.stackprotector intrinsic and return it
/// if present.
static const CallInst *findStackProtectorIntrinsic(Function &F) {
  for (const BasicBlock &BB : F)
    for (const Instruction &I : BB)
      if (const auto *II = dyn_cast<IntrinsicInst>(&I))
        if (II->getIntrinsicID() == Intrinsic::stackprotector)
          return II;
  return nullptr;
}

/// Check whether or not this function needs a stack protector based
/// upon the stack protector level.
///
/// We use two heuristics: a standard (ssp) and strong (sspstrong).
/// The standard heuristic will add a guard variable to functions that
/// call alloca with either a variable size or a size >= SSPBufferSize,
/// functions with character buffers larger than SSPBufferSize, and functions
/// with aggregates containing character buffers larger than SSPBufferSize. The
/// strong heuristic will add a guard variable to functions that call alloca
/// regardless of size, functions with any buffer regardless of type and size,
/// functions with aggregates that contain any buffer regardless of type and
/// size, and functions that contain stack-based variables that have had their
/// address taken.
bool StackProtector::RequiresStackProtector() {
  bool Strong = false;
  bool NeedsProtector = false;

  if (F->hasFnAttribute(Attribute::SafeStack))
    return false;

  // We are constructing the OptimizationRemarkEmitter on the fly rather than
  // using the analysis pass to avoid building DominatorTree and LoopInfo which
  // are not available this late in the IR pipeline.
  OptimizationRemarkEmitter ORE(F);

  if (F->hasFnAttribute(Attribute::StackProtectReq)) {
    ORE.emit([&]() {
      return OptimizationRemark(DEBUG_TYPE, "StackProtectorRequested", F)
             << "Stack protection applied to function "
             << ore::NV("Function", F)
             << " due to a function attribute or command-line switch";
    });
    NeedsProtector = true;
    Strong = true; // Use the same heuristic as strong to determine SSPLayout
  } else if (F->hasFnAttribute(Attribute::StackProtectStrong))
    Strong = true;
  else if (!F->hasFnAttribute(Attribute::StackProtect))
    return false;

  for (const BasicBlock &BB : *F) {
    for (const Instruction &I : BB) {
      if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
        if (AI->isArrayAllocation()) {
          auto RemarkBuilder = [&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorAllocaOrArray",
                                      &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to a call to alloca or use of a variable length "
                      "array";
          };
          if (const auto *CI = dyn_cast<ConstantInt>(AI->getArraySize())) {
            if (CI->getLimitedValue(SSPBufferSize) >= SSPBufferSize) {
              // A call to alloca with size >= SSPBufferSize requires
              // stack protectors.
              Layout.insert(std::make_pair(AI,
                                           MachineFrameInfo::SSPLK_LargeArray));
              ORE.emit(RemarkBuilder);
              NeedsProtector = true;
            } else if (Strong) {
              // Require protectors for all alloca calls in strong mode.
              Layout.insert(std::make_pair(AI,
                                           MachineFrameInfo::SSPLK_SmallArray));
              ORE.emit(RemarkBuilder);
              NeedsProtector = true;
            }
          } else {
            // A call to alloca with a variable size requires protectors.
            Layout.insert(std::make_pair(AI,
                                         MachineFrameInfo::SSPLK_LargeArray));
            ORE.emit(RemarkBuilder);
            NeedsProtector = true;
          }
          continue;
        }

        bool IsLarge = false;
        if (ContainsProtectableArray(AI->getAllocatedType(), IsLarge, Strong)) {
          Layout.insert(std::make_pair(AI, IsLarge
                                           ? MachineFrameInfo::SSPLK_LargeArray
                                           : MachineFrameInfo::SSPLK_SmallArray));
          ORE.emit([&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorBuffer", &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to a stack allocated buffer or struct containing a "
                      "buffer";
          });
          NeedsProtector = true;
          continue;
        }
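
        // In strong mode, additionally protect any local whose address is
        // taken in a way HasAddressTaken() cannot prove stays in bounds
        // (stored, passed to a call, cast to an integer, and so on).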
        if (Strong && HasAddressTaken(AI, M->getDataLayout().getTypeAllocSize(
                                              AI->getAllocatedType()))) {
          ++NumAddrTaken;
          Layout.insert(std::make_pair(AI, MachineFrameInfo::SSPLK_AddrOf));
          ORE.emit([&]() {
            return OptimizationRemark(DEBUG_TYPE, "StackProtectorAddressTaken",
                                      &I)
                   << "Stack protection applied to function "
                   << ore::NV("Function", F)
                   << " due to the address of a local variable being taken";
          });
          NeedsProtector = true;
        }
        // Clear any PHIs that we visited, to make sure we examine all uses of
        // any subsequent allocas that we look at.
        VisitedPHIs.clear();
      }
    }
  }

  return NeedsProtector;
}

/// Create a stack guard loading and populate whether SelectionDAG SSP is
/// supported.
static Value *getStackGuard(const TargetLoweringBase *TLI, Module *M,
                            IRBuilder<> &B,
                            bool *SupportsSelectionDAGSP = nullptr) {
  Value *Guard = TLI->getIRStackGuard(B);
  StringRef GuardMode = M->getStackProtectorGuard();
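  // An IR-level guard load is only emitted for the default TLS-based guard
  // location; if the module requests a different stack-protector-guard mode,
  // fall through to the SelectionDAG-based handling below.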
  if ((GuardMode == "tls" || GuardMode.empty()) && Guard)
    return B.CreateLoad(B.getInt8PtrTy(), Guard, true, "StackGuard");

  // Use SelectionDAG SSP handling, since there isn't an IR guard.
  //
  // This is more or less weird, since we optionally output whether we
  // should perform a SelectionDAG SP here. The reason is that it's strictly
  // defined as !TLI->getIRStackGuard(B), where getIRStackGuard is also
  // mutating. There is no way to get this bit without mutating the IR, so
  // getting this bit has to happen at this point.
  //
  // We could have defined a new function TLI::supportsSelectionDAGSP(), but
  // that would put more burden on the backends' overriding work, especially
  // when it actually conveys the same information getIRStackGuard() already
  // gives.
  if (SupportsSelectionDAGSP)
    *SupportsSelectionDAGSP = true;
  TLI->insertSSPDeclarations(*M);
  return B.CreateCall(Intrinsic::getDeclaration(M, Intrinsic::stackguard));
}

/// Insert code into the entry block that stores the stack guard
/// variable onto the stack:
///
///   entry:
///     StackGuardSlot = alloca i8*
///     StackGuard = <stack guard>
///     call void @llvm.stackprotector(StackGuard, StackGuardSlot)
///
/// Returns true if the platform/triple supports the stackprotectorcreate pseudo
/// node.
static bool CreatePrologue(Function *F, Module *M, Instruction *CheckLoc,
                           const TargetLoweringBase *TLI, AllocaInst *&AI) {
  bool SupportsSelectionDAGSP = false;
  IRBuilder<> B(&F->getEntryBlock().front());
  PointerType *PtrTy = Type::getInt8PtrTy(CheckLoc->getContext());
  AI = B.CreateAlloca(PtrTy, nullptr, "StackGuardSlot");

  Value *GuardSlot = getStackGuard(TLI, M, B, &SupportsSelectionDAGSP);
  B.CreateCall(Intrinsic::getDeclaration(M, Intrinsic::stackprotector),
               {GuardSlot, AI});
  return SupportsSelectionDAGSP;
}

/// InsertStackProtectors - Insert code into the prologue and epilogue of the
/// function.
///
///  - The prologue code loads and stores the stack guard onto the stack.
///  - The epilogue checks the value stored in the prologue against the original
///    value. It calls __stack_chk_fail if they differ.
bool StackProtector::InsertStackProtectors() {
  // If the target wants to XOR the frame pointer into the guard value, it's
  // impossible to emit the check in IR, so the target *must* support stack
  // protection in SDAG.
  bool SupportsSelectionDAGSP =
      TLI->useStackGuardXorFP() ||
      (EnableSelectionDAGSP && !TM->Options.EnableFastISel);
  AllocaInst *AI = nullptr; // Place on stack that stores the stack guard.
  BasicBlock *FailBB = nullptr;

  for (BasicBlock &BB : llvm::make_early_inc_range(*F)) {
    // This is a stack protector auto-generated check BB; skip it.
    if (&BB == FailBB)
      continue;
    Instruction *CheckLoc = dyn_cast<ReturnInst>(BB.getTerminator());
    if (!CheckLoc && !DisableCheckNoReturn) {
      for (auto &Inst : BB) {
        auto *CB = dyn_cast<CallBase>(&Inst);
        if (!CB)
          continue;
        if (!CB->doesNotReturn())
          continue;
        // Do the stack check before noreturn calls (e.g. __cxa_throw).
        CheckLoc = CB;
        break;
      }
    }

    if (!CheckLoc)
      continue;

    // Generate prologue instrumentation if not already generated.
    if (!HasPrologue) {
      HasPrologue = true;
      SupportsSelectionDAGSP &= CreatePrologue(F, M, CheckLoc, TLI, AI);
    }

    // SelectionDAG based code generation. Nothing else needs to be done here.
    // The epilogue instrumentation is postponed to SelectionDAG.
    if (SupportsSelectionDAGSP)
      break;

    // Find the stack guard slot if the prologue was not created by this pass
    // itself via a previous call to CreatePrologue().
    if (!AI) {
      const CallInst *SPCall = findStackProtectorIntrinsic(*F);
      assert(SPCall && "Call to llvm.stackprotector is missing");
      AI = cast<AllocaInst>(SPCall->getArgOperand(1));
    }

    // Set HasIRCheck to true, so that SelectionDAG will not generate its own
    // version. SelectionDAG calls 'shouldEmitSDCheck' to check whether
    // instrumentation has already been generated.
    HasIRCheck = true;

    // If we're instrumenting a block with a tail call, the check has to be
    // inserted before the call rather than between it and the return. The
    // verifier guarantees that a tail call is either directly before the
    // return or with a single correct bitcast of the return value in between so
    // we don't need to worry about many situations here.
    Instruction *Prev = CheckLoc->getPrevNonDebugInstruction();
    if (Prev && isa<CallInst>(Prev) && cast<CallInst>(Prev)->isTailCall())
      CheckLoc = Prev;
    else if (Prev) {
      Prev = Prev->getPrevNonDebugInstruction();
      if (Prev && isa<CallInst>(Prev) && cast<CallInst>(Prev)->isTailCall())
        CheckLoc = Prev;
    }

    // Generate epilogue instrumentation. The epilogue instrumentation can be
    // function-based or inlined depending on which mechanism the target is
    // providing.
    if (Function *GuardCheck = TLI->getSSPStackGuardCheck(*M)) {
      // Generate the function-based epilogue instrumentation.
      // The target provides a guard check function, generate a call to it.
      IRBuilder<> B(CheckLoc);
      LoadInst *Guard = B.CreateLoad(B.getInt8PtrTy(), AI, true, "Guard");
      CallInst *Call = B.CreateCall(GuardCheck, {Guard});
      Call->setAttributes(GuardCheck->getAttributes());
      Call->setCallingConv(GuardCheck->getCallingConv());
    } else {
      // Generate the epilogue with inline instrumentation.
      // If we do not support SelectionDAG based calls, generate IR level
      // calls.
      //
      // For each block with a return instruction, convert this:
      //
      //   return:
      //     ...
      //     ret ...
      //
      // into this:
      //
      //   return:
      //     ...
      //     %1 = <stack guard>
      //     %2 = load StackGuardSlot
      //     %3 = icmp ne i1 %1, %2
      //     br i1 %3, label %CallStackCheckFailBlk, label %SP_return
      //
      //   SP_return:
      //     ret ...
      //
      //   CallStackCheckFailBlk:
      //     call void @__stack_chk_fail()
      //     unreachable

      // Create the FailBB. We duplicate the BB every time since the MI tail
      // merge pass will merge together all of the various BB into one including
      // fail BB generated by the stack protector pseudo instruction.
      if (!FailBB)
        FailBB = CreateFailBB();

      IRBuilder<> B(CheckLoc);
      Value *Guard = getStackGuard(TLI, M, B);
      LoadInst *LI2 = B.CreateLoad(B.getInt8PtrTy(), AI, true);
      auto *Cmp = cast<ICmpInst>(B.CreateICmpNE(Guard, LI2));
      auto SuccessProb =
          BranchProbabilityInfo::getBranchProbStackProtector(true);
      auto FailureProb =
          BranchProbabilityInfo::getBranchProbStackProtector(false);
      MDNode *Weights = MDBuilder(F->getContext())
                            .createBranchWeights(FailureProb.getNumerator(),
                                                 SuccessProb.getNumerator());
      SplitBlockAndInsertIfThen(Cmp, CheckLoc,
                                /*Unreachable=*/false, Weights,
                                DTU ? &*DTU : nullptr,
                                /*LI=*/nullptr, /*ThenBlock=*/FailBB);
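
      // SplitBlockAndInsertIfThen leaves FailBB as the taken ("then")
      // successor of the new conditional branch. The lines below name and
      // place the continuation block, then invert the compare and swap the
      // successors so control goes to SP_return when the guard is intact.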
      auto *BI = cast<BranchInst>(Cmp->getParent()->getTerminator());
      BasicBlock *NewBB = BI->getSuccessor(1);
      NewBB->setName("SP_return");
      NewBB->moveAfter(&BB);

      Cmp->setPredicate(Cmp->getInversePredicate());
      BI->swapSuccessors();
    }
  }

  // HasPrologue is false only if we didn't modify any basic blocks, i.e.,
  // there are no return statements in the function.
  return HasPrologue;
}

/// CreateFailBB - Create a basic block to jump to when the stack protector
/// check fails.
BasicBlock *StackProtector::CreateFailBB() {
  LLVMContext &Context = F->getContext();
  BasicBlock *FailBB = BasicBlock::Create(Context, "CallStackCheckFailBlk", F);
  IRBuilder<> B(FailBB);
  if (F->getSubprogram())
    B.SetCurrentDebugLocation(
        DILocation::get(Context, 0, 0, F->getSubprogram()));
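  // OpenBSD provides its own failure handler, __stack_smash_handler, which
  // takes the name of the offending function; all other targets call the
  // standard __stack_chk_fail().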
  if (Trip.isOSOpenBSD()) {
    FunctionCallee StackChkFail = M->getOrInsertFunction(
        "__stack_smash_handler", Type::getVoidTy(Context),
        Type::getInt8PtrTy(Context));
    B.CreateCall(StackChkFail, B.CreateGlobalStringPtr(F->getName(), "SSH"));
  } else {
    FunctionCallee StackChkFail =
        M->getOrInsertFunction("__stack_chk_fail", Type::getVoidTy(Context));
    B.CreateCall(StackChkFail, {});
  }
  B.CreateUnreachable();
  return FailBB;
}
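
/// Report whether SelectionDAG still needs to emit the guard check for this
/// block: a prologue was created here, no IR-level check was emitted, and the
/// block ends in a return.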
bool StackProtector::shouldEmitSDCheck(const BasicBlock &BB) const {
  return HasPrologue && !HasIRCheck && isa<ReturnInst>(BB.getTerminator());
}
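
/// Copy the per-alloca SSP layout classifications recorded in Layout into the
/// MachineFrameInfo of the corresponding stack objects so that later frame
/// layout can take them into account.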
void StackProtector::copyToMachineFrameInfo(MachineFrameInfo &MFI) const {
  if (Layout.empty())
    return;

  for (int I = 0, E = MFI.getObjectIndexEnd(); I != E; ++I) {
    if (MFI.isDeadObjectIndex(I))
      continue;

    const AllocaInst *AI = MFI.getObjectAllocation(I);
    if (!AI)
      continue;

    SSPLayoutMap::const_iterator LI = Layout.find(AI);
    if (LI == Layout.end())
      continue;

    MFI.setObjectSSPLayout(I, LI->second);
  }
}