//===- CodeMetrics.cpp - Code cost measurements ---------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements code cost measurement utilities.
//
//===----------------------------------------------------------------------===//

#include "llvm/Analysis/CodeMetrics.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/Analysis/AssumptionCache.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/Analysis/ValueTracking.h"
#include "llvm/IR/Function.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/InstructionCost.h"

#define DEBUG_TYPE "code-metrics"

using namespace llvm;
/// Append to the worklist those operands of \p V that are side-effect-free,
/// non-terminator instructions and have not been visited yet.
static void
appendSpeculatableOperands(const Value *V,
                           SmallPtrSetImpl<const Value *> &Visited,
                           SmallVectorImpl<const Value *> &Worklist) {
  const User *U = dyn_cast<User>(V);
  if (!U)
    return;

  for (const Value *Operand : U->operands())
    if (Visited.insert(Operand).second)
      if (const auto *I = dyn_cast<Instruction>(Operand))
        if (!I->mayHaveSideEffects() && !I->isTerminator())
          Worklist.push_back(I);
}
/// Complete the set of ephemeral values: a visited candidate becomes
/// ephemeral once every one of its users is already known to be ephemeral.
static void completeEphemeralValues(SmallPtrSetImpl<const Value *> &Visited,
                                    SmallVectorImpl<const Value *> &Worklist,
                                    SmallPtrSetImpl<const Value *> &EphValues) {
  // Note: We don't speculate PHIs here, so we'll miss instruction chains kept
  // alive only by ephemeral values.

  // Walk the worklist using an index but without caching the size so we can
  // append more entries as we process the worklist. This forms a queue without
  // quadratic behavior by just leaving processed nodes at the head of the
  // worklist forever.
  for (int i = 0; i < (int)Worklist.size(); ++i) {
    const Value *V = Worklist[i];

    assert(Visited.count(V) &&
           "Failed to add a worklist entry to our visited set!");

    // If all uses of this value are ephemeral, then so is this value.
    if (!all_of(V->users(), [&](const User *U) { return EphValues.count(U); }))
      continue;

    EphValues.insert(V);
    LLVM_DEBUG(dbgs() << "Ephemeral Value: " << *V << "\n");

    // Append any more operands to consider.
    appendSpeculatableOperands(V, Visited, Worklist);
  }
}
// Find all ephemeral values.
void CodeMetrics::collectEphemeralValues(
    const Loop *L, AssumptionCache *AC,
    SmallPtrSetImpl<const Value *> &EphValues) {
  SmallPtrSet<const Value *, 32> Visited;
  SmallVector<const Value *, 16> Worklist;

  for (auto &AssumeVH : AC->assumptions()) {
    if (!AssumeVH)
      continue;
    Instruction *I = cast<Instruction>(AssumeVH);

    // Filter out call sites outside of the loop so we don't do a function's
    // worth of work for each of its loops (and, in the common case, ephemeral
    // values in the loop are likely due to @llvm.assume calls in the loop).
    if (!L->contains(I->getParent()))
      continue;

    if (EphValues.insert(I).second)
      appendSpeculatableOperands(I, Visited, Worklist);
  }

  completeEphemeralValues(Visited, Worklist, EphValues);
}
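
// Illustrative example (editorial note, not part of the original source):
// given IR along the lines of
//
//   %a   = add i64 %x, 1          ; only used by %cmp
//   %cmp = icmp ult i64 %a, %n    ; only used by the assume
//   call void @llvm.assume(i1 %cmp)
//
// the assume call seeds EphValues, %cmp is then marked ephemeral because its
// sole user is the assume, and %a follows in turn. A value that also feeds
// real code keeps at least one non-ephemeral user and stays out of the set.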
void CodeMetrics::collectEphemeralValues(
    const Function *F, AssumptionCache *AC,
    SmallPtrSetImpl<const Value *> &EphValues) {
  SmallPtrSet<const Value *, 32> Visited;
  SmallVector<const Value *, 16> Worklist;

  for (auto &AssumeVH : AC->assumptions()) {
    if (!AssumeVH)
      continue;
    Instruction *I = cast<Instruction>(AssumeVH);
    assert(I->getParent()->getParent() == F &&
           "Found assumption for the wrong function!");

    if (EphValues.insert(I).second)
      appendSpeculatableOperands(I, Visited, Worklist);
  }

  completeEphemeralValues(Visited, Worklist, EphValues);
}
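
// Usage sketch (hypothetical caller, editorial note, not part of the original
// source): a client that already owns an AssumptionCache for the function
// would typically build the ephemeral-value set once and reuse it for every
// block it passes to analyzeBasicBlock() below, roughly:
//
//   SmallPtrSet<const Value *, 32> EphValues;
//   CodeMetrics::collectEphemeralValues(&F, &AC, EphValues);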
/// Fill in the current structure with information gleaned from the specified
/// block.
void CodeMetrics::analyzeBasicBlock(
    const BasicBlock *BB, const TargetTransformInfo &TTI,
    const SmallPtrSetImpl<const Value *> &EphValues, bool PrepareForLTO) {
  ++NumBlocks;
  // Use a proxy variable for NumInsts of type InstructionCost, so that it can
  // use InstructionCost's arithmetic properties such as saturation when this
  // feature is added to InstructionCost.
  // When storing the value back to NumInsts, we can assume all costs are Valid
  // because the IR should not contain any nodes that cannot be costed. If that
  // happens, the cost model is broken.
  InstructionCost NumInstsProxy = NumInsts;
  InstructionCost NumInstsBeforeThisBB = NumInsts;
  for (const Instruction &I : *BB) {
    // Skip ephemeral values.
    if (EphValues.count(&I))
      continue;

    // Special handling for calls.
    if (const auto *Call = dyn_cast<CallBase>(&I)) {
      if (const Function *F = Call->getCalledFunction()) {
        bool IsLoweredToCall = TTI.isLoweredToCall(F);
        // If a function is both internal and has a single use, then it is
        // extremely likely to get inlined in the future (it was probably
        // exposed by an interleaved devirtualization pass).
        // When preparing for LTO, liberally consider calls as inline
        // candidates.
        if (!Call->isNoInline() && IsLoweredToCall &&
            ((F->hasInternalLinkage() && F->hasOneUse()) || PrepareForLTO)) {
          ++NumInlineCandidates;
        }

        // If this call is to the function itself, then the function is
        // recursive. Inlining it into other functions is a bad idea, because
        // this is basically just a form of loop peeling, and our metrics
        // aren't useful for that case.
        if (F == BB->getParent())
          isRecursive = true;

        if (IsLoweredToCall)
          ++NumCalls;
      } else {
        // We don't want inline asm to count as a call - that would prevent
        // loop unrolling. The argument setup cost is still real, though.
        if (!Call->isInlineAsm())
          ++NumCalls;
      }
    }

    if (const AllocaInst *AI = dyn_cast<AllocaInst>(&I)) {
      if (!AI->isStaticAlloca())
        this->usesDynamicAlloca = true;
    }

    if (isa<ExtractElementInst>(I) || I.getType()->isVectorTy())
      ++NumVectorInsts;

    if (I.getType()->isTokenTy() && I.isUsedOutsideOfBlock(BB))
      notDuplicatable = true;

    if (const CallInst *CI = dyn_cast<CallInst>(&I)) {
      if (CI->cannotDuplicate())
        notDuplicatable = true;
      if (CI->isConvergent())
        convergent = true;
    }

    if (const InvokeInst *InvI = dyn_cast<InvokeInst>(&I))
      if (InvI->cannotDuplicate())
        notDuplicatable = true;

    NumInstsProxy += TTI.getUserCost(&I, TargetTransformInfo::TCK_CodeSize);
    NumInsts = *NumInstsProxy.getValue();
  }

  if (isa<ReturnInst>(BB->getTerminator()))
    ++NumRets;

  // We never want to inline functions that contain an indirectbr. This is
  // incorrect because all the blockaddress's (in static global initializers
  // for example) would be referring to the original function, and this
  // indirect jump would jump from the inlined copy of the function into the
  // original function, which is extremely undefined behavior.
  // FIXME: This logic isn't really right; we can safely inline functions
  // with indirectbr's as long as no other function or global references the
  // blockaddress of a block within the current function. And as a QOI issue,
  // if someone is using a blockaddress without an indirectbr, and that
  // reference somehow ends up in another function or global, we probably
  // don't want to inline this function.
  notDuplicatable |= isa<IndirectBrInst>(BB->getTerminator());

  // Remember NumInsts for this BB.
  InstructionCost NumInstsThisBB = NumInstsProxy - NumInstsBeforeThisBB;
  NumBBInsts[BB] = *NumInstsThisBB.getValue();
}
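
// Usage sketch (hypothetical, editorial note, not part of the original
// source): clients such as loop transforms typically run the per-block
// analysis above over each block of interest and then consult the accumulated
// counters. Assuming L, TTI, EphValues, and Header are in scope, roughly:
//
//   CodeMetrics Metrics;
//   for (BasicBlock *BB : L->blocks())
//     Metrics.analyzeBasicBlock(BB, TTI, EphValues);
//
//   if (Metrics.notDuplicatable || Metrics.convergent || Metrics.isRecursive)
//     return;                                // e.g. refuse to duplicate code
//   unsigned LoopSize  = Metrics.NumInsts;                // aggregate cost
//   unsigned HeaderSize = Metrics.NumBBInsts.lookup(Header); // per-block cost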