IntrinsicInst.cpp 28 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833
  1. //===-- IntrinsicInst.cpp - Intrinsic Instruction Wrappers ---------------===//
  2. //
  3. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  4. // See https://llvm.org/LICENSE.txt for license information.
  5. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  6. //
  7. //===----------------------------------------------------------------------===//
  8. //
  9. // This file implements methods that make it really easy to deal with intrinsic
  10. // functions.
  11. //
  12. // All intrinsic function calls are instances of the call instruction, so these
  13. // are all subclasses of the CallInst class. Note that none of these classes
  14. // has state or virtual methods, which is an important part of this gross/neat
  15. // hack working.
  16. //
  17. // In some cases, arguments to intrinsics need to be generic and are defined as
  18. // type pointer to empty struct { }*. To access the real item of interest the
  19. // cast instruction needs to be stripped away.
  20. //
  21. //===----------------------------------------------------------------------===//
  22. #include "llvm/IR/IntrinsicInst.h"
  23. #include "llvm/ADT/StringSwitch.h"
  24. #include "llvm/IR/Constants.h"
  25. #include "llvm/IR/DebugInfoMetadata.h"
  26. #include "llvm/IR/Metadata.h"
  27. #include "llvm/IR/Module.h"
  28. #include "llvm/IR/Operator.h"
  29. #include "llvm/IR/PatternMatch.h"
  30. #include "llvm/IR/Statepoint.h"
  31. #include <optional>
  32. using namespace llvm;
  33. bool IntrinsicInst::mayLowerToFunctionCall(Intrinsic::ID IID) {
  34. switch (IID) {
  35. case Intrinsic::objc_autorelease:
  36. case Intrinsic::objc_autoreleasePoolPop:
  37. case Intrinsic::objc_autoreleasePoolPush:
  38. case Intrinsic::objc_autoreleaseReturnValue:
  39. case Intrinsic::objc_copyWeak:
  40. case Intrinsic::objc_destroyWeak:
  41. case Intrinsic::objc_initWeak:
  42. case Intrinsic::objc_loadWeak:
  43. case Intrinsic::objc_loadWeakRetained:
  44. case Intrinsic::objc_moveWeak:
  45. case Intrinsic::objc_release:
  46. case Intrinsic::objc_retain:
  47. case Intrinsic::objc_retainAutorelease:
  48. case Intrinsic::objc_retainAutoreleaseReturnValue:
  49. case Intrinsic::objc_retainAutoreleasedReturnValue:
  50. case Intrinsic::objc_retainBlock:
  51. case Intrinsic::objc_storeStrong:
  52. case Intrinsic::objc_storeWeak:
  53. case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
  54. case Intrinsic::objc_retainedObject:
  55. case Intrinsic::objc_unretainedObject:
  56. case Intrinsic::objc_unretainedPointer:
  57. case Intrinsic::objc_retain_autorelease:
  58. case Intrinsic::objc_sync_enter:
  59. case Intrinsic::objc_sync_exit:
  60. return true;
  61. default:
  62. return false;
  63. }
  64. }
  65. //===----------------------------------------------------------------------===//
  66. /// DbgVariableIntrinsic - This is the common base class for debug info
  67. /// intrinsics for variables.
  68. ///
  69. iterator_range<DbgVariableIntrinsic::location_op_iterator>
  70. DbgVariableIntrinsic::location_ops() const {
  71. auto *MD = getRawLocation();
  72. assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  73. // If operand is ValueAsMetadata, return a range over just that operand.
  74. if (auto *VAM = dyn_cast<ValueAsMetadata>(MD)) {
  75. return {location_op_iterator(VAM), location_op_iterator(VAM + 1)};
  76. }
  77. // If operand is DIArgList, return a range over its args.
  78. if (auto *AL = dyn_cast<DIArgList>(MD))
  79. return {location_op_iterator(AL->args_begin()),
  80. location_op_iterator(AL->args_end())};
  81. // Operand must be an empty metadata tuple, so return empty iterator.
  82. return {location_op_iterator(static_cast<ValueAsMetadata *>(nullptr)),
  83. location_op_iterator(static_cast<ValueAsMetadata *>(nullptr))};
  84. }
  85. Value *DbgVariableIntrinsic::getVariableLocationOp(unsigned OpIdx) const {
  86. auto *MD = getRawLocation();
  87. assert(MD && "First operand of DbgVariableIntrinsic should be non-null.");
  88. if (auto *AL = dyn_cast<DIArgList>(MD))
  89. return AL->getArgs()[OpIdx]->getValue();
  90. if (isa<MDNode>(MD))
  91. return nullptr;
  92. assert(
  93. isa<ValueAsMetadata>(MD) &&
  94. "Attempted to get location operand from DbgVariableIntrinsic with none.");
  95. auto *V = cast<ValueAsMetadata>(MD);
  96. assert(OpIdx == 0 && "Operand Index must be 0 for a debug intrinsic with a "
  97. "single location operand.");
  98. return V->getValue();
  99. }
  100. static ValueAsMetadata *getAsMetadata(Value *V) {
  101. return isa<MetadataAsValue>(V) ? dyn_cast<ValueAsMetadata>(
  102. cast<MetadataAsValue>(V)->getMetadata())
  103. : ValueAsMetadata::get(V);
  104. }
/// Replace every use of \p OldValue among this intrinsic's location operands
/// (and, for dbg.assign, its address component) with \p NewValue.
void DbgVariableIntrinsic::replaceVariableLocationOp(Value *OldValue,
                                                     Value *NewValue) {
  // If OldValue is used as the address part of a dbg.assign intrinsic replace
  // it with NewValue and return true.
  auto ReplaceDbgAssignAddress = [this, OldValue, NewValue]() -> bool {
    auto *DAI = dyn_cast<DbgAssignIntrinsic>(this);
    if (!DAI || OldValue != DAI->getAddress())
      return false;
    DAI->setAddress(NewValue);
    return true;
  };
  bool DbgAssignAddrReplaced = ReplaceDbgAssignAddress();
  (void)DbgAssignAddrReplaced;
  assert(NewValue && "Values must be non-null");
  auto Locations = location_ops();
  auto OldIt = find(Locations, OldValue);
  // OldValue may legitimately be absent from the location list when it was
  // only referenced as the dbg.assign address (handled above).
  assert((OldIt != Locations.end() || DbgAssignAddrReplaced) &&
         "OldValue must be a current location");
  if (!hasArgList()) {
    // Additional check necessary to avoid unconditionally replacing this
    // operand when a dbg.assign address is replaced (DbgAssignAddrReplaced is
    // true).
    if (OldValue != getVariableLocationOp(0))
      return;
    // Single-location form: wrap NewValue in MetadataAsValue if needed and
    // overwrite operand 0 directly.
    Value *NewOperand = isa<MetadataAsValue>(NewValue)
                            ? NewValue
                            : MetadataAsValue::get(
                                  getContext(), ValueAsMetadata::get(NewValue));
    return setArgOperand(0, NewOperand);
  }
  // Multi-location form: rebuild the DIArgList, substituting NewOperand at
  // the matched position.
  SmallVector<ValueAsMetadata *, 4> MDs;
  ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  for (auto *VMD : Locations)
    MDs.push_back(VMD == *OldIt ? NewOperand : getAsMetadata(VMD));
  setArgOperand(
      0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
}
  142. void DbgVariableIntrinsic::replaceVariableLocationOp(unsigned OpIdx,
  143. Value *NewValue) {
  144. assert(OpIdx < getNumVariableLocationOps() && "Invalid Operand Index");
  145. if (!hasArgList()) {
  146. Value *NewOperand = isa<MetadataAsValue>(NewValue)
  147. ? NewValue
  148. : MetadataAsValue::get(
  149. getContext(), ValueAsMetadata::get(NewValue));
  150. return setArgOperand(0, NewOperand);
  151. }
  152. SmallVector<ValueAsMetadata *, 4> MDs;
  153. ValueAsMetadata *NewOperand = getAsMetadata(NewValue);
  154. for (unsigned Idx = 0; Idx < getNumVariableLocationOps(); ++Idx)
  155. MDs.push_back(Idx == OpIdx ? NewOperand
  156. : getAsMetadata(getVariableLocationOp(Idx)));
  157. setArgOperand(
  158. 0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
  159. }
  160. void DbgVariableIntrinsic::addVariableLocationOps(ArrayRef<Value *> NewValues,
  161. DIExpression *NewExpr) {
  162. assert(NewExpr->hasAllLocationOps(getNumVariableLocationOps() +
  163. NewValues.size()) &&
  164. "NewExpr for debug variable intrinsic does not reference every "
  165. "location operand.");
  166. assert(!is_contained(NewValues, nullptr) && "New values must be non-null");
  167. setArgOperand(2, MetadataAsValue::get(getContext(), NewExpr));
  168. SmallVector<ValueAsMetadata *, 4> MDs;
  169. for (auto *VMD : location_ops())
  170. MDs.push_back(getAsMetadata(VMD));
  171. for (auto *VMD : NewValues)
  172. MDs.push_back(getAsMetadata(VMD));
  173. setArgOperand(
  174. 0, MetadataAsValue::get(getContext(), DIArgList::get(getContext(), MDs)));
  175. }
  176. std::optional<uint64_t> DbgVariableIntrinsic::getFragmentSizeInBits() const {
  177. if (auto Fragment = getExpression()->getFragmentInfo())
  178. return Fragment->SizeInBits;
  179. return getVariable()->getSizeInBits();
  180. }
  181. Value *DbgAssignIntrinsic::getAddress() const {
  182. auto *MD = getRawAddress();
  183. if (auto *V = dyn_cast<ValueAsMetadata>(MD))
  184. return V->getValue();
  185. // When the value goes to null, it gets replaced by an empty MDNode.
  186. assert(!cast<MDNode>(MD)->getNumOperands() && "Expected an empty MDNode");
  187. return nullptr;
  188. }
  189. void DbgAssignIntrinsic::setAssignId(DIAssignID *New) {
  190. setOperand(OpAssignID, MetadataAsValue::get(getContext(), New));
  191. }
  192. void DbgAssignIntrinsic::setAddress(Value *V) {
  193. assert(V->getType()->isPointerTy() &&
  194. "Destination Component must be a pointer type");
  195. setOperand(OpAddress,
  196. MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
  197. }
  198. void DbgAssignIntrinsic::setKillAddress() {
  199. if (isKillAddress())
  200. return;
  201. setAddress(UndefValue::get(getAddress()->getType()));
  202. }
  203. bool DbgAssignIntrinsic::isKillAddress() const {
  204. Value *Addr = getAddress();
  205. return !Addr || isa<UndefValue>(Addr);
  206. }
  207. void DbgAssignIntrinsic::setValue(Value *V) {
  208. setOperand(OpValue,
  209. MetadataAsValue::get(getContext(), ValueAsMetadata::get(V)));
  210. }
/// Look up \p Name in \p NameTable (a sorted table of intrinsic names) and
/// return its index, or -1 if no intrinsic matches. A table entry matches if
/// it equals \p Name exactly or is a full dotted prefix of it (allowing for
/// overload-suffix components such as ".p1i8").
int llvm::Intrinsic::lookupLLVMIntrinsicByName(ArrayRef<const char *> NameTable,
                                               StringRef Name) {
  assert(Name.startswith("llvm."));
  // Do successive binary searches of the dotted name components. For
  // "llvm.gc.experimental.statepoint.p1i8.p1i32", we will find the range of
  // intrinsics starting with "llvm.gc", then "llvm.gc.experimental", then
  // "llvm.gc.experimental.statepoint", and then we will stop as the range is
  // size 1. During the search, we can skip the prefix that we already know is
  // identical. By using strncmp we consider names with differing suffixes to
  // be part of the equal range.
  size_t CmpEnd = 4; // Skip the "llvm" component.
  const char *const *Low = NameTable.begin();
  const char *const *High = NameTable.end();
  const char *const *LastLow = Low;
  while (CmpEnd < Name.size() && High - Low > 0) {
    size_t CmpStart = CmpEnd;
    // Advance past the dot to the end of the next component (or of the name).
    CmpEnd = Name.find('.', CmpStart + 1);
    CmpEnd = CmpEnd == StringRef::npos ? Name.size() : CmpEnd;
    // Compare only the current component; the common prefix is known-equal.
    auto Cmp = [CmpStart, CmpEnd](const char *LHS, const char *RHS) {
      return strncmp(LHS + CmpStart, RHS + CmpStart, CmpEnd - CmpStart) < 0;
    };
    LastLow = Low;
    std::tie(Low, High) = std::equal_range(Low, High, Name.data(), Cmp);
  }
  if (High - Low > 0)
    LastLow = Low;
  if (LastLow == NameTable.end())
    return -1;
  StringRef NameFound = *LastLow;
  // Accept an exact match, or a candidate that is a whole-component prefix of
  // Name (the remaining suffix must begin at a dot).
  if (Name == NameFound ||
      (Name.startswith(NameFound) && Name[NameFound.size()] == '.'))
    return LastLow - NameTable.begin();
  return -1;
}
  245. ConstantInt *InstrProfInstBase::getNumCounters() const {
  246. if (InstrProfValueProfileInst::classof(this))
  247. llvm_unreachable("InstrProfValueProfileInst does not have counters!");
  248. return cast<ConstantInt>(const_cast<Value *>(getArgOperand(2)));
  249. }
  250. ConstantInt *InstrProfInstBase::getIndex() const {
  251. if (InstrProfValueProfileInst::classof(this))
  252. llvm_unreachable("Please use InstrProfValueProfileInst::getIndex()");
  253. return cast<ConstantInt>(const_cast<Value *>(getArgOperand(3)));
  254. }
  255. Value *InstrProfIncrementInst::getStep() const {
  256. if (InstrProfIncrementInstStep::classof(this)) {
  257. return const_cast<Value *>(getArgOperand(4));
  258. }
  259. const Module *M = getModule();
  260. LLVMContext &Context = M->getContext();
  261. return ConstantInt::get(Type::getInt64Ty(Context), 1);
  262. }
  263. std::optional<RoundingMode> ConstrainedFPIntrinsic::getRoundingMode() const {
  264. unsigned NumOperands = arg_size();
  265. Metadata *MD = nullptr;
  266. auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 2));
  267. if (MAV)
  268. MD = MAV->getMetadata();
  269. if (!MD || !isa<MDString>(MD))
  270. return std::nullopt;
  271. return convertStrToRoundingMode(cast<MDString>(MD)->getString());
  272. }
  273. std::optional<fp::ExceptionBehavior>
  274. ConstrainedFPIntrinsic::getExceptionBehavior() const {
  275. unsigned NumOperands = arg_size();
  276. Metadata *MD = nullptr;
  277. auto *MAV = dyn_cast<MetadataAsValue>(getArgOperand(NumOperands - 1));
  278. if (MAV)
  279. MD = MAV->getMetadata();
  280. if (!MD || !isa<MDString>(MD))
  281. return std::nullopt;
  282. return convertStrToExceptionBehavior(cast<MDString>(MD)->getString());
  283. }
  284. bool ConstrainedFPIntrinsic::isDefaultFPEnvironment() const {
  285. std::optional<fp::ExceptionBehavior> Except = getExceptionBehavior();
  286. if (Except) {
  287. if (*Except != fp::ebIgnore)
  288. return false;
  289. }
  290. std::optional<RoundingMode> Rounding = getRoundingMode();
  291. if (Rounding) {
  292. if (*Rounding != RoundingMode::NearestTiesToEven)
  293. return false;
  294. }
  295. return true;
  296. }
  297. static FCmpInst::Predicate getFPPredicateFromMD(const Value *Op) {
  298. Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  299. if (!MD || !isa<MDString>(MD))
  300. return FCmpInst::BAD_FCMP_PREDICATE;
  301. return StringSwitch<FCmpInst::Predicate>(cast<MDString>(MD)->getString())
  302. .Case("oeq", FCmpInst::FCMP_OEQ)
  303. .Case("ogt", FCmpInst::FCMP_OGT)
  304. .Case("oge", FCmpInst::FCMP_OGE)
  305. .Case("olt", FCmpInst::FCMP_OLT)
  306. .Case("ole", FCmpInst::FCMP_OLE)
  307. .Case("one", FCmpInst::FCMP_ONE)
  308. .Case("ord", FCmpInst::FCMP_ORD)
  309. .Case("uno", FCmpInst::FCMP_UNO)
  310. .Case("ueq", FCmpInst::FCMP_UEQ)
  311. .Case("ugt", FCmpInst::FCMP_UGT)
  312. .Case("uge", FCmpInst::FCMP_UGE)
  313. .Case("ult", FCmpInst::FCMP_ULT)
  314. .Case("ule", FCmpInst::FCMP_ULE)
  315. .Case("une", FCmpInst::FCMP_UNE)
  316. .Default(FCmpInst::BAD_FCMP_PREDICATE);
  317. }
  318. FCmpInst::Predicate ConstrainedFPCmpIntrinsic::getPredicate() const {
  319. return getFPPredicateFromMD(getArgOperand(2));
  320. }
/// True if this constrained operation has exactly one data operand
/// (per the arity declared in ConstrainedOps.def).
bool ConstrainedFPIntrinsic::isUnaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
// Expands to one case per constrained intrinsic, testing its declared arity.
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 1;
#include "llvm/IR/ConstrainedOps.def"
  }
}

/// True if this constrained operation has exactly three data operands.
bool ConstrainedFPIntrinsic::isTernaryOp() const {
  switch (getIntrinsicID()) {
  default:
    return false;
#define INSTRUCTION(NAME, NARG, ROUND_MODE, INTRINSIC)                         \
  case Intrinsic::INTRINSIC:                                                   \
    return NARG == 3;
#include "llvm/IR/ConstrainedOps.def"
  }
}

/// RTTI support: \p I is a constrained FP intrinsic iff its ID appears in
/// ConstrainedOps.def.
bool ConstrainedFPIntrinsic::classof(const IntrinsicInst *I) {
  switch (I->getIntrinsicID()) {
#define INSTRUCTION(NAME, NARGS, ROUND_MODE, INTRINSIC)                        \
  case Intrinsic::INTRINSIC:
#include "llvm/IR/ConstrainedOps.def"
    return true;
  default:
    return false;
  }
}
  351. ElementCount VPIntrinsic::getStaticVectorLength() const {
  352. auto GetVectorLengthOfType = [](const Type *T) -> ElementCount {
  353. const auto *VT = cast<VectorType>(T);
  354. auto ElemCount = VT->getElementCount();
  355. return ElemCount;
  356. };
  357. Value *VPMask = getMaskParam();
  358. if (!VPMask) {
  359. assert((getIntrinsicID() == Intrinsic::vp_merge ||
  360. getIntrinsicID() == Intrinsic::vp_select) &&
  361. "Unexpected VP intrinsic without mask operand");
  362. return GetVectorLengthOfType(getType());
  363. }
  364. return GetVectorLengthOfType(VPMask->getType());
  365. }
  366. Value *VPIntrinsic::getMaskParam() const {
  367. if (auto MaskPos = getMaskParamPos(getIntrinsicID()))
  368. return getArgOperand(*MaskPos);
  369. return nullptr;
  370. }
  371. void VPIntrinsic::setMaskParam(Value *NewMask) {
  372. auto MaskPos = getMaskParamPos(getIntrinsicID());
  373. setArgOperand(*MaskPos, NewMask);
  374. }
  375. Value *VPIntrinsic::getVectorLengthParam() const {
  376. if (auto EVLPos = getVectorLengthParamPos(getIntrinsicID()))
  377. return getArgOperand(*EVLPos);
  378. return nullptr;
  379. }
  380. void VPIntrinsic::setVectorLengthParam(Value *NewEVL) {
  381. auto EVLPos = getVectorLengthParamPos(getIntrinsicID());
  382. setArgOperand(*EVLPos, NewEVL);
  383. }
/// Return the operand index of the mask for VP intrinsic \p IntrinsicID, or
/// std::nullopt when the ID is not a VP intrinsic. Positions come from
/// VPIntrinsics.def.
std::optional<unsigned>
VPIntrinsic::getMaskParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return MASKPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}

/// Return the operand index of the explicit vector length for VP intrinsic
/// \p IntrinsicID, or std::nullopt when the ID is not a VP intrinsic.
std::optional<unsigned>
VPIntrinsic::getVectorLengthParamPos(Intrinsic::ID IntrinsicID) {
  switch (IntrinsicID) {
  default:
    return std::nullopt;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return VLENPOS;
#include "llvm/IR/VPIntrinsics.def"
  }
}
/// \return the alignment of the pointer used by this load/store/gather or
/// scatter.
MaybeAlign VPIntrinsic::getPointerAlignment() const {
  std::optional<unsigned> PtrParamOpt =
      getMemoryPointerParamPos(getIntrinsicID());
  assert(PtrParamOpt && "no pointer argument!");
  return getParamAlign(*PtrParamOpt);
}

/// \return The pointer operand of this load,store, gather or scatter.
Value *VPIntrinsic::getMemoryPointerParam() const {
  if (auto PtrParamOpt = getMemoryPointerParamPos(getIntrinsicID()))
    return getArgOperand(*PtrParamOpt);
  return nullptr;
}

/// \return the operand index of the pointer for VP memory op \p VPID, or
/// std::nullopt when \p VPID is not a VP memory intrinsic. Positions come
/// from the VP_PROPERTY_MEMOP records in VPIntrinsics.def.
std::optional<unsigned>
VPIntrinsic::getMemoryPointerParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, ...) return POINTERPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

/// \return The data (payload) operand of this store or scatter.
Value *VPIntrinsic::getMemoryDataParam() const {
  auto DataParamOpt = getMemoryDataParamPos(getIntrinsicID());
  if (!DataParamOpt)
    return nullptr;
  return getArgOperand(*DataParamOpt);
}

/// \return the operand index of the stored data for VP memory op \p VPID, or
/// std::nullopt for non-memory VP intrinsics.
std::optional<unsigned> VPIntrinsic::getMemoryDataParamPos(Intrinsic::ID VPID) {
  switch (VPID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_MEMOP(POINTERPOS, DATAPOS) return DATAPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}
/// True iff \p ID is registered as a VP intrinsic in VPIntrinsics.def.
bool VPIntrinsic::isVPIntrinsic(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, MASKPOS, VLENPOS)                    \
  case Intrinsic::VPID:                                                        \
    return true;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

// Equivalent non-predicated opcode
/// Map a VP intrinsic ID to the IR opcode it predicates, when one exists
/// (per VP_PROPERTY_FUNCTIONAL_OPC in VPIntrinsics.def).
std::optional<unsigned>
VPIntrinsic::getFunctionalOpcodeForVP(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) return Instruction::OPC;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return std::nullopt;
}

/// Inverse mapping: the VP intrinsic that predicates IR opcode \p IROPC,
/// or Intrinsic::not_intrinsic if none is registered.
Intrinsic::ID VPIntrinsic::getForOpcode(unsigned IROPC) {
  switch (IROPC) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) break;
#define VP_PROPERTY_FUNCTIONAL_OPC(OPC) case Instruction::OPC:
#define END_REGISTER_VP_INTRINSIC(VPID) return Intrinsic::VPID;
#include "llvm/IR/VPIntrinsics.def"
  }
  return Intrinsic::not_intrinsic;
}
/// Return true when the explicit-vector-length operand provably masks off no
/// lanes, i.e. it statically covers the whole vector.
bool VPIntrinsic::canIgnoreVectorLengthParam() const {
  using namespace PatternMatch;
  ElementCount EC = getStaticVectorLength();
  // No vlen param - no lanes masked-off by it.
  auto *VLParam = getVectorLengthParam();
  if (!VLParam)
    return true;
  // Note that the VP intrinsic causes undefined behavior if the Explicit Vector
  // Length parameter is strictly greater-than the number of vector elements of
  // the operation. This function returns true when this is detected statically
  // in the IR.
  // Check whether "W == vscale * EC.getKnownMinValue()"
  if (EC.isScalable()) {
    // Undig the DL
    const auto *ParMod = this->getModule();
    if (!ParMod)
      return false;
    const auto &DL = ParMod->getDataLayout();
    // Compare vscale patterns: EVL of the form "C * vscale" covers the vector
    // whenever C >= the known minimum element count (see UB note above).
    uint64_t VScaleFactor;
    if (match(VLParam, m_c_Mul(m_ConstantInt(VScaleFactor), m_VScale(DL))))
      return VScaleFactor >= EC.getKnownMinValue();
    // A bare "vscale" covers exactly <vscale x 1 x ty>.
    return (EC.getKnownMinValue() == 1) && match(VLParam, m_VScale(DL));
  }
  // standard SIMD operation: a constant EVL >= the fixed element count covers
  // every lane.
  const auto *VLConst = dyn_cast<ConstantInt>(VLParam);
  if (!VLConst)
    return false;
  uint64_t VLNum = VLConst->getZExtValue();
  if (VLNum >= EC.getKnownMinValue())
    return true;
  return false;
}
/// Declare (or find) the VP intrinsic \p VPID in module \p M with overload
/// types derived from \p ReturnType and the types of \p Params, mirroring how
/// each intrinsic family is overloaded in Intrinsics.td.
Function *VPIntrinsic::getDeclarationForParams(Module *M, Intrinsic::ID VPID,
                                               Type *ReturnType,
                                               ArrayRef<Value *> Params) {
  assert(isVPIntrinsic(VPID) && "not a VP intrinsic");
  Function *VPFunc;
  switch (VPID) {
  default: {
    // Most VP ops are overloaded on a single vector type: the first data
    // operand, or the reduced vector operand for reductions.
    Type *OverloadTy = Params[0]->getType();
    if (VPReductionIntrinsic::isVPReduction(VPID))
      OverloadTy =
          Params[*VPReductionIntrinsic::getVectorParamPos(VPID)]->getType();
    VPFunc = Intrinsic::getDeclaration(M, VPID, OverloadTy);
    break;
  }
  // Casts are overloaded on both the destination and source types.
  case Intrinsic::vp_trunc:
  case Intrinsic::vp_sext:
  case Intrinsic::vp_zext:
  case Intrinsic::vp_fptoui:
  case Intrinsic::vp_fptosi:
  case Intrinsic::vp_uitofp:
  case Intrinsic::vp_sitofp:
  case Intrinsic::vp_fptrunc:
  case Intrinsic::vp_fpext:
  case Intrinsic::vp_ptrtoint:
  case Intrinsic::vp_inttoptr:
    VPFunc =
        Intrinsic::getDeclaration(M, VPID, {ReturnType, Params[0]->getType()});
    break;
  // merge/select are overloaded on the data operand type only.
  case Intrinsic::vp_merge:
  case Intrinsic::vp_select:
    VPFunc = Intrinsic::getDeclaration(M, VPID, {Params[1]->getType()});
    break;
  // Loads/gathers: result type plus pointer type.
  case Intrinsic::vp_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  // Strided load additionally carries the stride type.
  case Intrinsic::experimental_vp_strided_load:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::vp_gather:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {ReturnType, Params[0]->getType()});
    break;
  // Stores/scatters: data type plus pointer type.
  case Intrinsic::vp_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  case Intrinsic::experimental_vp_strided_store:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID,
        {Params[0]->getType(), Params[1]->getType(), Params[2]->getType()});
    break;
  case Intrinsic::vp_scatter:
    VPFunc = Intrinsic::getDeclaration(
        M, VPID, {Params[0]->getType(), Params[1]->getType()});
    break;
  }
  assert(VPFunc && "Could not declare VP intrinsic");
  return VPFunc;
}
/// True iff \p ID is a VP reduction (has a VP_PROPERTY_REDUCTION record in
/// VPIntrinsics.def).
bool VPReductionIntrinsic::isVPReduction(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

/// True iff \p ID is a VP cast (has a VP_PROPERTY_CASTOP record).
bool VPCastIntrinsic::isVPCast(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CASTOP return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}

/// True iff \p ID is a VP comparison (has a VP_PROPERTY_CMP record).
bool VPCmpIntrinsic::isVPCmp(Intrinsic::ID ID) {
  switch (ID) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ...) return true;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  return false;
}
  612. static ICmpInst::Predicate getIntPredicateFromMD(const Value *Op) {
  613. Metadata *MD = cast<MetadataAsValue>(Op)->getMetadata();
  614. if (!MD || !isa<MDString>(MD))
  615. return ICmpInst::BAD_ICMP_PREDICATE;
  616. return StringSwitch<ICmpInst::Predicate>(cast<MDString>(MD)->getString())
  617. .Case("eq", ICmpInst::ICMP_EQ)
  618. .Case("ne", ICmpInst::ICMP_NE)
  619. .Case("ugt", ICmpInst::ICMP_UGT)
  620. .Case("uge", ICmpInst::ICMP_UGE)
  621. .Case("ult", ICmpInst::ICMP_ULT)
  622. .Case("ule", ICmpInst::ICMP_ULE)
  623. .Case("sgt", ICmpInst::ICMP_SGT)
  624. .Case("sge", ICmpInst::ICMP_SGE)
  625. .Case("slt", ICmpInst::ICMP_SLT)
  626. .Case("sle", ICmpInst::ICMP_SLE)
  627. .Default(ICmpInst::BAD_ICMP_PREDICATE);
  628. }
/// Return the comparison predicate of this VP compare, decoding the metadata
/// operand whose position (and FP-ness) is declared in VPIntrinsics.def.
CmpInst::Predicate VPCmpIntrinsic::getPredicate() const {
  bool IsFP = true;
  std::optional<unsigned> CCArgIdx;
  switch (getIntrinsicID()) {
  default:
    break;
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_CMP(CCPOS, ISFP)                                           \
  CCArgIdx = CCPOS;                                                            \
  IsFP = ISFP;                                                                 \
  break;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  }
  assert(CCArgIdx && "Unexpected vector-predicated comparison");
  return IsFP ? getFPPredicateFromMD(getArgOperand(*CCArgIdx))
              : getIntPredicateFromMD(getArgOperand(*CCArgIdx));
}
/// Operand index of the vector being reduced (this must be a VP reduction).
unsigned VPReductionIntrinsic::getVectorParamPos() const {
  return *VPReductionIntrinsic::getVectorParamPos(getIntrinsicID());
}

/// Operand index of the reduction's start value (must be a VP reduction).
unsigned VPReductionIntrinsic::getStartParamPos() const {
  return *VPReductionIntrinsic::getStartParamPos(getIntrinsicID());
}

/// Static lookup of the reduced-vector operand index for \p ID, or
/// std::nullopt when \p ID is not a VP reduction.
std::optional<unsigned>
VPReductionIntrinsic::getVectorParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return VECTORPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}

/// Static lookup of the start-value operand index for \p ID, or std::nullopt
/// when \p ID is not a VP reduction.
std::optional<unsigned>
VPReductionIntrinsic::getStartParamPos(Intrinsic::ID ID) {
  switch (ID) {
#define BEGIN_REGISTER_VP_INTRINSIC(VPID, ...) case Intrinsic::VPID:
#define VP_PROPERTY_REDUCTION(STARTPOS, VECTORPOS) return STARTPOS;
#define END_REGISTER_VP_INTRINSIC(VPID) break;
#include "llvm/IR/VPIntrinsics.def"
  default:
    break;
  }
  return std::nullopt;
}
  677. Instruction::BinaryOps BinaryOpIntrinsic::getBinaryOp() const {
  678. switch (getIntrinsicID()) {
  679. case Intrinsic::uadd_with_overflow:
  680. case Intrinsic::sadd_with_overflow:
  681. case Intrinsic::uadd_sat:
  682. case Intrinsic::sadd_sat:
  683. return Instruction::Add;
  684. case Intrinsic::usub_with_overflow:
  685. case Intrinsic::ssub_with_overflow:
  686. case Intrinsic::usub_sat:
  687. case Intrinsic::ssub_sat:
  688. return Instruction::Sub;
  689. case Intrinsic::umul_with_overflow:
  690. case Intrinsic::smul_with_overflow:
  691. return Instruction::Mul;
  692. default:
  693. llvm_unreachable("Invalid intrinsic");
  694. }
  695. }
  696. bool BinaryOpIntrinsic::isSigned() const {
  697. switch (getIntrinsicID()) {
  698. case Intrinsic::sadd_with_overflow:
  699. case Intrinsic::ssub_with_overflow:
  700. case Intrinsic::smul_with_overflow:
  701. case Intrinsic::sadd_sat:
  702. case Intrinsic::ssub_sat:
  703. return true;
  704. default:
  705. return false;
  706. }
  707. }
  708. unsigned BinaryOpIntrinsic::getNoWrapKind() const {
  709. if (isSigned())
  710. return OverflowingBinaryOperator::NoSignedWrap;
  711. else
  712. return OverflowingBinaryOperator::NoUnsignedWrap;
  713. }
/// Return the statepoint this projection refers to. Operand 0 is either the
/// statepoint token directly (call statepoints and the normal path of invoke
/// statepoints), a landingpad token (exceptional path of an invoke), or undef.
const Value *GCProjectionInst::getStatepoint() const {
  const Value *Token = getArgOperand(0);
  // Propagate undef tokens unchanged (e.g. after dead-code elimination).
  if (isa<UndefValue>(Token))
    return Token;
  // This takes care both of relocates for call statepoints and relocates
  // on normal path of invoke statepoint.
  if (!isa<LandingPadInst>(Token))
    return cast<GCStatepointInst>(Token);
  // This relocate is on exceptional path of an invoke statepoint: walk from
  // the landing pad back to the invoking block's terminator, which is the
  // statepoint invoke itself.
  const BasicBlock *InvokeBB =
      cast<Instruction>(Token)->getParent()->getUniquePredecessor();
  assert(InvokeBB && "safepoints should have unique landingpads");
  assert(InvokeBB->getTerminator() &&
         "safepoint block should be well formed");
  return cast<GCStatepointInst>(InvokeBB->getTerminator());
}
  730. Value *GCRelocateInst::getBasePtr() const {
  731. auto Statepoint = getStatepoint();
  732. if (isa<UndefValue>(Statepoint))
  733. return UndefValue::get(Statepoint->getType());
  734. auto *GCInst = cast<GCStatepointInst>(Statepoint);
  735. if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
  736. return *(Opt->Inputs.begin() + getBasePtrIndex());
  737. return *(GCInst->arg_begin() + getBasePtrIndex());
  738. }
  739. Value *GCRelocateInst::getDerivedPtr() const {
  740. auto *Statepoint = getStatepoint();
  741. if (isa<UndefValue>(Statepoint))
  742. return UndefValue::get(Statepoint->getType());
  743. auto *GCInst = cast<GCStatepointInst>(Statepoint);
  744. if (auto Opt = GCInst->getOperandBundle(LLVMContext::OB_gc_live))
  745. return *(Opt->Inputs.begin() + getDerivedPtrIndex());
  746. return *(GCInst->arg_begin() + getDerivedPtrIndex());
  747. }