//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstrNoInsert(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert $noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(Register());
  }
  MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
  return insertInstr(MIB);
}

MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);
  return MIB.addMetadata(Label);
}

MachineInstrBuilder MachineIRBuilder::buildDynStackAlloc(const DstOp &Res,
                                                         const SrcOp &Size,
                                                         Align Alignment) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "expected ptr dst type");
  auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
  Res.addDefToMIB(*getMRI(), MIB);
  Size.addSrcToMIB(MIB);
  MIB.addImm(Alignment.value());
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
                                                      int Idx) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addFrameIndex(Idx);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
                                                       const GlobalValue *GV) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");
  auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addGlobalAddress(GV);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
                                                     unsigned JTI) {
  return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
      .addJumpTableIndex(JTI);
}

void MachineIRBuilder::validateUnaryOp(const LLT Res, const LLT Op0) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

void MachineIRBuilder::validateBinaryOp(const LLT Res, const LLT Op0,
                                        const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}

void MachineIRBuilder::validateShiftOp(const LLT Res, const LLT Op0,
                                       const LLT Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilder::buildPtrAdd(const DstOp &Res,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1) {
  assert(Res.getLLTTy(*getMRI()).getScalarType().isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
  assert(Op1.getLLTTy(*getMRI()).getScalarType().isScalar() &&
         "invalid offset type");
  return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1});
}
Optional<MachineInstrBuilder>
MachineIRBuilder::materializePtrAdd(Register &Res, Register Op0,
                                    const LLT ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");
  if (Value == 0) {
    Res = Op0;
    return None;
  }
  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  auto Cst = buildConstant(ValueTy, Value);
  return buildPtrAdd(Res, Op0, Cst.getReg(0));
}
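
// Clear the low NumBits bits of Op0 (e.g. to align a pointer) by masking it
// with a constant whose trailing NumBits bits are zero.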
MachineInstrBuilder MachineIRBuilder::buildMaskLowPtrBits(const DstOp &Res,
                                                          const SrcOp &Op0,
                                                          uint32_t NumBits) {
  LLT PtrTy = Res.getLLTTy(*getMRI());
  LLT MaskTy = LLT::scalar(PtrTy.getSizeInBits());
  Register MaskReg = getMRI()->createGenericVirtualRegister(MaskTy);
  buildConstant(MaskReg, maskTrailingZeros<uint64_t>(NumBits));
  return buildPtrMask(Res, Op0, MaskReg);
}
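
// Widen Op0 to the element count of Res by unmerging its elements and
// appending undef elements before re-merging into Res.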
MachineInstrBuilder
MachineIRBuilder::buildPadVectorWithUndefElements(const DstOp &Res,
                                                  const SrcOp &Op0) {
  LLT ResTy = Res.getLLTTy(*getMRI());
  LLT Op0Ty = Op0.getLLTTy(*getMRI());
  assert((ResTy.isVector() && Op0Ty.isVector()) && "Non vector type");
  assert((ResTy.getElementType() == Op0Ty.getElementType()) &&
         "Different vector element types");
  assert((ResTy.getNumElements() > Op0Ty.getNumElements()) &&
         "Op0 has more elements");
  auto Unmerge = buildUnmerge(Op0Ty.getElementType(), Op0);
  SmallVector<Register, 8> Regs;
  for (auto Op : Unmerge.getInstr()->defs())
    Regs.push_back(Op.getReg());
  Register Undef = buildUndef(Op0Ty.getElementType()).getReg(0);
  unsigned NumberOfPadElts = ResTy.getNumElements() - Regs.size();
  for (unsigned i = 0; i < NumberOfPadElts; ++i)
    Regs.push_back(Undef);
  return buildMerge(Res, Regs);
}
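
// Narrow Op0 to the element count of Res by unmerging it and re-merging only
// the leading elements.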
MachineInstrBuilder
MachineIRBuilder::buildDeleteTrailingVectorElements(const DstOp &Res,
                                                    const SrcOp &Op0) {
  LLT ResTy = Res.getLLTTy(*getMRI());
  LLT Op0Ty = Op0.getLLTTy(*getMRI());
  assert((ResTy.isVector() && Op0Ty.isVector()) && "Non vector type");
  assert((ResTy.getElementType() == Op0Ty.getElementType()) &&
         "Different vector element types");
  assert((ResTy.getNumElements() < Op0Ty.getNumElements()) &&
         "Op0 has fewer elements");
  SmallVector<Register, 8> Regs;
  auto Unmerge = buildUnmerge(Op0Ty.getElementType(), Op0);
  for (unsigned i = 0; i < ResTy.getNumElements(); ++i)
    Regs.push_back(Unmerge.getReg(i));
  return buildMerge(Res, Regs);
}

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
                                                unsigned JTI,
                                                Register IndexReg) {
  assert(getMRI()->getType(TablePtr).isPointer() &&
         "Table reg must be a pointer");
  return buildInstr(TargetOpcode::G_BRJT)
      .addUse(TablePtr)
      .addJumpTableIndex(JTI)
      .addUse(IndexReg);
}

MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
         "creating constant with the wrong size");
  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addCImm(&Val);
    return buildSplatVector(Res, Const);
  }
  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addCImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               Res.getLLTTy(*getMRI()).getScalarSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics()) ==
             EltTy.getSizeInBits() &&
         "creating fconstant with the wrong size");
  assert(!Ty.isPointer() && "invalid operand type");
  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addFPImm(&Val);
    return buildSplatVector(Res, Const);
  }
  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  Const->setDebugLoc(DebugLoc());
  Res.addDefToMIB(*getMRI(), Const);
  Const.addFPImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const APInt &Val) {
  ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(
      Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const APFloat &Val) {
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(Ctx, Val);
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(const SrcOp &Tst,
                                                  MachineBasicBlock &Dest) {
  assert(Tst.getLLTTy(*getMRI()).isScalar() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_BRCOND);
  Tst.addSrcToMIB(MIB);
  MIB.addMBB(&Dest);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildLoad(const DstOp &Dst, const SrcOp &Addr,
                            MachinePointerInfo PtrInfo, Align Alignment,
                            MachineMemOperand::Flags MMOFlags,
                            const AAMDNodes &AAInfo) {
  MMOFlags |= MachineMemOperand::MOLoad;
  assert((MMOFlags & MachineMemOperand::MOStore) == 0);
  LLT Ty = Dst.getLLTTy(*getMRI());
  MachineMemOperand *MMO =
      getMF().getMachineMemOperand(PtrInfo, MMOFlags, Ty, Alignment, AAInfo);
  return buildLoad(Dst, Addr, *MMO);
}

MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     const DstOp &Res,
                                                     const SrcOp &Addr,
                                                     MachineMemOperand &MMO) {
  assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(Opcode);
  Res.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}
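
// Load from BasePtr + Offset, deriving a memory operand for the offset
// location from BaseMMO and emitting a G_PTR_ADD when Offset is non-zero.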
MachineInstrBuilder MachineIRBuilder::buildLoadFromOffset(
    const DstOp &Dst, const SrcOp &BasePtr,
    MachineMemOperand &BaseMMO, int64_t Offset) {
  LLT LoadTy = Dst.getLLTTy(*getMRI());
  MachineMemOperand *OffsetMMO =
      getMF().getMachineMemOperand(&BaseMMO, Offset, LoadTy);
  if (Offset == 0) // This may be a size or type changing load.
    return buildLoad(Dst, BasePtr, *OffsetMMO);
  LLT PtrTy = BasePtr.getLLTTy(*getMRI());
  LLT OffsetTy = LLT::scalar(PtrTy.getSizeInBits());
  auto ConstOffset = buildConstant(OffsetTy, Offset);
  auto Ptr = buildPtrAdd(PtrTy, BasePtr, ConstOffset);
  return buildLoad(Dst, Ptr, *OffsetMMO);
}

MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
                                                 const SrcOp &Addr,
                                                 MachineMemOperand &MMO) {
  assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_STORE);
  Val.addSrcToMIB(MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildStore(const SrcOp &Val, const SrcOp &Addr,
                             MachinePointerInfo PtrInfo, Align Alignment,
                             MachineMemOperand::Flags MMOFlags,
                             const AAMDNodes &AAInfo) {
  MMOFlags |= MachineMemOperand::MOStore;
  assert((MMOFlags & MachineMemOperand::MOLoad) == 0);
  LLT Ty = Val.getLLTTy(*getMRI());
  MachineMemOperand *MMO =
      getMF().getMachineMemOperand(PtrInfo, MMOFlags, Ty, Alignment, AAInfo);
  return buildStore(Val, Addr, *MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}
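
// Pick the extension opcode that matches how the target materializes boolean
// values: sign-extend for 0/-1 booleans, zero-extend for 0/1 booleans, and
// any-extend otherwise.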
unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return TargetOpcode::G_SEXT;
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return TargetOpcode::G_ZEXT;
  default:
    return TargetOpcode::G_ANYEXT;
  }
}

MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   bool IsFP) {
  unsigned ExtOp =
      getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  return buildInstr(ExtOp, Res, Op);
}
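
// Emit ExtOpc, G_TRUNC or a plain COPY depending on whether the destination
// is wider than, narrower than, or the same size as the source.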
MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
                                                      const DstOp &Res,
                                                      const SrcOp &Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
         Res.getLLTTy(*getMRI()).isVector());
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
         Op.getLLTTy(*getMRI()).isScalar());
  unsigned Opcode = TargetOpcode::COPY;
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
      Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = ExtOpc;
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
           Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));
  return buildInstr(Opcode, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                          const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}
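
// Zero out everything above the low ImmOp bits of Op by AND-ing with a
// low-bits mask, i.e. a zero-extension performed in-register.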
MachineInstrBuilder MachineIRBuilder::buildZExtInReg(const DstOp &Res,
                                                     const SrcOp &Op,
                                                     int64_t ImmOp) {
  LLT ResTy = Res.getLLTTy(*getMRI());
  auto Mask = buildConstant(
      ResTy, APInt::getLowBitsSet(ResTy.getScalarSizeInBits(), ImmOp));
  return buildAnd(Res, Op, Mask);
}

MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
                                                const SrcOp &Src) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);
  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }
  return buildInstr(Opcode, Dst, Src);
}

MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
                                                   const SrcOp &Src,
                                                   uint64_t Index) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
#ifndef NDEBUG
  assert(SrcTy.isValid() && "invalid operand type");
  assert(DstTy.isValid() && "invalid operand type");
  assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
         "extracting off end of register");
#endif
  if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Dst, Src);
  }
  auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
  Dst.addDefToMIB(*getMRI(), Extract);
  Src.addSrcToMIB(Extract);
  Extract.addImm(Index);
  return Extract;
}
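
// If the pieces are all of one type and tile Res exactly at ascending offsets,
// emit a single G_MERGE_VALUES; otherwise fall back to a chain of G_INSERTs
// into an implicit-def value.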
void MachineIRBuilder::buildSequence(Register Res, ArrayRef<Register> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(llvm::is_sorted(Indices) &&
         "sequence offsets must be in ascending order");
  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif
  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }
  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }
  Register ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);
  for (unsigned i = 0; i < Ops.size(); ++i) {
    Register ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}

MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}

MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildMerge(const DstOp &Res,
                             std::initializer_list<SrcOp> Ops) {
  assert(Ops.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, Ops);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
                                                   const SrcOp &Op) {
  unsigned NumReg =
      Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
  SmallVector<DstOp, 8> TmpVec(NumReg, Res);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
                                                       const SrcOp &Src) {
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}
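
// Splat a scalar across Res: insert Src into lane 0 of an undef vector, then
// broadcast that lane with an all-zero shuffle mask.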
MachineInstrBuilder MachineIRBuilder::buildShuffleSplat(const DstOp &Res,
                                                        const SrcOp &Src) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  assert(Src.getLLTTy(*getMRI()) == DstTy.getElementType() &&
         "Expected Src to match Dst elt ty");
  auto UndefVec = buildUndef(DstTy);
  auto Zero = buildConstant(LLT::scalar(64), 0);
  auto InsElt = buildInsertVectorElement(DstTy, UndefVec, Src, Zero);
  SmallVector<int, 16> ZeroMask(DstTy.getNumElements());
  return buildShuffleVector(DstTy, InsElt, UndefVec, ZeroMask);
}

MachineInstrBuilder MachineIRBuilder::buildShuffleVector(const DstOp &Res,
                                                         const SrcOp &Src1,
                                                         const SrcOp &Src2,
                                                         ArrayRef<int> Mask) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  LLT Src1Ty = Src1.getLLTTy(*getMRI());
  LLT Src2Ty = Src2.getLLTTy(*getMRI());
  assert((size_t)(Src1Ty.getNumElements() + Src2Ty.getNumElements()) >=
         Mask.size());
  assert(DstTy.getElementType() == Src1Ty.getElementType() &&
         DstTy.getElementType() == Src2Ty.getElementType());
  (void)DstTy;
  (void)Src1Ty;
  (void)Src2Ty;
  ArrayRef<int> MaskAlloc = getMF().allocateShuffleMask(Mask);
  return buildInstr(TargetOpcode::G_SHUFFLE_VECTOR, {Res}, {Src1, Src2})
      .addShuffleMask(MaskAlloc);
}

MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildInsert(const DstOp &Res,
                                                  const SrcOp &Src,
                                                  const SrcOp &Op,
                                                  unsigned Index) {
  assert(Index + Op.getLLTTy(*getMRI()).getSizeInBits() <=
             Res.getLLTTy(*getMRI()).getSizeInBits() &&
         "insertion past the end of a register");
  if (Res.getLLTTy(*getMRI()).getSizeInBits() ==
      Op.getLLTTy(*getMRI()).getSizeInBits()) {
    return buildCast(Res, Op);
  }
  return buildInstr(TargetOpcode::G_INSERT, Res, {Src, Op, uint64_t(Index)});
}

MachineInstrBuilder
MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID, ArrayRef<Register> ResultRegs,
                                 bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (unsigned ResultReg : ResultRegs)
    MIB.addDef(ResultReg);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID, ArrayRef<DstOp> Results,
                                 bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (DstOp Result : Results)
    Result.addDefToMIB(*getMRI(), MIB);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1,
                                                Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1,
                                                  Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
}

MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}

MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}

MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
    Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
    Register NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif
  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
                                     Register CmpVal, Register NewVal,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif
  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(
    unsigned Opcode, const DstOp &OldValRes,
    const SrcOp &Addr, const SrcOp &Val,
    MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
  LLT AddrTy = Addr.getLLTTy(*getMRI());
  LLT ValTy = Val.getLLTTy(*getMRI());
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  assert(OldValResTy == ValTy && "type mismatch");
  assert(MMO.isAtomic() && "not atomic mem operand");
#endif
  auto MIB = buildInstr(Opcode);
  OldValRes.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  Val.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
                                                       Register Addr,
                                                       Register Val,
                                                       MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFAdd(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
  return buildInstr(TargetOpcode::G_FENCE)
      .addImm(Ordering)
      .addImm(Scope);
}

MachineInstrBuilder
MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif
  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}

void MachineIRBuilder::validateTruncExt(const LLT DstTy, const LLT SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");
  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}

void MachineIRBuilder::validateSelectOp(const LLT ResTy, const LLT TstTy,
                                        const LLT Op0Ty, const LLT Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}
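
// Generic entry point used by the typed build* helpers above: validate the
// operand counts and LLTs for the given opcode (asserts only), lower a few
// cases to more specific opcodes (e.g. G_MERGE_VALUES of vectors becomes
// G_CONCAT_VECTORS or G_BUILD_VECTOR), then emit the instruction with the
// requested flags.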
MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
                                                 ArrayRef<DstOp> DstOps,
                                                 ArrayRef<SrcOp> SrcOps,
                                                 Optional<unsigned> Flags) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_SELECT: {
    assert(DstOps.size() == 1 && "Invalid select");
    assert(SrcOps.size() == 3 && "Invalid select");
    validateSelectOp(
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_FNEG:
  case TargetOpcode::G_ABS:
    // All these are unary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateUnaryOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()));
    break;
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM:
  case TargetOpcode::G_SMIN:
  case TargetOpcode::G_SMAX:
  case TargetOpcode::G_UMIN:
  case TargetOpcode::G_UMAX:
  case TargetOpcode::G_UADDSAT:
  case TargetOpcode::G_SADDSAT:
  case TargetOpcode::G_USUBSAT:
  case TargetOpcode::G_SSUBSAT: {
    // All these are binary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()),
                     SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR:
  case TargetOpcode::G_USHLSAT:
  case TargetOpcode::G_SSHLSAT: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()),
                    SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SEXT:
  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_ANYEXT:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), true);
    break;
  case TargetOpcode::G_TRUNC:
  case TargetOpcode::G_FPTRUNC: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), false);
    break;
  }
  case TargetOpcode::G_BITCAST: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    assert(DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "invalid bitcast");
    break;
  }
  case TargetOpcode::COPY:
    assert(DstOps.size() == 1 && "Invalid Dst");
    // If the caller wants to add a subreg source it has to be done separately
    // so we may not have any SrcOps at this point yet.
    break;
  case TargetOpcode::G_FCMP:
  case TargetOpcode::G_ICMP: {
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
    // For F/ICMP, the first src operand is the predicate, followed by
    // the two comparands.
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
           "Expecting predicate");
    assert([&]() -> bool {
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
                                         : CmpInst::isFPPredicate(Pred);
    }() && "Invalid predicate");
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert([&]() -> bool {
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
        return DstTy.isScalar();
      else
        return DstTy.isVector() &&
               DstTy.getNumElements() == Op0Ty.getNumElements();
    }() && "Type Mismatch");
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    assert(!DstOps.empty() && "Invalid trivial sequence");
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
    assert(llvm::all_of(DstOps,
                        [&, this](const DstOp &Op) {
                          return Op.getLLTTy(*getMRI()) ==
                                 DstOps[0].getLLTTy(*getMRI());
                        }) &&
           "type mismatch in output list");
    assert((TypeSize::ScalarTy)DstOps.size() *
                   DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    break;
  }
  case TargetOpcode::G_MERGE_VALUES: {
    assert(!SrcOps.empty() && "invalid trivial sequence");
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(llvm::all_of(SrcOps,
                        [&, this](const SrcOp &Op) {
                          return Op.getLLTTy(*getMRI()) ==
                                 SrcOps[0].getLLTTy(*getMRI());
                        }) &&
           "type mismatch in input list");
    assert((TypeSize::ScalarTy)SrcOps.size() *
                   SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    if (SrcOps.size() == 1)
      return buildCast(DstOps[0], SrcOps[0]);
    if (DstOps[0].getLLTTy(*getMRI()).isVector()) {
      if (SrcOps[0].getLLTTy(*getMRI()).isVector())
        return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    }
    break;
  }
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid Dst size");
    assert(SrcOps.size() == 2 && "Invalid Src size");
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
           "Invalid operand type");
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
               DstOps[0].getLLTTy(*getMRI()) &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid dst size");
    assert(SrcOps.size() == 3 && "Invalid src size");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
               SrcOps[1].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(llvm::all_of(SrcOps,
                        [&, this](const SrcOp &Op) {
                          return Op.getLLTTy(*getMRI()) ==
                                 SrcOps[0].getLLTTy(*getMRI());
                        }) &&
           "type mismatch in input list");
    assert((TypeSize::ScalarTy)SrcOps.size() *
                   SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input scalars do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(llvm::all_of(SrcOps,
                        [&, this](const SrcOp &Op) {
                          return Op.getLLTTy(*getMRI()) ==
                                 SrcOps[0].getLLTTy(*getMRI());
                        }) &&
           "type mismatch in input list");
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_CONCAT_VECTORS: {
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(llvm::all_of(SrcOps,
                        [&, this](const SrcOp &Op) {
                          return (Op.getLLTTy(*getMRI()).isVector() &&
                                  Op.getLLTTy(*getMRI()) ==
                                      SrcOps[0].getLLTTy(*getMRI()));
                        }) &&
           "type mismatch in input list");
    assert((TypeSize::ScalarTy)SrcOps.size() *
                   SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input vectors do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_UADDE: {
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
           "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "type mismatch");
    break;
  }
  }
  auto MIB = buildInstr(Opc);
  for (const DstOp &Op : DstOps)
    Op.addDefToMIB(*getMRI(), MIB);
  for (const SrcOp &Op : SrcOps)
    Op.addSrcToMIB(MIB);
  if (Flags)
    MIB->setFlags(*Flags);
  return MIB;
}