
//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfoMetadata.h"

using namespace llvm;
  22. void MachineIRBuilder::setMF(MachineFunction &MF) {
  23. State.MF = &MF;
  24. State.MBB = nullptr;
  25. State.MRI = &MF.getRegInfo();
  26. State.TII = MF.getSubtarget().getInstrInfo();
  27. State.DL = DebugLoc();
  28. State.PCSections = nullptr;
  29. State.II = MachineBasicBlock::iterator();
  30. State.Observer = nullptr;
  31. }
  32. //------------------------------------------------------------------------------
  33. // Build instruction variants.
  34. //------------------------------------------------------------------------------
  35. MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  36. return BuildMI(getMF(), {getDL(), getPCSections()}, getTII().get(Opcode));
  37. }
  38. MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  39. getMBB().insert(getInsertPt(), MIB);
  40. recordInsertion(MIB);
  41. return MIB;
  42. }
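// The DBG_VALUE builders below describe a debug variable's location as a
// register, an indirect register, a frame index, or a constant, together
// with its DILocalVariable and DIExpression metadata.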
  43. MachineInstrBuilder
  44. MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
  45. const MDNode *Expr) {
  46. assert(isa<DILocalVariable>(Variable) && "not a variable");
  47. assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  48. assert(
  49. cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
  50. "Expected inlined-at fields to agree");
  51. return insertInstr(BuildMI(getMF(), getDL(),
  52. getTII().get(TargetOpcode::DBG_VALUE),
  53. /*IsIndirect*/ false, Reg, Variable, Expr));
  54. }
  55. MachineInstrBuilder
  56. MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
  57. const MDNode *Expr) {
  58. assert(isa<DILocalVariable>(Variable) && "not a variable");
  59. assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  60. assert(
  61. cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
  62. "Expected inlined-at fields to agree");
  63. return insertInstr(BuildMI(getMF(), getDL(),
  64. getTII().get(TargetOpcode::DBG_VALUE),
  65. /*IsIndirect*/ true, Reg, Variable, Expr));
  66. }
  67. MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
  68. const MDNode *Variable,
  69. const MDNode *Expr) {
  70. assert(isa<DILocalVariable>(Variable) && "not a variable");
  71. assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  72. assert(
  73. cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
  74. "Expected inlined-at fields to agree");
  75. return buildInstr(TargetOpcode::DBG_VALUE)
  76. .addFrameIndex(FI)
  77. .addImm(0)
  78. .addMetadata(Variable)
  79. .addMetadata(Expr);
  80. }
  81. MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
  82. const MDNode *Variable,
  83. const MDNode *Expr) {
  84. assert(isa<DILocalVariable>(Variable) && "not a variable");
  85. assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  86. assert(
  87. cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
  88. "Expected inlined-at fields to agree");
  89. auto MIB = buildInstrNoInsert(TargetOpcode::DBG_VALUE);
  90. auto *NumericConstant = [&] () -> const Constant* {
  91. if (const auto *CE = dyn_cast<ConstantExpr>(&C))
  92. if (CE->getOpcode() == Instruction::IntToPtr)
  93. return CE->getOperand(0);
  94. return &C;
  95. }();
  96. if (auto *CI = dyn_cast<ConstantInt>(NumericConstant)) {
  97. if (CI->getBitWidth() > 64)
  98. MIB.addCImm(CI);
  99. else
  100. MIB.addImm(CI->getZExtValue());
  101. } else if (auto *CFP = dyn_cast<ConstantFP>(NumericConstant)) {
  102. MIB.addFPImm(CFP);
  103. } else if (isa<ConstantPointerNull>(NumericConstant)) {
  104. MIB.addImm(0);
  105. } else {
  106. // Insert $noreg if we didn't find a usable constant and had to drop it.
  107. MIB.addReg(Register());
  108. }
  109. MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
  110. return insertInstr(MIB);
  111. }
  112. MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  113. assert(isa<DILabel>(Label) && "not a label");
  114. assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
  115. "Expected inlined-at fields to agree");
  116. auto MIB = buildInstr(TargetOpcode::DBG_LABEL);
  117. return MIB.addMetadata(Label);
  118. }
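// G_DYN_STACKALLOC: Res is a pointer to the dynamically sized stack
// allocation; the requested alignment travels as an immediate operand.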
  119. MachineInstrBuilder MachineIRBuilder::buildDynStackAlloc(const DstOp &Res,
  120. const SrcOp &Size,
  121. Align Alignment) {
  122. assert(Res.getLLTTy(*getMRI()).isPointer() && "expected ptr dst type");
  123. auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
  124. Res.addDefToMIB(*getMRI(), MIB);
  125. Size.addSrcToMIB(MIB);
  126. MIB.addImm(Alignment.value());
  127. return MIB;
  128. }
  129. MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
  130. int Idx) {
  131. assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  132. auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  133. Res.addDefToMIB(*getMRI(), MIB);
  134. MIB.addFrameIndex(Idx);
  135. return MIB;
  136. }
  137. MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
  138. const GlobalValue *GV) {
  139. assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  140. assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
  141. GV->getType()->getAddressSpace() &&
  142. "address space mismatch");
  143. auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
  144. Res.addDefToMIB(*getMRI(), MIB);
  145. MIB.addGlobalAddress(GV);
  146. return MIB;
  147. }
  148. MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
  149. unsigned JTI) {
  150. return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
  151. .addJumpTableIndex(JTI);
  152. }
  153. void MachineIRBuilder::validateUnaryOp(const LLT Res, const LLT Op0) {
  154. assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  155. assert((Res == Op0) && "type mismatch");
  156. }
  157. void MachineIRBuilder::validateBinaryOp(const LLT Res, const LLT Op0,
  158. const LLT Op1) {
  159. assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  160. assert((Res == Op0 && Res == Op1) && "type mismatch");
  161. }
  162. void MachineIRBuilder::validateShiftOp(const LLT Res, const LLT Op0,
  163. const LLT Op1) {
  164. assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  165. assert((Res == Op0) && "type mismatch");
  166. }
  167. MachineInstrBuilder MachineIRBuilder::buildPtrAdd(const DstOp &Res,
  168. const SrcOp &Op0,
  169. const SrcOp &Op1) {
  170. assert(Res.getLLTTy(*getMRI()).getScalarType().isPointer() &&
  171. Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
  172. assert(Op1.getLLTTy(*getMRI()).getScalarType().isScalar() && "invalid offset type");
  173. return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1});
  174. }
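// Unlike buildPtrAdd, materializePtrAdd may emit no instruction at all: a
// zero offset simply reuses Op0 as the result and returns std::nullopt.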
  175. std::optional<MachineInstrBuilder>
  176. MachineIRBuilder::materializePtrAdd(Register &Res, Register Op0,
  177. const LLT ValueTy, uint64_t Value) {
  178. assert(Res == 0 && "Res is a result argument");
  179. assert(ValueTy.isScalar() && "invalid offset type");
  180. if (Value == 0) {
  181. Res = Op0;
  182. return std::nullopt;
  183. }
  184. Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  185. auto Cst = buildConstant(ValueTy, Value);
  186. return buildPtrAdd(Res, Op0, Cst.getReg(0));
  187. }
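// Clear the NumBits least significant bits of Op0 by G_PTRMASK'ing it with a
// constant whose trailing NumBits bits are zero.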
  188. MachineInstrBuilder MachineIRBuilder::buildMaskLowPtrBits(const DstOp &Res,
  189. const SrcOp &Op0,
  190. uint32_t NumBits) {
  191. LLT PtrTy = Res.getLLTTy(*getMRI());
  192. LLT MaskTy = LLT::scalar(PtrTy.getSizeInBits());
  193. Register MaskReg = getMRI()->createGenericVirtualRegister(MaskTy);
  194. buildConstant(MaskReg, maskTrailingZeros<uint64_t>(NumBits));
  195. return buildPtrMask(Res, Op0, MaskReg);
  196. }
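// Widen Op0 to the result vector type by unmerging its elements and padding
// the tail with G_IMPLICIT_DEF elements before re-merging.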
  197. MachineInstrBuilder
  198. MachineIRBuilder::buildPadVectorWithUndefElements(const DstOp &Res,
  199. const SrcOp &Op0) {
  200. LLT ResTy = Res.getLLTTy(*getMRI());
  201. LLT Op0Ty = Op0.getLLTTy(*getMRI());
  202. assert((ResTy.isVector() && Op0Ty.isVector()) && "Non vector type");
  203. assert((ResTy.getElementType() == Op0Ty.getElementType()) &&
  204. "Different vector element types");
  205. assert((ResTy.getNumElements() > Op0Ty.getNumElements()) &&
  206. "Op0 has more elements");
  207. auto Unmerge = buildUnmerge(Op0Ty.getElementType(), Op0);
  208. SmallVector<Register, 8> Regs;
  209. for (auto Op : Unmerge.getInstr()->defs())
  210. Regs.push_back(Op.getReg());
  211. Register Undef = buildUndef(Op0Ty.getElementType()).getReg(0);
  212. unsigned NumberOfPadElts = ResTy.getNumElements() - Regs.size();
  213. for (unsigned i = 0; i < NumberOfPadElts; ++i)
  214. Regs.push_back(Undef);
  215. return buildMergeLikeInstr(Res, Regs);
  216. }
  217. MachineInstrBuilder
  218. MachineIRBuilder::buildDeleteTrailingVectorElements(const DstOp &Res,
  219. const SrcOp &Op0) {
  220. LLT ResTy = Res.getLLTTy(*getMRI());
  221. LLT Op0Ty = Op0.getLLTTy(*getMRI());
  222. assert((ResTy.isVector() && Op0Ty.isVector()) && "Non vector type");
  223. assert((ResTy.getElementType() == Op0Ty.getElementType()) &&
  224. "Different vector element types");
  225. assert((ResTy.getNumElements() < Op0Ty.getNumElements()) &&
  226. "Op0 has fewer elements");
  227. SmallVector<Register, 8> Regs;
  228. auto Unmerge = buildUnmerge(Op0Ty.getElementType(), Op0);
  229. for (unsigned i = 0; i < ResTy.getNumElements(); ++i)
  230. Regs.push_back(Unmerge.getReg(i));
  231. return buildMergeLikeInstr(Res, Regs);
  232. }
  233. MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  234. return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
  235. }
  236. MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
  237. assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  238. return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
  239. }
  240. MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
  241. unsigned JTI,
  242. Register IndexReg) {
  243. assert(getMRI()->getType(TablePtr).isPointer() &&
  244. "Table reg must be a pointer");
  245. return buildInstr(TargetOpcode::G_BRJT)
  246. .addUse(TablePtr)
  247. .addJumpTableIndex(JTI)
  248. .addUse(IndexReg);
  249. }
  250. MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
  251. const SrcOp &Op) {
  252. return buildInstr(TargetOpcode::COPY, Res, Op);
  253. }
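// Scalar destinations get a G_CONSTANT directly; vector destinations get a
// scalar G_CONSTANT of the element type that is then splatted via
// G_BUILD_VECTOR.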
  254. MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
  255. const ConstantInt &Val) {
  256. LLT Ty = Res.getLLTTy(*getMRI());
  257. LLT EltTy = Ty.getScalarType();
  258. assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
  259. "creating constant with the wrong size");
  260. if (Ty.isVector()) {
  261. auto Const = buildInstr(TargetOpcode::G_CONSTANT)
  262. .addDef(getMRI()->createGenericVirtualRegister(EltTy))
  263. .addCImm(&Val);
  264. return buildSplatVector(Res, Const);
  265. }
  266. auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  267. Const->setDebugLoc(DebugLoc());
  268. Res.addDefToMIB(*getMRI(), Const);
  269. Const.addCImm(&Val);
  270. return Const;
  271. }
  272. MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
  273. int64_t Val) {
  274. auto IntN = IntegerType::get(getMF().getFunction().getContext(),
  275. Res.getLLTTy(*getMRI()).getScalarSizeInBits());
  276. ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  277. return buildConstant(Res, *CI);
  278. }
  279. MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
  280. const ConstantFP &Val) {
  281. LLT Ty = Res.getLLTTy(*getMRI());
  282. LLT EltTy = Ty.getScalarType();
  283. assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics())
  284. == EltTy.getSizeInBits() &&
  285. "creating fconstant with the wrong size");
  286. assert(!Ty.isPointer() && "invalid operand type");
  287. if (Ty.isVector()) {
  288. auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
  289. .addDef(getMRI()->createGenericVirtualRegister(EltTy))
  290. .addFPImm(&Val);
  291. return buildSplatVector(Res, Const);
  292. }
  293. auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  294. Const->setDebugLoc(DebugLoc());
  295. Res.addDefToMIB(*getMRI(), Const);
  296. Const.addFPImm(&Val);
  297. return Const;
  298. }
  299. MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
  300. const APInt &Val) {
  301. ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
  302. return buildConstant(Res, *CI);
  303. }
  304. MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
  305. double Val) {
  306. LLT DstTy = Res.getLLTTy(*getMRI());
  307. auto &Ctx = getMF().getFunction().getContext();
  308. auto *CFP =
  309. ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  310. return buildFConstant(Res, *CFP);
  311. }
  312. MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
  313. const APFloat &Val) {
  314. auto &Ctx = getMF().getFunction().getContext();
  315. auto *CFP = ConstantFP::get(Ctx, Val);
  316. return buildFConstant(Res, *CFP);
  317. }
  318. MachineInstrBuilder MachineIRBuilder::buildBrCond(const SrcOp &Tst,
  319. MachineBasicBlock &Dest) {
  320. assert(Tst.getLLTTy(*getMRI()).isScalar() && "invalid operand type");
  321. auto MIB = buildInstr(TargetOpcode::G_BRCOND);
  322. Tst.addSrcToMIB(MIB);
  323. MIB.addMBB(&Dest);
  324. return MIB;
  325. }
  326. MachineInstrBuilder
  327. MachineIRBuilder::buildLoad(const DstOp &Dst, const SrcOp &Addr,
  328. MachinePointerInfo PtrInfo, Align Alignment,
  329. MachineMemOperand::Flags MMOFlags,
  330. const AAMDNodes &AAInfo) {
  331. MMOFlags |= MachineMemOperand::MOLoad;
  332. assert((MMOFlags & MachineMemOperand::MOStore) == 0);
  333. LLT Ty = Dst.getLLTTy(*getMRI());
  334. MachineMemOperand *MMO =
  335. getMF().getMachineMemOperand(PtrInfo, MMOFlags, Ty, Alignment, AAInfo);
  336. return buildLoad(Dst, Addr, *MMO);
  337. }
  338. MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
  339. const DstOp &Res,
  340. const SrcOp &Addr,
  341. MachineMemOperand &MMO) {
  342. assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  343. assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  344. auto MIB = buildInstr(Opcode);
  345. Res.addDefToMIB(*getMRI(), MIB);
  346. Addr.addSrcToMIB(MIB);
  347. MIB.addMemOperand(&MMO);
  348. return MIB;
  349. }
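// Load from BasePtr + Offset, deriving the offset MMO from BaseMMO. A zero
// offset loads straight through BasePtr (possibly with a different load type
// than the original access).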
  350. MachineInstrBuilder MachineIRBuilder::buildLoadFromOffset(
  351. const DstOp &Dst, const SrcOp &BasePtr,
  352. MachineMemOperand &BaseMMO, int64_t Offset) {
  353. LLT LoadTy = Dst.getLLTTy(*getMRI());
  354. MachineMemOperand *OffsetMMO =
  355. getMF().getMachineMemOperand(&BaseMMO, Offset, LoadTy);
  356. if (Offset == 0) // This may be a size or type changing load.
  357. return buildLoad(Dst, BasePtr, *OffsetMMO);
  358. LLT PtrTy = BasePtr.getLLTTy(*getMRI());
  359. LLT OffsetTy = LLT::scalar(PtrTy.getSizeInBits());
  360. auto ConstOffset = buildConstant(OffsetTy, Offset);
  361. auto Ptr = buildPtrAdd(PtrTy, BasePtr, ConstOffset);
  362. return buildLoad(Dst, Ptr, *OffsetMMO);
  363. }
  364. MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
  365. const SrcOp &Addr,
  366. MachineMemOperand &MMO) {
  367. assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  368. assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  369. auto MIB = buildInstr(TargetOpcode::G_STORE);
  370. Val.addSrcToMIB(MIB);
  371. Addr.addSrcToMIB(MIB);
  372. MIB.addMemOperand(&MMO);
  373. return MIB;
  374. }
  375. MachineInstrBuilder
  376. MachineIRBuilder::buildStore(const SrcOp &Val, const SrcOp &Addr,
  377. MachinePointerInfo PtrInfo, Align Alignment,
  378. MachineMemOperand::Flags MMOFlags,
  379. const AAMDNodes &AAInfo) {
  380. MMOFlags |= MachineMemOperand::MOStore;
  381. assert((MMOFlags & MachineMemOperand::MOLoad) == 0);
  382. LLT Ty = Val.getLLTTy(*getMRI());
  383. MachineMemOperand *MMO =
  384. getMF().getMachineMemOperand(PtrInfo, MMOFlags, Ty, Alignment, AAInfo);
  385. return buildStore(Val, Addr, *MMO);
  386. }
  387. MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
  388. const SrcOp &Op) {
  389. return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
  390. }
  391. MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
  392. const SrcOp &Op) {
  393. return buildInstr(TargetOpcode::G_SEXT, Res, Op);
  394. }
  395. MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
  396. const SrcOp &Op) {
  397. return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
  398. }
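// Map the target's boolean contents to an extension opcode: sign extension
// for ZeroOrNegativeOne, zero extension for ZeroOrOne, and G_ANYEXT
// otherwise.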
  399. unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  400. const auto *TLI = getMF().getSubtarget().getTargetLowering();
  401. switch (TLI->getBooleanContents(IsVec, IsFP)) {
  402. case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
  403. return TargetOpcode::G_SEXT;
  404. case TargetLoweringBase::ZeroOrOneBooleanContent:
  405. return TargetOpcode::G_ZEXT;
  406. default:
  407. return TargetOpcode::G_ANYEXT;
  408. }
  409. }
  410. MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
  411. const SrcOp &Op,
  412. bool IsFP) {
  413. unsigned ExtOp = getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  414. return buildInstr(ExtOp, Res, Op);
  415. }
  416. MachineInstrBuilder MachineIRBuilder::buildBoolExtInReg(const DstOp &Res,
  417. const SrcOp &Op,
  418. bool IsVector,
  419. bool IsFP) {
  420. const auto *TLI = getMF().getSubtarget().getTargetLowering();
  421. switch (TLI->getBooleanContents(IsVector, IsFP)) {
  422. case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
  423. return buildSExtInReg(Res, Op, 1);
  424. case TargetLoweringBase::ZeroOrOneBooleanContent:
  425. return buildZExtInReg(Res, Op, 1);
  426. case TargetLoweringBase::UndefinedBooleanContent:
  427. return buildCopy(Res, Op);
  428. }
  429. llvm_unreachable("unexpected BooleanContent");
  430. }
  431. MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
  432. const DstOp &Res,
  433. const SrcOp &Op) {
  434. assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
  435. TargetOpcode::G_SEXT == ExtOpc) &&
  436. "Expecting Extending Opc");
  437. assert(Res.getLLTTy(*getMRI()).isScalar() ||
  438. Res.getLLTTy(*getMRI()).isVector());
  439. assert(Res.getLLTTy(*getMRI()).isScalar() ==
  440. Op.getLLTTy(*getMRI()).isScalar());
  441. unsigned Opcode = TargetOpcode::COPY;
  442. if (Res.getLLTTy(*getMRI()).getSizeInBits() >
  443. Op.getLLTTy(*getMRI()).getSizeInBits())
  444. Opcode = ExtOpc;
  445. else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
  446. Op.getLLTTy(*getMRI()).getSizeInBits())
  447. Opcode = TargetOpcode::G_TRUNC;
  448. else
  449. assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));
  450. return buildInstr(Opcode, Res, Op);
  451. }
  452. MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
  453. const SrcOp &Op) {
  454. return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
  455. }
  456. MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
  457. const SrcOp &Op) {
  458. return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
  459. }
  460. MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
  461. const SrcOp &Op) {
  462. return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
  463. }
  464. MachineInstrBuilder MachineIRBuilder::buildZExtInReg(const DstOp &Res,
  465. const SrcOp &Op,
  466. int64_t ImmOp) {
  467. LLT ResTy = Res.getLLTTy(*getMRI());
  468. auto Mask = buildConstant(
  469. ResTy, APInt::getLowBitsSet(ResTy.getScalarSizeInBits(), ImmOp));
  470. return buildAnd(Res, Op, Mask);
  471. }
  472. MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
  473. const SrcOp &Src) {
  474. LLT SrcTy = Src.getLLTTy(*getMRI());
  475. LLT DstTy = Dst.getLLTTy(*getMRI());
  476. if (SrcTy == DstTy)
  477. return buildCopy(Dst, Src);
  478. unsigned Opcode;
  479. if (SrcTy.isPointer() && DstTy.isScalar())
  480. Opcode = TargetOpcode::G_PTRTOINT;
  481. else if (DstTy.isPointer() && SrcTy.isScalar())
  482. Opcode = TargetOpcode::G_INTTOPTR;
  483. else {
assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
  485. Opcode = TargetOpcode::G_BITCAST;
  486. }
  487. return buildInstr(Opcode, Dst, Src);
  488. }
  489. MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
  490. const SrcOp &Src,
  491. uint64_t Index) {
  492. LLT SrcTy = Src.getLLTTy(*getMRI());
  493. LLT DstTy = Dst.getLLTTy(*getMRI());
  494. #ifndef NDEBUG
  495. assert(SrcTy.isValid() && "invalid operand type");
  496. assert(DstTy.isValid() && "invalid operand type");
  497. assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
  498. "extracting off end of register");
  499. #endif
  500. if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
  501. assert(Index == 0 && "insertion past the end of a register");
  502. return buildCast(Dst, Src);
  503. }
  504. auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
  505. Dst.addDefToMIB(*getMRI(), Extract);
  506. Src.addSrcToMIB(Extract);
  507. Extract.addImm(Index);
  508. return Extract;
  509. }
  510. MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  511. return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
  512. }
  513. MachineInstrBuilder MachineIRBuilder::buildMergeValues(const DstOp &Res,
  514. ArrayRef<Register> Ops) {
  515. // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<SrcOp>,
  516. // we need some temporary storage for the DstOp objects. Here we use a
  517. // sufficiently large SmallVector to not go through the heap.
  518. SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  519. assert(TmpVec.size() > 1);
  520. return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
  521. }
  522. MachineInstrBuilder
  523. MachineIRBuilder::buildMergeLikeInstr(const DstOp &Res,
  524. ArrayRef<Register> Ops) {
  525. // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<SrcOp>,
  526. // we need some temporary storage for the DstOp objects. Here we use a
  527. // sufficiently large SmallVector to not go through the heap.
  528. SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  529. assert(TmpVec.size() > 1);
  530. return buildInstr(getOpcodeForMerge(Res, TmpVec), Res, TmpVec);
  531. }
  532. MachineInstrBuilder
  533. MachineIRBuilder::buildMergeLikeInstr(const DstOp &Res,
  534. std::initializer_list<SrcOp> Ops) {
  535. assert(Ops.size() > 1);
  536. return buildInstr(getOpcodeForMerge(Res, Ops), Res, Ops);
  537. }
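// Choose the merge-like opcode from the operand types: a vector built from
// vectors uses G_CONCAT_VECTORS, a vector built from scalars uses
// G_BUILD_VECTOR, and a scalar result uses G_MERGE_VALUES.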
  538. unsigned MachineIRBuilder::getOpcodeForMerge(const DstOp &DstOp,
  539. ArrayRef<SrcOp> SrcOps) const {
  540. if (DstOp.getLLTTy(*getMRI()).isVector()) {
  541. if (SrcOps[0].getLLTTy(*getMRI()).isVector())
  542. return TargetOpcode::G_CONCAT_VECTORS;
  543. return TargetOpcode::G_BUILD_VECTOR;
  544. }
  545. return TargetOpcode::G_MERGE_VALUES;
  546. }
  547. MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
  548. const SrcOp &Op) {
  549. // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  550. // we need some temporary storage for the DstOp objects. Here we use a
  551. // sufficiently large SmallVector to not go through the heap.
  552. SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  553. assert(TmpVec.size() > 1);
  554. return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
  555. }
  556. MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
  557. const SrcOp &Op) {
  558. unsigned NumReg = Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
  559. SmallVector<DstOp, 8> TmpVec(NumReg, Res);
  560. return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
  561. }
  562. MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
  563. const SrcOp &Op) {
  564. // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
  565. // we need some temporary storage for the DstOp objects. Here we use a
  566. // sufficiently large SmallVector to not go through the heap.
  567. SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  568. assert(TmpVec.size() > 1);
  569. return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
  570. }
  571. MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
  572. ArrayRef<Register> Ops) {
  573. // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  574. // we need some temporary storage for the DstOp objects. Here we use a
  575. // sufficiently large SmallVector to not go through the heap.
  576. SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  577. return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
  578. }
  579. MachineInstrBuilder
  580. MachineIRBuilder::buildBuildVectorConstant(const DstOp &Res,
  581. ArrayRef<APInt> Ops) {
  582. SmallVector<SrcOp> TmpVec;
  583. TmpVec.reserve(Ops.size());
  584. LLT EltTy = Res.getLLTTy(*getMRI()).getElementType();
  585. for (const auto &Op : Ops)
  586. TmpVec.push_back(buildConstant(EltTy, Op));
  587. return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
  588. }
  589. MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
  590. const SrcOp &Src) {
  591. SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  592. return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
  593. }
  594. MachineInstrBuilder
  595. MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
  596. ArrayRef<Register> Ops) {
  597. // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  598. // we need some temporary storage for the DstOp objects. Here we use a
  599. // sufficiently large SmallVector to not go through the heap.
  600. SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  601. if (TmpVec[0].getLLTTy(*getMRI()).getSizeInBits() ==
  602. Res.getLLTTy(*getMRI()).getElementType().getSizeInBits())
  603. return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
  604. return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
  605. }
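// Splat Src by inserting it into lane 0 of an undef vector and shuffling
// that vector with an all-zero mask.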
  606. MachineInstrBuilder MachineIRBuilder::buildShuffleSplat(const DstOp &Res,
  607. const SrcOp &Src) {
  608. LLT DstTy = Res.getLLTTy(*getMRI());
  609. assert(Src.getLLTTy(*getMRI()) == DstTy.getElementType() &&
  610. "Expected Src to match Dst elt ty");
  611. auto UndefVec = buildUndef(DstTy);
  612. auto Zero = buildConstant(LLT::scalar(64), 0);
  613. auto InsElt = buildInsertVectorElement(DstTy, UndefVec, Src, Zero);
  614. SmallVector<int, 16> ZeroMask(DstTy.getNumElements());
  615. return buildShuffleVector(DstTy, InsElt, UndefVec, ZeroMask);
  616. }
  617. MachineInstrBuilder MachineIRBuilder::buildShuffleVector(const DstOp &Res,
  618. const SrcOp &Src1,
  619. const SrcOp &Src2,
  620. ArrayRef<int> Mask) {
  621. LLT DstTy = Res.getLLTTy(*getMRI());
  622. LLT Src1Ty = Src1.getLLTTy(*getMRI());
  623. LLT Src2Ty = Src2.getLLTTy(*getMRI());
  624. assert((size_t)(Src1Ty.getNumElements() + Src2Ty.getNumElements()) >=
  625. Mask.size());
  626. assert(DstTy.getElementType() == Src1Ty.getElementType() &&
  627. DstTy.getElementType() == Src2Ty.getElementType());
  628. (void)DstTy;
  629. (void)Src1Ty;
  630. (void)Src2Ty;
  631. ArrayRef<int> MaskAlloc = getMF().allocateShuffleMask(Mask);
  632. return buildInstr(TargetOpcode::G_SHUFFLE_VECTOR, {Res}, {Src1, Src2})
  633. .addShuffleMask(MaskAlloc);
  634. }
  635. MachineInstrBuilder
  636. MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
  637. // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  638. // we need some temporary storage for the DstOp objects. Here we use a
  639. // sufficiently large SmallVector to not go through the heap.
  640. SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  641. return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
  642. }
  643. MachineInstrBuilder MachineIRBuilder::buildInsert(const DstOp &Res,
  644. const SrcOp &Src,
  645. const SrcOp &Op,
  646. unsigned Index) {
  647. assert(Index + Op.getLLTTy(*getMRI()).getSizeInBits() <=
  648. Res.getLLTTy(*getMRI()).getSizeInBits() &&
  649. "insertion past the end of a register");
  650. if (Res.getLLTTy(*getMRI()).getSizeInBits() ==
  651. Op.getLLTTy(*getMRI()).getSizeInBits()) {
  652. return buildCast(Res, Op);
  653. }
  654. return buildInstr(TargetOpcode::G_INSERT, Res, {Src, Op, uint64_t(Index)});
  655. }
  656. MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
  657. ArrayRef<Register> ResultRegs,
  658. bool HasSideEffects) {
  659. auto MIB =
  660. buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
  661. : TargetOpcode::G_INTRINSIC);
  662. for (unsigned ResultReg : ResultRegs)
  663. MIB.addDef(ResultReg);
  664. MIB.addIntrinsicID(ID);
  665. return MIB;
  666. }
  667. MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
  668. ArrayRef<DstOp> Results,
  669. bool HasSideEffects) {
  670. auto MIB =
  671. buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
  672. : TargetOpcode::G_INTRINSIC);
  673. for (DstOp Result : Results)
  674. Result.addDefToMIB(*getMRI(), MIB);
  675. MIB.addIntrinsicID(ID);
  676. return MIB;
  677. }
  678. MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
  679. const SrcOp &Op) {
  680. return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
  681. }
  682. MachineInstrBuilder
  683. MachineIRBuilder::buildFPTrunc(const DstOp &Res, const SrcOp &Op,
  684. std::optional<unsigned> Flags) {
  685. return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
  686. }
  687. MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
  688. const DstOp &Res,
  689. const SrcOp &Op0,
  690. const SrcOp &Op1) {
  691. return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
  692. }
  693. MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
  694. const DstOp &Res,
  695. const SrcOp &Op0,
  696. const SrcOp &Op1,
  697. std::optional<unsigned> Flags) {
  698. return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
  699. }
  700. MachineInstrBuilder
  701. MachineIRBuilder::buildSelect(const DstOp &Res, const SrcOp &Tst,
  702. const SrcOp &Op0, const SrcOp &Op1,
  703. std::optional<unsigned> Flags) {
  704. return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
  705. }
  706. MachineInstrBuilder
  707. MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
  708. const SrcOp &Elt, const SrcOp &Idx) {
  709. return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
  710. }
  711. MachineInstrBuilder
  712. MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
  713. const SrcOp &Idx) {
  714. return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
  715. }
  716. MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
  717. Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
  718. Register NewVal, MachineMemOperand &MMO) {
  719. #ifndef NDEBUG
  720. LLT OldValResTy = getMRI()->getType(OldValRes);
  721. LLT SuccessResTy = getMRI()->getType(SuccessRes);
  722. LLT AddrTy = getMRI()->getType(Addr);
  723. LLT CmpValTy = getMRI()->getType(CmpVal);
  724. LLT NewValTy = getMRI()->getType(NewVal);
  725. assert(OldValResTy.isScalar() && "invalid operand type");
  726. assert(SuccessResTy.isScalar() && "invalid operand type");
  727. assert(AddrTy.isPointer() && "invalid operand type");
  728. assert(CmpValTy.isValid() && "invalid operand type");
  729. assert(NewValTy.isValid() && "invalid operand type");
  730. assert(OldValResTy == CmpValTy && "type mismatch");
  731. assert(OldValResTy == NewValTy && "type mismatch");
  732. #endif
  733. return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
  734. .addDef(OldValRes)
  735. .addDef(SuccessRes)
  736. .addUse(Addr)
  737. .addUse(CmpVal)
  738. .addUse(NewVal)
  739. .addMemOperand(&MMO);
  740. }
  741. MachineInstrBuilder
  742. MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
  743. Register CmpVal, Register NewVal,
  744. MachineMemOperand &MMO) {
  745. #ifndef NDEBUG
  746. LLT OldValResTy = getMRI()->getType(OldValRes);
  747. LLT AddrTy = getMRI()->getType(Addr);
  748. LLT CmpValTy = getMRI()->getType(CmpVal);
  749. LLT NewValTy = getMRI()->getType(NewVal);
  750. assert(OldValResTy.isScalar() && "invalid operand type");
  751. assert(AddrTy.isPointer() && "invalid operand type");
  752. assert(CmpValTy.isValid() && "invalid operand type");
  753. assert(NewValTy.isValid() && "invalid operand type");
  754. assert(OldValResTy == CmpValTy && "type mismatch");
  755. assert(OldValResTy == NewValTy && "type mismatch");
  756. #endif
  757. return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
  758. .addDef(OldValRes)
  759. .addUse(Addr)
  760. .addUse(CmpVal)
  761. .addUse(NewVal)
  762. .addMemOperand(&MMO);
  763. }
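// Common helper for the G_ATOMICRMW_* builders below; the memory operand is
// required to be atomic.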
  764. MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(
  765. unsigned Opcode, const DstOp &OldValRes,
  766. const SrcOp &Addr, const SrcOp &Val,
  767. MachineMemOperand &MMO) {
  768. #ifndef NDEBUG
  769. LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
  770. LLT AddrTy = Addr.getLLTTy(*getMRI());
  771. LLT ValTy = Val.getLLTTy(*getMRI());
  772. assert(OldValResTy.isScalar() && "invalid operand type");
  773. assert(AddrTy.isPointer() && "invalid operand type");
  774. assert(ValTy.isValid() && "invalid operand type");
  775. assert(OldValResTy == ValTy && "type mismatch");
  776. assert(MMO.isAtomic() && "not atomic mem operand");
  777. #endif
  778. auto MIB = buildInstr(Opcode);
  779. OldValRes.addDefToMIB(*getMRI(), MIB);
  780. Addr.addSrcToMIB(MIB);
  781. Val.addSrcToMIB(MIB);
  782. MIB.addMemOperand(&MMO);
  783. return MIB;
  784. }
  785. MachineInstrBuilder
  786. MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
  787. Register Val, MachineMemOperand &MMO) {
  788. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
  789. MMO);
  790. }
  791. MachineInstrBuilder
  792. MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
  793. Register Val, MachineMemOperand &MMO) {
  794. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
  795. MMO);
  796. }
  797. MachineInstrBuilder
  798. MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
  799. Register Val, MachineMemOperand &MMO) {
  800. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
  801. MMO);
  802. }
  803. MachineInstrBuilder
  804. MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
  805. Register Val, MachineMemOperand &MMO) {
  806. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
  807. MMO);
  808. }
  809. MachineInstrBuilder
  810. MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
  811. Register Val, MachineMemOperand &MMO) {
  812. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
  813. MMO);
  814. }
  815. MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
  816. Register Addr,
  817. Register Val,
  818. MachineMemOperand &MMO) {
  819. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
  820. MMO);
  821. }
  822. MachineInstrBuilder
  823. MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
  824. Register Val, MachineMemOperand &MMO) {
  825. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
  826. MMO);
  827. }
  828. MachineInstrBuilder
  829. MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
  830. Register Val, MachineMemOperand &MMO) {
  831. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
  832. MMO);
  833. }
  834. MachineInstrBuilder
  835. MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
  836. Register Val, MachineMemOperand &MMO) {
  837. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
  838. MMO);
  839. }
  840. MachineInstrBuilder
  841. MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
  842. Register Val, MachineMemOperand &MMO) {
  843. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
  844. MMO);
  845. }
  846. MachineInstrBuilder
  847. MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
  848. Register Val, MachineMemOperand &MMO) {
  849. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
  850. MMO);
  851. }
  852. MachineInstrBuilder
  853. MachineIRBuilder::buildAtomicRMWFAdd(
  854. const DstOp &OldValRes, const SrcOp &Addr, const SrcOp &Val,
  855. MachineMemOperand &MMO) {
  856. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
  857. MMO);
  858. }
  859. MachineInstrBuilder
  860. MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr, const SrcOp &Val,
  861. MachineMemOperand &MMO) {
  862. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
  863. MMO);
  864. }
  865. MachineInstrBuilder
  866. MachineIRBuilder::buildAtomicRMWFMax(const DstOp &OldValRes, const SrcOp &Addr,
  867. const SrcOp &Val, MachineMemOperand &MMO) {
  868. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FMAX, OldValRes, Addr, Val,
  869. MMO);
  870. }
  871. MachineInstrBuilder
  872. MachineIRBuilder::buildAtomicRMWFMin(const DstOp &OldValRes, const SrcOp &Addr,
  873. const SrcOp &Val, MachineMemOperand &MMO) {
  874. return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FMIN, OldValRes, Addr, Val,
  875. MMO);
  876. }
  877. MachineInstrBuilder
  878. MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
  879. return buildInstr(TargetOpcode::G_FENCE)
  880. .addImm(Ordering)
  881. .addImm(Scope);
  882. }
  883. MachineInstrBuilder
  884. MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
  885. #ifndef NDEBUG
  886. assert(getMRI()->getType(Res).isPointer() && "invalid res type");
  887. #endif
  888. return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
  889. }
  890. void MachineIRBuilder::validateTruncExt(const LLT DstTy, const LLT SrcTy,
  891. bool IsExtend) {
  892. #ifndef NDEBUG
  893. if (DstTy.isVector()) {
  894. assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
  895. assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
  896. "different number of elements in a trunc/ext");
  897. } else
  898. assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");
  899. if (IsExtend)
  900. assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
  901. "invalid narrowing extend");
  902. else
  903. assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
  904. "invalid widening trunc");
  905. #endif
  906. }
  907. void MachineIRBuilder::validateSelectOp(const LLT ResTy, const LLT TstTy,
  908. const LLT Op0Ty, const LLT Op1Ty) {
  909. #ifndef NDEBUG
  910. assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
  911. "invalid operand type");
  912. assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  913. if (ResTy.isScalar() || ResTy.isPointer())
  914. assert(TstTy.isScalar() && "type mismatch");
  915. else
  916. assert((TstTy.isScalar() ||
  917. (TstTy.isVector() &&
  918. TstTy.getNumElements() == Op0Ty.getNumElements())) &&
  919. "type mismatch");
  920. #endif
  921. }
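// Generic entry point for instruction creation: validate operand counts and
// types for the opcodes handled below (asserts only, compiled out in NDEBUG),
// then append the defs, uses, and optional MI flags.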
  922. MachineInstrBuilder
  923. MachineIRBuilder::buildInstr(unsigned Opc, ArrayRef<DstOp> DstOps,
  924. ArrayRef<SrcOp> SrcOps,
  925. std::optional<unsigned> Flags) {
  926. switch (Opc) {
  927. default:
  928. break;
  929. case TargetOpcode::G_SELECT: {
  930. assert(DstOps.size() == 1 && "Invalid select");
  931. assert(SrcOps.size() == 3 && "Invalid select");
  932. validateSelectOp(
  933. DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
  934. SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
  935. break;
  936. }
  937. case TargetOpcode::G_FNEG:
  938. case TargetOpcode::G_ABS:
  939. // All these are unary ops.
  940. assert(DstOps.size() == 1 && "Invalid Dst");
  941. assert(SrcOps.size() == 1 && "Invalid Srcs");
  942. validateUnaryOp(DstOps[0].getLLTTy(*getMRI()),
  943. SrcOps[0].getLLTTy(*getMRI()));
  944. break;
  945. case TargetOpcode::G_ADD:
  946. case TargetOpcode::G_AND:
  947. case TargetOpcode::G_MUL:
  948. case TargetOpcode::G_OR:
  949. case TargetOpcode::G_SUB:
  950. case TargetOpcode::G_XOR:
  951. case TargetOpcode::G_UDIV:
  952. case TargetOpcode::G_SDIV:
  953. case TargetOpcode::G_UREM:
  954. case TargetOpcode::G_SREM:
  955. case TargetOpcode::G_SMIN:
  956. case TargetOpcode::G_SMAX:
  957. case TargetOpcode::G_UMIN:
  958. case TargetOpcode::G_UMAX:
  959. case TargetOpcode::G_UADDSAT:
  960. case TargetOpcode::G_SADDSAT:
  961. case TargetOpcode::G_USUBSAT:
  962. case TargetOpcode::G_SSUBSAT: {
  963. // All these are binary ops.
  964. assert(DstOps.size() == 1 && "Invalid Dst");
  965. assert(SrcOps.size() == 2 && "Invalid Srcs");
  966. validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
  967. SrcOps[0].getLLTTy(*getMRI()),
  968. SrcOps[1].getLLTTy(*getMRI()));
  969. break;
  970. }
  971. case TargetOpcode::G_SHL:
  972. case TargetOpcode::G_ASHR:
  973. case TargetOpcode::G_LSHR:
  974. case TargetOpcode::G_USHLSAT:
  975. case TargetOpcode::G_SSHLSAT: {
  976. assert(DstOps.size() == 1 && "Invalid Dst");
  977. assert(SrcOps.size() == 2 && "Invalid Srcs");
  978. validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
  979. SrcOps[0].getLLTTy(*getMRI()),
  980. SrcOps[1].getLLTTy(*getMRI()));
  981. break;
  982. }
  983. case TargetOpcode::G_SEXT:
  984. case TargetOpcode::G_ZEXT:
  985. case TargetOpcode::G_ANYEXT:
  986. assert(DstOps.size() == 1 && "Invalid Dst");
  987. assert(SrcOps.size() == 1 && "Invalid Srcs");
  988. validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
  989. SrcOps[0].getLLTTy(*getMRI()), true);
  990. break;
  991. case TargetOpcode::G_TRUNC:
  992. case TargetOpcode::G_FPTRUNC: {
  993. assert(DstOps.size() == 1 && "Invalid Dst");
  994. assert(SrcOps.size() == 1 && "Invalid Srcs");
  995. validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
  996. SrcOps[0].getLLTTy(*getMRI()), false);
  997. break;
  998. }
  999. case TargetOpcode::G_BITCAST: {
  1000. assert(DstOps.size() == 1 && "Invalid Dst");
  1001. assert(SrcOps.size() == 1 && "Invalid Srcs");
  1002. assert(DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
  1003. SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() && "invalid bitcast");
  1004. break;
  1005. }
  1006. case TargetOpcode::COPY:
  1007. assert(DstOps.size() == 1 && "Invalid Dst");
  1008. // If the caller wants to add a subreg source it has to be done separately
  1009. // so we may not have any SrcOps at this point yet.
  1010. break;
  1011. case TargetOpcode::G_FCMP:
  1012. case TargetOpcode::G_ICMP: {
  1013. assert(DstOps.size() == 1 && "Invalid Dst Operands");
  1014. assert(SrcOps.size() == 3 && "Invalid Src Operands");
  1015. // For F/ICMP, the first src operand is the predicate, followed by
  1016. // the two comparands.
  1017. assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
  1018. "Expecting predicate");
  1019. assert([&]() -> bool {
  1020. CmpInst::Predicate Pred = SrcOps[0].getPredicate();
  1021. return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
  1022. : CmpInst::isFPPredicate(Pred);
  1023. }() && "Invalid predicate");
  1024. assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
  1025. "Type mismatch");
  1026. assert([&]() -> bool {
  1027. LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
  1028. LLT DstTy = DstOps[0].getLLTTy(*getMRI());
  1029. if (Op0Ty.isScalar() || Op0Ty.isPointer())
  1030. return DstTy.isScalar();
  1031. else
  1032. return DstTy.isVector() &&
  1033. DstTy.getNumElements() == Op0Ty.getNumElements();
  1034. }() && "Type Mismatch");
  1035. break;
  1036. }
  1037. case TargetOpcode::G_UNMERGE_VALUES: {
  1038. assert(!DstOps.empty() && "Invalid trivial sequence");
  1039. assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
  1040. assert(llvm::all_of(DstOps,
  1041. [&, this](const DstOp &Op) {
  1042. return Op.getLLTTy(*getMRI()) ==
  1043. DstOps[0].getLLTTy(*getMRI());
  1044. }) &&
  1045. "type mismatch in output list");
  1046. assert((TypeSize::ScalarTy)DstOps.size() *
  1047. DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
  1048. SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
  1049. "input operands do not cover output register");
  1050. break;
  1051. }
  1052. case TargetOpcode::G_MERGE_VALUES: {
  1053. assert(SrcOps.size() >= 2 && "invalid trivial sequence");
  1054. assert(DstOps.size() == 1 && "Invalid Dst");
  1055. assert(llvm::all_of(SrcOps,
  1056. [&, this](const SrcOp &Op) {
  1057. return Op.getLLTTy(*getMRI()) ==
  1058. SrcOps[0].getLLTTy(*getMRI());
  1059. }) &&
  1060. "type mismatch in input list");
  1061. assert((TypeSize::ScalarTy)SrcOps.size() *
  1062. SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
  1063. DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
  1064. "input operands do not cover output register");
  1065. assert(!DstOps[0].getLLTTy(*getMRI()).isVector() &&
  1066. "vectors should be built with G_CONCAT_VECTOR or G_BUILD_VECTOR");
  1067. break;
  1068. }
  1069. case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
  1070. assert(DstOps.size() == 1 && "Invalid Dst size");
  1071. assert(SrcOps.size() == 2 && "Invalid Src size");
  1072. assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
  1073. assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
  1074. DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
  1075. "Invalid operand type");
  1076. assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
  1077. assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
  1078. DstOps[0].getLLTTy(*getMRI()) &&
  1079. "Type mismatch");
  1080. break;
  1081. }
  1082. case TargetOpcode::G_INSERT_VECTOR_ELT: {
  1083. assert(DstOps.size() == 1 && "Invalid dst size");
  1084. assert(SrcOps.size() == 3 && "Invalid src size");
  1085. assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
  1086. SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
  1087. assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
  1088. SrcOps[1].getLLTTy(*getMRI()) &&
  1089. "Type mismatch");
  1090. assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
  1091. assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
  1092. SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
  1093. "Type mismatch");
  1094. break;
  1095. }
  1096. case TargetOpcode::G_BUILD_VECTOR: {
assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
  1099. assert(DstOps.size() == 1 && "Invalid DstOps");
  1100. assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
  1101. "Res type must be a vector");
  1102. assert(llvm::all_of(SrcOps,
  1103. [&, this](const SrcOp &Op) {
  1104. return Op.getLLTTy(*getMRI()) ==
  1105. SrcOps[0].getLLTTy(*getMRI());
  1106. }) &&
  1107. "type mismatch in input list");
  1108. assert((TypeSize::ScalarTy)SrcOps.size() *
  1109. SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
  1110. DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
  1111. "input scalars do not exactly cover the output vector register");
  1112. break;
  1113. }
  1114. case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
  1117. assert(DstOps.size() == 1 && "Invalid DstOps");
  1118. assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
  1119. "Res type must be a vector");
  1120. assert(llvm::all_of(SrcOps,
  1121. [&, this](const SrcOp &Op) {
  1122. return Op.getLLTTy(*getMRI()) ==
  1123. SrcOps[0].getLLTTy(*getMRI());
  1124. }) &&
  1125. "type mismatch in input list");
  1126. break;
  1127. }
  1128. case TargetOpcode::G_CONCAT_VECTORS: {
  1129. assert(DstOps.size() == 1 && "Invalid DstOps");
assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
  1132. assert(llvm::all_of(SrcOps,
  1133. [&, this](const SrcOp &Op) {
  1134. return (Op.getLLTTy(*getMRI()).isVector() &&
  1135. Op.getLLTTy(*getMRI()) ==
  1136. SrcOps[0].getLLTTy(*getMRI()));
  1137. }) &&
  1138. "type mismatch in input list");
  1139. assert((TypeSize::ScalarTy)SrcOps.size() *
  1140. SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
  1141. DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
  1142. "input vectors do not exactly cover the output vector register");
  1143. break;
  1144. }
  1145. case TargetOpcode::G_UADDE: {
  1146. assert(DstOps.size() == 2 && "Invalid no of dst operands");
  1147. assert(SrcOps.size() == 3 && "Invalid no of src operands");
  1148. assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
  1149. assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
  1150. (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
  1151. "Invalid operand");
  1152. assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
  1153. assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
  1154. "type mismatch");
  1155. break;
  1156. }
  1157. }
  1158. auto MIB = buildInstr(Opc);
  1159. for (const DstOp &Op : DstOps)
  1160. Op.addDefToMIB(*getMRI(), MIB);
  1161. for (const SrcOp &Op : SrcOps)
  1162. Op.addSrcToMIB(MIB);
  1163. if (Flags)
  1164. MIB->setFlags(*Flags);
  1165. return MIB;
  1166. }