//===-- llvm/CodeGen/GlobalISel/CSEMIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the CSEMIRBuilder class which CSEs as it builds
/// instructions.
//===----------------------------------------------------------------------===//
//

#include "llvm/CodeGen/GlobalISel/CSEMIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/CSEInfo.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"
#include "llvm/CodeGen/GlobalISel/Utils.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/IR/DebugInfoMetadata.h"

using namespace llvm;
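
// Returns true if A appears at or before B within the current basic block
// (B being the block's end iterator counts as dominated). Implemented as a
// linear scan from the start of the block, so it only answers local
// (same-block) dominance queries.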
bool CSEMIRBuilder::dominates(MachineBasicBlock::const_iterator A,
                              MachineBasicBlock::const_iterator B) const {
  auto MBBEnd = getMBB().end();
  if (B == MBBEnd)
    return true;
  assert(A->getParent() == B->getParent() &&
         "Iterators should be in same block");
  const MachineBasicBlock *BBA = A->getParent();
  MachineBasicBlock::const_iterator I = BBA->begin();
  for (; &*I != A && &*I != B; ++I)
    ;
  return &*I == A;
}
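
// Look up an existing instruction for the profiled ID in the CSE folding set.
// On a hit, make sure the existing def is usable at the current insertion
// point (advancing the insert point past it, or splicing it to just before
// the insert point if it does not already dominate it) and return it. On a
// miss, return an empty MachineInstrBuilder; NodeInsertPos is then ready for
// a later memoizeMI() insertion.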
MachineInstrBuilder
CSEMIRBuilder::getDominatingInstrForID(FoldingSetNodeID &ID,
                                       void *&NodeInsertPos) {
  GISelCSEInfo *CSEInfo = getCSEInfo();
  assert(CSEInfo && "Can't get here without setting CSEInfo");
  MachineBasicBlock *CurMBB = &getMBB();
  MachineInstr *MI =
      CSEInfo->getMachineInstrIfExists(ID, CurMBB, NodeInsertPos);
  if (MI) {
    CSEInfo->countOpcodeHit(MI->getOpcode());
    auto CurrPos = getInsertPt();
    auto MII = MachineBasicBlock::iterator(MI);
    if (MII == CurrPos) {
      // Move the insert point ahead of the instruction so any future uses of
      // this builder will have the def ready.
      setInsertPt(*CurMBB, std::next(MII));
    } else if (!dominates(MI, CurrPos)) {
      CurMBB->splice(CurrPos, CurMBB, MI);
    }
    return MachineInstrBuilder(getMF(), MI);
  }
  return MachineInstrBuilder();
}
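
// Returns true if CSEInfo is available and the CSE config says this opcode
// should be CSE'd.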
bool CSEMIRBuilder::canPerformCSEForOpc(unsigned Opc) const {
  const GISelCSEInfo *CSEInfo = getCSEInfo();
  if (!CSEInfo || !CSEInfo->shouldCSE(Opc))
    return false;
  return true;
}
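
// Add a single def operand (register class, existing register, or LLT) to
// the profile that forms the instruction's FoldingSetNodeID.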
void CSEMIRBuilder::profileDstOp(const DstOp &Op,
                                 GISelInstProfileBuilder &B) const {
  switch (Op.getDstOpKind()) {
  case DstOp::DstType::Ty_RC:
    B.addNodeIDRegType(Op.getRegClass());
    break;
  case DstOp::DstType::Ty_Reg: {
    // Regs can have LLT&(RB|RC). If those exist, profile them as well.
    B.addNodeIDReg(Op.getReg());
    break;
  }
  default:
    B.addNodeIDRegType(Op.getLLTTy(*getMRI()));
    break;
  }
}
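
// Add a single use operand (immediate, predicate, or register) to the
// profile.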
void CSEMIRBuilder::profileSrcOp(const SrcOp &Op,
                                 GISelInstProfileBuilder &B) const {
  switch (Op.getSrcOpKind()) {
  case SrcOp::SrcType::Ty_Imm:
    B.addNodeIDImmediate(static_cast<int64_t>(Op.getImm()));
    break;
  case SrcOp::SrcType::Ty_Predicate:
    B.addNodeIDImmediate(static_cast<int64_t>(Op.getPredicate()));
    break;
  default:
    B.addNodeIDRegType(Op.getReg());
    break;
  }
}
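
// Profile the parts shared by every instruction: the parent MBB (CSE is
// local to a block) and the opcode.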
void CSEMIRBuilder::profileMBBOpcode(GISelInstProfileBuilder &B,
                                     unsigned Opc) const {
  // First add the MBB (Local CSE).
  B.addNodeIDMBB(&getMBB());
  // Then add the opcode.
  B.addNodeIDOpcode(Opc);
}
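
// Profile the opcode, defs, uses, and optional flags into B to form the
// complete ID for a prospective instruction.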
void CSEMIRBuilder::profileEverything(unsigned Opc, ArrayRef<DstOp> DstOps,
                                      ArrayRef<SrcOp> SrcOps,
                                      std::optional<unsigned> Flags,
                                      GISelInstProfileBuilder &B) const {
  profileMBBOpcode(B, Opc);
  // Then add the DstOps.
  profileDstOps(DstOps, B);
  // Then add the SrcOps.
  profileSrcOps(SrcOps, B);
  // Add Flags if passed in.
  if (Flags)
    B.addNodeIDFlag(*Flags);
}
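
// Record a freshly built instruction in the CSE folding set at NodeInsertPos
// so identical later builds can reuse it.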
MachineInstrBuilder CSEMIRBuilder::memoizeMI(MachineInstrBuilder MIB,
                                             void *NodeInsertPos) {
  assert(canPerformCSEForOpc(MIB->getOpcode()) &&
         "Attempting to CSE illegal op");
  MachineInstr *MIBInstr = MIB;
  getCSEInfo()->insertInstr(MIBInstr, NodeInsertPos);
  return MIB;
}
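
// Returns true if a CSE hit can be handed back through this API: either
// there is a single def (a copy can always be emitted to it), or every def
// is given as a type or register class so no copies to pre-existing
// registers are needed.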
bool CSEMIRBuilder::checkCopyToDefsPossible(ArrayRef<DstOp> DstOps) {
  if (DstOps.size() == 1)
    return true; // always possible to emit copy to just 1 vreg.
  return llvm::all_of(DstOps, [](const DstOp &Op) {
    DstOp::DstType DT = Op.getDstOpKind();
    return DT == DstOp::DstType::Ty_LLT || DT == DstOp::DstType::Ty_RC;
  });
}
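
// Given a CSE hit MIB, adapt it to the requested defs: emit a COPY into the
// caller-supplied vreg when there is exactly one, otherwise hand back the
// existing instruction directly.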
MachineInstrBuilder
CSEMIRBuilder::generateCopiesIfRequired(ArrayRef<DstOp> DstOps,
                                        MachineInstrBuilder &MIB) {
  assert(checkCopyToDefsPossible(DstOps) &&
         "Impossible to return a single MIB with copies to multiple defs");
  if (DstOps.size() == 1) {
    const DstOp &Op = DstOps[0];
    if (Op.getDstOpKind() == DstOp::DstType::Ty_Reg)
      return buildCopy(Op.getReg(), MIB.getReg(0));
  }

  // If we didn't generate a copy then we're re-using an existing node directly
  // instead of emitting any code. Merge the debug location we wanted to emit
  // into the instruction we're CSE'ing with. Debug locations aren't part of
  // the profile so we don't need to recompute it.
  if (getDebugLoc()) {
    GISelChangeObserver *Observer = getState().Observer;
    if (Observer)
      Observer->changingInstr(*MIB);
    MIB->setDebugLoc(
        DILocation::getMergedLocation(MIB->getDebugLoc(), getDebugLoc()));
    if (Observer)
      Observer->changedInstr(*MIB);
  }

  return MIB;
}
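
// Main entry point. First try to constant fold the requested instruction;
// otherwise profile it and either reuse a dominating identical instruction
// or build a new one and memoize it in the CSE folding set.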
MachineInstrBuilder CSEMIRBuilder::buildInstr(unsigned Opc,
                                              ArrayRef<DstOp> DstOps,
                                              ArrayRef<SrcOp> SrcOps,
                                              std::optional<unsigned> Flag) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_PTR_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM:
  case TargetOpcode::G_SMIN:
  case TargetOpcode::G_SMAX:
  case TargetOpcode::G_UMIN:
  case TargetOpcode::G_UMAX: {
    // Try to constant fold these.
    assert(SrcOps.size() == 2 && "Invalid sources");
    assert(DstOps.size() == 1 && "Invalid dsts");
    LLT SrcTy = SrcOps[0].getLLTTy(*getMRI());

    if (Opc == TargetOpcode::G_PTR_ADD &&
        getDataLayout().isNonIntegralAddressSpace(SrcTy.getAddressSpace()))
      break;

    if (SrcTy.isVector()) {
      // Try to constant fold vector constants.
      SmallVector<APInt> VecCst = ConstantFoldVectorBinop(
          Opc, SrcOps[0].getReg(), SrcOps[1].getReg(), *getMRI());
      if (!VecCst.empty())
        return buildBuildVectorConstant(DstOps[0], VecCst);
      break;
    }

    if (std::optional<APInt> Cst = ConstantFoldBinOp(
            Opc, SrcOps[0].getReg(), SrcOps[1].getReg(), *getMRI()))
      return buildConstant(DstOps[0], *Cst);
    break;
  }
  case TargetOpcode::G_FADD:
  case TargetOpcode::G_FSUB:
  case TargetOpcode::G_FMUL:
  case TargetOpcode::G_FDIV:
  case TargetOpcode::G_FREM:
  case TargetOpcode::G_FMINNUM:
  case TargetOpcode::G_FMAXNUM:
  case TargetOpcode::G_FMINNUM_IEEE:
  case TargetOpcode::G_FMAXNUM_IEEE:
  case TargetOpcode::G_FMINIMUM:
  case TargetOpcode::G_FMAXIMUM:
  case TargetOpcode::G_FCOPYSIGN: {
    // Try to constant fold these.
    assert(SrcOps.size() == 2 && "Invalid sources");
    assert(DstOps.size() == 1 && "Invalid dsts");
    if (std::optional<APFloat> Cst = ConstantFoldFPBinOp(
            Opc, SrcOps[0].getReg(), SrcOps[1].getReg(), *getMRI()))
      return buildFConstant(DstOps[0], *Cst);
    break;
  }
  case TargetOpcode::G_SEXT_INREG: {
    assert(DstOps.size() == 1 && "Invalid dst ops");
    assert(SrcOps.size() == 2 && "Invalid src ops");
    const DstOp &Dst = DstOps[0];
    const SrcOp &Src0 = SrcOps[0];
    const SrcOp &Src1 = SrcOps[1];
    if (auto MaybeCst =
            ConstantFoldExtOp(Opc, Src0.getReg(), Src1.getImm(), *getMRI()))
      return buildConstant(Dst, *MaybeCst);
    break;
  }
  case TargetOpcode::G_SITOFP:
  case TargetOpcode::G_UITOFP: {
    // Try to constant fold these.
    assert(SrcOps.size() == 1 && "Invalid sources");
    assert(DstOps.size() == 1 && "Invalid dsts");
    if (std::optional<APFloat> Cst = ConstantFoldIntToFloat(
            Opc, DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getReg(), *getMRI()))
      return buildFConstant(DstOps[0], *Cst);
    break;
  }
  case TargetOpcode::G_CTLZ: {
    assert(SrcOps.size() == 1 && "Expected one source");
    assert(DstOps.size() == 1 && "Expected one dest");
    auto MaybeCsts = ConstantFoldCTLZ(SrcOps[0].getReg(), *getMRI());
    if (!MaybeCsts)
      break;
    if (MaybeCsts->size() == 1)
      return buildConstant(DstOps[0], (*MaybeCsts)[0]);
    // This was a vector constant. Build a G_BUILD_VECTOR for them.
    SmallVector<Register> ConstantRegs;
    LLT VecTy = DstOps[0].getLLTTy(*getMRI());
    for (unsigned Cst : *MaybeCsts)
      ConstantRegs.emplace_back(
          buildConstant(VecTy.getScalarType(), Cst).getReg(0));
    return buildBuildVector(DstOps[0], ConstantRegs);
  }
  }

  bool CanCopy = checkCopyToDefsPossible(DstOps);
  if (!canPerformCSEForOpc(Opc))
    return MachineIRBuilder::buildInstr(Opc, DstOps, SrcOps, Flag);

  // If we can CSE this instruction, but doing so involves generating copies to
  // multiple regs, give up. This frequently happens to UNMERGEs.
  if (!CanCopy) {
    auto MIB = MachineIRBuilder::buildInstr(Opc, DstOps, SrcOps, Flag);
    // CSEInfo would have tracked this instruction. Remove it from the
    // temporary insts.
    getCSEInfo()->handleRemoveInst(&*MIB);
    return MIB;
  }

  FoldingSetNodeID ID;
  GISelInstProfileBuilder ProfBuilder(ID, *getMRI());
  void *InsertPos = nullptr;
  profileEverything(Opc, DstOps, SrcOps, Flag, ProfBuilder);
  MachineInstrBuilder MIB = getDominatingInstrForID(ID, InsertPos);
  if (MIB) {
    // Handle generating copies here.
    return generateCopiesIfRequired(DstOps, MIB);
  }

  // This instruction does not exist in the CSEInfo. Build it and CSE it.
  MachineInstrBuilder NewMIB =
      MachineIRBuilder::buildInstr(Opc, DstOps, SrcOps, Flag);
  return memoizeMI(NewMIB, InsertPos);
}
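
// G_CONSTANT with CSE: for vector types only the scalar element is CSE'd
// (then splatted); scalar constants are profiled and looked up like any
// other CSE-able instruction.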
MachineInstrBuilder CSEMIRBuilder::buildConstant(const DstOp &Res,
                                                 const ConstantInt &Val) {
  constexpr unsigned Opc = TargetOpcode::G_CONSTANT;
  if (!canPerformCSEForOpc(Opc))
    return MachineIRBuilder::buildConstant(Res, Val);

  // For vectors, CSE the element only for now.
  LLT Ty = Res.getLLTTy(*getMRI());
  if (Ty.isVector())
    return buildSplatVector(Res, buildConstant(Ty.getElementType(), Val));

  FoldingSetNodeID ID;
  GISelInstProfileBuilder ProfBuilder(ID, *getMRI());
  void *InsertPos = nullptr;
  profileMBBOpcode(ProfBuilder, Opc);
  profileDstOp(Res, ProfBuilder);
  ProfBuilder.addNodeIDMachineOperand(MachineOperand::CreateCImm(&Val));
  MachineInstrBuilder MIB = getDominatingInstrForID(ID, InsertPos);
  if (MIB) {
    // Handle generating copies here.
    return generateCopiesIfRequired({Res}, MIB);
  }

  MachineInstrBuilder NewMIB = MachineIRBuilder::buildConstant(Res, Val);
  return memoizeMI(NewMIB, InsertPos);
}
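
// Same as buildConstant above, but for floating-point constants
// (G_FCONSTANT).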
MachineInstrBuilder CSEMIRBuilder::buildFConstant(const DstOp &Res,
                                                  const ConstantFP &Val) {
  constexpr unsigned Opc = TargetOpcode::G_FCONSTANT;
  if (!canPerformCSEForOpc(Opc))
    return MachineIRBuilder::buildFConstant(Res, Val);

  // For vectors, CSE the element only for now.
  LLT Ty = Res.getLLTTy(*getMRI());
  if (Ty.isVector())
    return buildSplatVector(Res, buildFConstant(Ty.getElementType(), Val));

  FoldingSetNodeID ID;
  GISelInstProfileBuilder ProfBuilder(ID, *getMRI());
  void *InsertPos = nullptr;
  profileMBBOpcode(ProfBuilder, Opc);
  profileDstOp(Res, ProfBuilder);
  ProfBuilder.addNodeIDMachineOperand(MachineOperand::CreateFPImm(&Val));
  MachineInstrBuilder MIB = getDominatingInstrForID(ID, InsertPos);
  if (MIB) {
    // Handle generating copies here.
    return generateCopiesIfRequired({Res}, MIB);
  }

  MachineInstrBuilder NewMIB = MachineIRBuilder::buildFConstant(Res, Val);
  return memoizeMI(NewMIB, InsertPos);
}