// CallLowering.h
  1. #pragma once
  2. #ifdef __GNUC__
  3. #pragma GCC diagnostic push
  4. #pragma GCC diagnostic ignored "-Wunused-parameter"
  5. #endif
  6. //===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===//
  7. //
  8. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  9. // See https://llvm.org/LICENSE.txt for license information.
  10. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  11. //
  12. //===----------------------------------------------------------------------===//
  13. ///
  14. /// \file
  15. /// This file describes how to lower LLVM calls to machine code calls.
  16. ///
  17. //===----------------------------------------------------------------------===//
  18. #ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
  19. #define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetCallingConv.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MachineValueType.h"
#include <climits>
#include <cstdint>
#include <functional>
  34. namespace llvm {
  35. class CallBase;
  36. class DataLayout;
  37. class Function;
  38. class FunctionLoweringInfo;
  39. class MachineIRBuilder;
  40. struct MachinePointerInfo;
  41. class MachineRegisterInfo;
  42. class TargetLowering;
  43. class CallLowering {
  44. const TargetLowering *TLI;
  45. virtual void anchor();
  46. public:
  47. struct BaseArgInfo {
  48. Type *Ty;
  49. SmallVector<ISD::ArgFlagsTy, 4> Flags;
  50. bool IsFixed;
  51. BaseArgInfo(Type *Ty,
  52. ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
  53. bool IsFixed = true)
  54. : Ty(Ty), Flags(Flags.begin(), Flags.end()), IsFixed(IsFixed) {}
  55. BaseArgInfo() : Ty(nullptr), IsFixed(false) {}
  56. };
  57. struct ArgInfo : public BaseArgInfo {
  58. SmallVector<Register, 4> Regs;
  59. // If the argument had to be split into multiple parts according to the
  60. // target calling convention, then this contains the original vregs
  61. // if the argument was an incoming arg.
  62. SmallVector<Register, 2> OrigRegs;
  63. /// Optionally track the original IR value for the argument. This may not be
  64. /// meaningful in all contexts. This should only be used on for forwarding
  65. /// through to use for aliasing information in MachinePointerInfo for memory
  66. /// arguments.
  67. const Value *OrigValue = nullptr;
  68. /// Index original Function's argument.
  69. unsigned OrigArgIndex;
  70. /// Sentinel value for implicit machine-level input arguments.
  71. static const unsigned NoArgIndex = UINT_MAX;
  72. ArgInfo(ArrayRef<Register> Regs, Type *Ty, unsigned OrigIndex,
  73. ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
  74. bool IsFixed = true, const Value *OrigValue = nullptr)
  75. : BaseArgInfo(Ty, Flags, IsFixed), Regs(Regs.begin(), Regs.end()),
  76. OrigValue(OrigValue), OrigArgIndex(OrigIndex) {
  77. if (!Regs.empty() && Flags.empty())
  78. this->Flags.push_back(ISD::ArgFlagsTy());
  79. // FIXME: We should have just one way of saying "no register".
  80. assert(((Ty->isVoidTy() || Ty->isEmptyTy()) ==
  81. (Regs.empty() || Regs[0] == 0)) &&
  82. "only void types should have no register");
  83. }
  84. ArgInfo(ArrayRef<Register> Regs, const Value &OrigValue, unsigned OrigIndex,
  85. ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
  86. bool IsFixed = true)
  87. : ArgInfo(Regs, OrigValue.getType(), OrigIndex, Flags, IsFixed, &OrigValue) {}
  88. ArgInfo() = default;
  89. };
  90. struct CallLoweringInfo {
  91. /// Calling convention to be used for the call.
  92. CallingConv::ID CallConv = CallingConv::C;
  93. /// Destination of the call. It should be either a register, globaladdress,
  94. /// or externalsymbol.
  95. MachineOperand Callee = MachineOperand::CreateImm(0);
  96. /// Descriptor for the return type of the function.
  97. ArgInfo OrigRet;
  98. /// List of descriptors of the arguments passed to the function.
  99. SmallVector<ArgInfo, 32> OrigArgs;
  100. /// Valid if the call has a swifterror inout parameter, and contains the
  101. /// vreg that the swifterror should be copied into after the call.
  102. Register SwiftErrorVReg;
  103. /// Original IR callsite corresponding to this call, if available.
  104. const CallBase *CB = nullptr;
  105. MDNode *KnownCallees = nullptr;
  106. /// True if the call must be tail call optimized.
  107. bool IsMustTailCall = false;
  108. /// True if the call passes all target-independent checks for tail call
  109. /// optimization.
  110. bool IsTailCall = false;
  111. /// True if the call was lowered as a tail call. This is consumed by the
  112. /// legalizer. This allows the legalizer to lower libcalls as tail calls.
  113. bool LoweredTailCall = false;
  114. /// True if the call is to a vararg function.
  115. bool IsVarArg = false;
  116. /// True if the function's return value can be lowered to registers.
  117. bool CanLowerReturn = true;
  118. /// VReg to hold the hidden sret parameter.
  119. Register DemoteRegister;
  120. /// The stack index for sret demotion.
  121. int DemoteStackIndex;
  122. };
  123. /// Argument handling is mostly uniform between the four places that
  124. /// make these decisions: function formal arguments, call
  125. /// instruction args, call instruction returns and function
  126. /// returns. However, once a decision has been made on where an
  127. /// argument should go, exactly what happens can vary slightly. This
  128. /// class abstracts the differences.
  129. ///
  130. /// ValueAssigner should not depend on any specific function state, and
  131. /// only determine the types and locations for arguments.
  132. struct ValueAssigner {
  133. ValueAssigner(bool IsIncoming, CCAssignFn *AssignFn_,
  134. CCAssignFn *AssignFnVarArg_ = nullptr)
  135. : AssignFn(AssignFn_), AssignFnVarArg(AssignFnVarArg_),
  136. IsIncomingArgumentHandler(IsIncoming) {
  137. // Some targets change the handler depending on whether the call is
  138. // varargs or not. If
  139. if (!AssignFnVarArg)
  140. AssignFnVarArg = AssignFn;
  141. }
  142. virtual ~ValueAssigner() = default;
  143. /// Returns true if the handler is dealing with incoming arguments,
  144. /// i.e. those that move values from some physical location to vregs.
  145. bool isIncomingArgumentHandler() const {
  146. return IsIncomingArgumentHandler;
  147. }
  148. /// Wrap call to (typically tablegenerated CCAssignFn). This may be
  149. /// overridden to track additional state information as arguments are
  150. /// assigned or apply target specific hacks around the legacy
  151. /// infrastructure.
  152. virtual bool assignArg(unsigned ValNo, EVT OrigVT, MVT ValVT, MVT LocVT,
  153. CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
  154. ISD::ArgFlagsTy Flags, CCState &State) {
  155. if (getAssignFn(State.isVarArg())(ValNo, ValVT, LocVT, LocInfo, Flags,
  156. State))
  157. return true;
  158. StackOffset = State.getNextStackOffset();
  159. return false;
  160. }
  161. /// Assignment function to use for a general call.
  162. CCAssignFn *AssignFn;
  163. /// Assignment function to use for a variadic call. This is usually the same
  164. /// as AssignFn on most targets.
  165. CCAssignFn *AssignFnVarArg;
  166. /// Stack offset for next argument. At the end of argument evaluation, this
  167. /// is typically the total stack size.
  168. uint64_t StackOffset = 0;
  169. /// Select the appropriate assignment function depending on whether this is
  170. /// a variadic call.
  171. CCAssignFn *getAssignFn(bool IsVarArg) const {
  172. return IsVarArg ? AssignFnVarArg : AssignFn;
  173. }
  174. private:
  175. const bool IsIncomingArgumentHandler;
  176. virtual void anchor();
  177. };
  178. struct IncomingValueAssigner : public ValueAssigner {
  179. IncomingValueAssigner(CCAssignFn *AssignFn_,
  180. CCAssignFn *AssignFnVarArg_ = nullptr)
  181. : ValueAssigner(true, AssignFn_, AssignFnVarArg_) {}
  182. };
  183. struct OutgoingValueAssigner : public ValueAssigner {
  184. OutgoingValueAssigner(CCAssignFn *AssignFn_,
  185. CCAssignFn *AssignFnVarArg_ = nullptr)
  186. : ValueAssigner(false, AssignFn_, AssignFnVarArg_) {}
  187. };
  188. struct ValueHandler {
  189. MachineIRBuilder &MIRBuilder;
  190. MachineRegisterInfo &MRI;
  191. const bool IsIncomingArgumentHandler;
  192. ValueHandler(bool IsIncoming, MachineIRBuilder &MIRBuilder,
  193. MachineRegisterInfo &MRI)
  194. : MIRBuilder(MIRBuilder), MRI(MRI),
  195. IsIncomingArgumentHandler(IsIncoming) {}
  196. virtual ~ValueHandler() = default;
  197. /// Returns true if the handler is dealing with incoming arguments,
  198. /// i.e. those that move values from some physical location to vregs.
  199. bool isIncomingArgumentHandler() const {
  200. return IsIncomingArgumentHandler;
  201. }
  202. /// Materialize a VReg containing the address of the specified
  203. /// stack-based object. This is either based on a FrameIndex or
  204. /// direct SP manipulation, depending on the context. \p MPO
  205. /// should be initialized to an appropriate description of the
  206. /// address created.
  207. virtual Register getStackAddress(uint64_t MemSize, int64_t Offset,
  208. MachinePointerInfo &MPO,
  209. ISD::ArgFlagsTy Flags) = 0;
  210. /// Return the in-memory size to write for the argument at \p VA. This may
  211. /// be smaller than the allocated stack slot size.
  212. ///
  213. /// This is overridable primarily for targets to maintain compatibility with
  214. /// hacks around the existing DAG call lowering infrastructure.
  215. virtual LLT getStackValueStoreType(const DataLayout &DL,
  216. const CCValAssign &VA,
  217. ISD::ArgFlagsTy Flags) const;
  218. /// The specified value has been assigned to a physical register,
  219. /// handle the appropriate COPY (either to or from) and mark any
  220. /// relevant uses/defines as needed.
  221. virtual void assignValueToReg(Register ValVReg, Register PhysReg,
  222. CCValAssign VA) = 0;
  223. /// The specified value has been assigned to a stack
  224. /// location. Load or store it there, with appropriate extension
  225. /// if necessary.
  226. virtual void assignValueToAddress(Register ValVReg, Register Addr,
  227. LLT MemTy, MachinePointerInfo &MPO,
  228. CCValAssign &VA) = 0;
  229. /// An overload which takes an ArgInfo if additional information about the
  230. /// arg is needed. \p ValRegIndex is the index in \p Arg.Regs for the value
  231. /// to store.
  232. virtual void assignValueToAddress(const ArgInfo &Arg, unsigned ValRegIndex,
  233. Register Addr, LLT MemTy,
  234. MachinePointerInfo &MPO,
  235. CCValAssign &VA) {
  236. assignValueToAddress(Arg.Regs[ValRegIndex], Addr, MemTy, MPO, VA);
  237. }
  238. /// Handle custom values, which may be passed into one or more of \p VAs.
  239. /// \p If the handler wants the assignments to be delayed until after
  240. /// mem loc assignments, then it sets \p Thunk to the thunk to do the
  241. /// assignment.
  242. /// \return The number of \p VAs that have been assigned after the first
  243. /// one, and which should therefore be skipped from further
  244. /// processing.
  245. virtual unsigned assignCustomValue(ArgInfo &Arg, ArrayRef<CCValAssign> VAs,
  246. std::function<void()> *Thunk = nullptr) {
  247. // This is not a pure virtual method because not all targets need to worry
  248. // about custom values.
  249. llvm_unreachable("Custom values not supported");
  250. }
  251. /// Do a memory copy of \p MemSize bytes from \p SrcPtr to \p DstPtr. This
  252. /// is necessary for outgoing stack-passed byval arguments.
  253. void
  254. copyArgumentMemory(const ArgInfo &Arg, Register DstPtr, Register SrcPtr,
  255. const MachinePointerInfo &DstPtrInfo, Align DstAlign,
  256. const MachinePointerInfo &SrcPtrInfo, Align SrcAlign,
  257. uint64_t MemSize, CCValAssign &VA) const;
  258. /// Extend a register to the location type given in VA, capped at extending
  259. /// to at most MaxSize bits. If MaxSizeBits is 0 then no maximum is set.
  260. Register extendRegister(Register ValReg, CCValAssign &VA,
  261. unsigned MaxSizeBits = 0);
  262. };
  263. /// Base class for ValueHandlers used for arguments coming into the current
  264. /// function, or for return values received from a call.
  265. struct IncomingValueHandler : public ValueHandler {
  266. IncomingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI)
  267. : ValueHandler(/*IsIncoming*/ true, MIRBuilder, MRI) {}
  268. /// Insert G_ASSERT_ZEXT/G_ASSERT_SEXT or other hint instruction based on \p
  269. /// VA, returning the new register if a hint was inserted.
  270. Register buildExtensionHint(CCValAssign &VA, Register SrcReg, LLT NarrowTy);
  271. /// Provides a default implementation for argument handling.
  272. void assignValueToReg(Register ValVReg, Register PhysReg,
  273. CCValAssign VA) override;
  274. };
  275. /// Base class for ValueHandlers used for arguments passed to a function call,
  276. /// or for return values.
  277. struct OutgoingValueHandler : public ValueHandler {
  278. OutgoingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI)
  279. : ValueHandler(/*IsIncoming*/ false, MIRBuilder, MRI) {}
  280. };
  281. protected:
  282. /// Getter for generic TargetLowering class.
  283. const TargetLowering *getTLI() const {
  284. return TLI;
  285. }
  286. /// Getter for target specific TargetLowering class.
  287. template <class XXXTargetLowering>
  288. const XXXTargetLowering *getTLI() const {
  289. return static_cast<const XXXTargetLowering *>(TLI);
  290. }
  291. /// \returns Flags corresponding to the attributes on the \p ArgIdx-th
  292. /// parameter of \p Call.
  293. ISD::ArgFlagsTy getAttributesForArgIdx(const CallBase &Call,
  294. unsigned ArgIdx) const;
  295. /// Adds flags to \p Flags based off of the attributes in \p Attrs.
  296. /// \p OpIdx is the index in \p Attrs to add flags from.
  297. void addArgFlagsFromAttributes(ISD::ArgFlagsTy &Flags,
  298. const AttributeList &Attrs,
  299. unsigned OpIdx) const;
  300. template <typename FuncInfoTy>
  301. void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
  302. const FuncInfoTy &FuncInfo) const;
  303. /// Break \p OrigArgInfo into one or more pieces the calling convention can
  304. /// process, returned in \p SplitArgs. For example, this should break structs
  305. /// down into individual fields.
  306. ///
  307. /// If \p Offsets is non-null, it points to a vector to be filled in
  308. /// with the in-memory offsets of each of the individual values.
  309. void splitToValueTypes(const ArgInfo &OrigArgInfo,
  310. SmallVectorImpl<ArgInfo> &SplitArgs,
  311. const DataLayout &DL, CallingConv::ID CallConv,
  312. SmallVectorImpl<uint64_t> *Offsets = nullptr) const;
  313. /// Analyze the argument list in \p Args, using \p Assigner to populate \p
  314. /// CCInfo. This will determine the types and locations to use for passed or
  315. /// returned values. This may resize fields in \p Args if the value is split
  316. /// across multiple registers or stack slots.
  317. ///
  318. /// This is independent of the function state and can be used
  319. /// to determine how a call would pass arguments without needing to change the
  320. /// function. This can be used to check if arguments are suitable for tail
  321. /// call lowering.
  322. ///
  323. /// \return True if everything has succeeded, false otherwise.
  324. bool determineAssignments(ValueAssigner &Assigner,
  325. SmallVectorImpl<ArgInfo> &Args,
  326. CCState &CCInfo) const;
  327. /// Invoke ValueAssigner::assignArg on each of the given \p Args and then use
  328. /// \p Handler to move them to the assigned locations.
  329. ///
  330. /// \return True if everything has succeeded, false otherwise.
  331. bool
  332. determineAndHandleAssignments(ValueHandler &Handler, ValueAssigner &Assigner,
  333. SmallVectorImpl<ArgInfo> &Args,
  334. MachineIRBuilder &MIRBuilder,
  335. CallingConv::ID CallConv, bool IsVarArg,
  336. ArrayRef<Register> ThisReturnRegs = None) const;
  337. /// Use \p Handler to insert code to handle the argument/return values
  338. /// represented by \p Args. It's expected determineAssignments previously
  339. /// processed these arguments to populate \p CCState and \p ArgLocs.
  340. bool handleAssignments(ValueHandler &Handler, SmallVectorImpl<ArgInfo> &Args,
  341. CCState &CCState,
  342. SmallVectorImpl<CCValAssign> &ArgLocs,
  343. MachineIRBuilder &MIRBuilder,
  344. ArrayRef<Register> ThisReturnRegs = None) const;
  345. /// Check whether parameters to a call that are passed in callee saved
  346. /// registers are the same as from the calling function. This needs to be
  347. /// checked for tail call eligibility.
  348. bool parametersInCSRMatch(const MachineRegisterInfo &MRI,
  349. const uint32_t *CallerPreservedMask,
  350. const SmallVectorImpl<CCValAssign> &ArgLocs,
  351. const SmallVectorImpl<ArgInfo> &OutVals) const;
  352. /// \returns True if the calling convention for a callee and its caller pass
  353. /// results in the same way. Typically used for tail call eligibility checks.
  354. ///
  355. /// \p Info is the CallLoweringInfo for the call.
  356. /// \p MF is the MachineFunction for the caller.
  357. /// \p InArgs contains the results of the call.
  358. /// \p CalleeAssigner specifies the target's handling of the argument types
  359. /// for the callee.
  360. /// \p CallerAssigner specifies the target's handling of the
  361. /// argument types for the caller.
  362. bool resultsCompatible(CallLoweringInfo &Info, MachineFunction &MF,
  363. SmallVectorImpl<ArgInfo> &InArgs,
  364. ValueAssigner &CalleeAssigner,
  365. ValueAssigner &CallerAssigner) const;
  366. public:
  367. CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
  368. virtual ~CallLowering() = default;
  369. /// \return true if the target is capable of handling swifterror values that
  370. /// have been promoted to a specified register. The extended versions of
  371. /// lowerReturn and lowerCall should be implemented.
  372. virtual bool supportSwiftError() const {
  373. return false;
  374. }
  375. /// Load the returned value from the stack into virtual registers in \p VRegs.
  376. /// It uses the frame index \p FI and the start offset from \p DemoteReg.
  377. /// The loaded data size will be determined from \p RetTy.
  378. void insertSRetLoads(MachineIRBuilder &MIRBuilder, Type *RetTy,
  379. ArrayRef<Register> VRegs, Register DemoteReg,
  380. int FI) const;
  381. /// Store the return value given by \p VRegs into stack starting at the offset
  382. /// specified in \p DemoteReg.
  383. void insertSRetStores(MachineIRBuilder &MIRBuilder, Type *RetTy,
  384. ArrayRef<Register> VRegs, Register DemoteReg) const;
  385. /// Insert the hidden sret ArgInfo to the beginning of \p SplitArgs.
  386. /// This function should be called from the target specific
  387. /// lowerFormalArguments when \p F requires the sret demotion.
  388. void insertSRetIncomingArgument(const Function &F,
  389. SmallVectorImpl<ArgInfo> &SplitArgs,
  390. Register &DemoteReg, MachineRegisterInfo &MRI,
  391. const DataLayout &DL) const;
  392. /// For the call-base described by \p CB, insert the hidden sret ArgInfo to
  393. /// the OrigArgs field of \p Info.
  394. void insertSRetOutgoingArgument(MachineIRBuilder &MIRBuilder,
  395. const CallBase &CB,
  396. CallLoweringInfo &Info) const;
  397. /// \return True if the return type described by \p Outs can be returned
  398. /// without performing sret demotion.
  399. bool checkReturn(CCState &CCInfo, SmallVectorImpl<BaseArgInfo> &Outs,
  400. CCAssignFn *Fn) const;
  401. /// Get the type and the ArgFlags for the split components of \p RetTy as
  402. /// returned by \c ComputeValueVTs.
  403. void getReturnInfo(CallingConv::ID CallConv, Type *RetTy, AttributeList Attrs,
  404. SmallVectorImpl<BaseArgInfo> &Outs,
  405. const DataLayout &DL) const;
  406. /// Toplevel function to check the return type based on the target calling
  407. /// convention. \return True if the return value of \p MF can be returned
  408. /// without performing sret demotion.
  409. bool checkReturnTypeForCallConv(MachineFunction &MF) const;
  410. /// This hook must be implemented to check whether the return values
  411. /// described by \p Outs can fit into the return registers. If false
  412. /// is returned, an sret-demotion is performed.
  413. virtual bool canLowerReturn(MachineFunction &MF, CallingConv::ID CallConv,
  414. SmallVectorImpl<BaseArgInfo> &Outs,
  415. bool IsVarArg) const {
  416. return true;
  417. }
  418. /// This hook must be implemented to lower outgoing return values, described
  419. /// by \p Val, into the specified virtual registers \p VRegs.
  420. /// This hook is used by GlobalISel.
  421. ///
  422. /// \p FLI is required for sret demotion.
  423. ///
  424. /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
  425. /// that needs to be implicitly returned.
  426. ///
  427. /// \return True if the lowering succeeds, false otherwise.
  428. virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
  429. ArrayRef<Register> VRegs, FunctionLoweringInfo &FLI,
  430. Register SwiftErrorVReg) const {
  431. if (!supportSwiftError()) {
  432. assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
  433. return lowerReturn(MIRBuilder, Val, VRegs, FLI);
  434. }
  435. return false;
  436. }
  437. /// This hook behaves as the extended lowerReturn function, but for targets
  438. /// that do not support swifterror value promotion.
  439. virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
  440. ArrayRef<Register> VRegs,
  441. FunctionLoweringInfo &FLI) const {
  442. return false;
  443. }
  444. virtual bool fallBackToDAGISel(const MachineFunction &MF) const {
  445. return false;
  446. }
  447. /// This hook must be implemented to lower the incoming (formal)
  448. /// arguments, described by \p VRegs, for GlobalISel. Each argument
  449. /// must end up in the related virtual registers described by \p VRegs.
  450. /// In other words, the first argument should end up in \c VRegs[0],
  451. /// the second in \c VRegs[1], and so on. For each argument, there will be one
  452. /// register for each non-aggregate type, as returned by \c computeValueLLTs.
  453. /// \p MIRBuilder is set to the proper insertion for the argument
  454. /// lowering. \p FLI is required for sret demotion.
  455. ///
  456. /// \return True if the lowering succeeded, false otherwise.
  457. virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
  458. const Function &F,
  459. ArrayRef<ArrayRef<Register>> VRegs,
  460. FunctionLoweringInfo &FLI) const {
  461. return false;
  462. }
  463. /// This hook must be implemented to lower the given call instruction,
  464. /// including argument and return value marshalling.
  465. ///
  466. ///
  467. /// \return true if the lowering succeeded, false otherwise.
  468. virtual bool lowerCall(MachineIRBuilder &MIRBuilder,
  469. CallLoweringInfo &Info) const {
  470. return false;
  471. }
  472. /// Lower the given call instruction, including argument and return value
  473. /// marshalling.
  474. ///
  475. /// \p CI is the call/invoke instruction.
  476. ///
  477. /// \p ResRegs are the registers where the call's return value should be
  478. /// stored (or 0 if there is no return value). There will be one register for
  479. /// each non-aggregate type, as returned by \c computeValueLLTs.
  480. ///
  481. /// \p ArgRegs is a list of lists of virtual registers containing each
  482. /// argument that needs to be passed (argument \c i should be placed in \c
  483. /// ArgRegs[i]). For each argument, there will be one register for each
  484. /// non-aggregate type, as returned by \c computeValueLLTs.
  485. ///
  486. /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  487. /// parameter, and contains the vreg that the swifterror should be copied into
  488. /// after the call.
  489. ///
  490. /// \p GetCalleeReg is a callback to materialize a register for the callee if
  491. /// the target determines it cannot jump to the destination based purely on \p
  492. /// CI. This might be because \p CI is indirect, or because of the limited
  493. /// range of an immediate jump.
  494. ///
  495. /// \return true if the lowering succeeded, false otherwise.
  496. bool lowerCall(MachineIRBuilder &MIRBuilder, const CallBase &Call,
  497. ArrayRef<Register> ResRegs,
  498. ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
  499. std::function<unsigned()> GetCalleeReg) const;
  500. /// For targets which want to use big-endian can enable it with
  501. /// enableBigEndian() hook
  502. virtual bool enableBigEndian() const { return false; }
  503. /// For targets which support the "returned" parameter attribute, returns
  504. /// true if the given type is a valid one to use with "returned".
  505. virtual bool isTypeIsValidForThisReturn(EVT Ty) const { return false; }
  506. };
  507. } // end namespace llvm
  508. #endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
  509. #ifdef __GNUC__
  510. #pragma GCC diagnostic pop
  511. #endif