//===- llvm/CodeGen/GlobalISel/CallLowering.h - Call lowering ---*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
///
/// \file
/// This file describes how to lower LLVM calls to machine code calls.
///
//===----------------------------------------------------------------------===//

#ifndef LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H
#define LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/CodeGen/CallingConvLower.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetCallingConv.h"
#include "llvm/IR/Attributes.h"
#include "llvm/IR/CallingConv.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Value.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/MachineValueType.h"
#include <cstdint>
#include <functional>

namespace llvm {

class CallBase;
class DataLayout;
class Function;
class FunctionLoweringInfo;
class MachineIRBuilder;
struct MachinePointerInfo;
class MachineRegisterInfo;
class TargetLowering;

class CallLowering {
  const TargetLowering *TLI;

  virtual void anchor();
public:
  struct BaseArgInfo {
    Type *Ty;
    SmallVector<ISD::ArgFlagsTy, 4> Flags;
    bool IsFixed;

    BaseArgInfo(Type *Ty,
                ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
                bool IsFixed = true)
        : Ty(Ty), Flags(Flags.begin(), Flags.end()), IsFixed(IsFixed) {}

    BaseArgInfo() : Ty(nullptr), IsFixed(false) {}
  };

  struct ArgInfo : public BaseArgInfo {
    SmallVector<Register, 4> Regs;
    // If the argument had to be split into multiple parts according to the
    // target calling convention, and the argument was an incoming argument,
    // then this contains the original vregs.
    SmallVector<Register, 2> OrigRegs;

    /// Optionally track the original IR value for the argument. This may not be
    /// meaningful in all contexts. This should only be used to forward aliasing
    /// information to the MachinePointerInfo of memory arguments.
    const Value *OrigValue = nullptr;

    /// Index of the original Function's argument.
    unsigned OrigArgIndex;

    /// Sentinel value for implicit machine-level input arguments.
    static const unsigned NoArgIndex = UINT_MAX;

    ArgInfo(ArrayRef<Register> Regs, Type *Ty, unsigned OrigIndex,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true, const Value *OrigValue = nullptr)
        : BaseArgInfo(Ty, Flags, IsFixed), Regs(Regs.begin(), Regs.end()),
          OrigValue(OrigValue), OrigArgIndex(OrigIndex) {
      if (!Regs.empty() && Flags.empty())
        this->Flags.push_back(ISD::ArgFlagsTy());
      // FIXME: We should have just one way of saying "no register".
      assert(((Ty->isVoidTy() || Ty->isEmptyTy()) ==
              (Regs.empty() || Regs[0] == 0)) &&
             "only void types should have no register");
    }

    ArgInfo(ArrayRef<Register> Regs, const Value &OrigValue, unsigned OrigIndex,
            ArrayRef<ISD::ArgFlagsTy> Flags = ArrayRef<ISD::ArgFlagsTy>(),
            bool IsFixed = true)
        : ArgInfo(Regs, OrigValue.getType(), OrigIndex, Flags, IsFixed,
                  &OrigValue) {}

    ArgInfo() = default;
  };

  struct CallLoweringInfo {
    /// Calling convention to be used for the call.
    CallingConv::ID CallConv = CallingConv::C;

    /// Destination of the call. It should be either a register, globaladdress,
    /// or externalsymbol.
    MachineOperand Callee = MachineOperand::CreateImm(0);

    /// Descriptor for the return type of the function.
    ArgInfo OrigRet;

    /// List of descriptors of the arguments passed to the function.
    SmallVector<ArgInfo, 32> OrigArgs;

    /// Valid if the call has a swifterror inout parameter, and contains the
    /// vreg that the swifterror should be copied into after the call.
    Register SwiftErrorVReg;

    /// Original IR callsite corresponding to this call, if available.
    const CallBase *CB = nullptr;

    /// Known callees of this call (e.g. from !callees metadata), if available.
    MDNode *KnownCallees = nullptr;

    /// True if the call must be tail call optimized.
    bool IsMustTailCall = false;

    /// True if the call passes all target-independent checks for tail call
    /// optimization.
    bool IsTailCall = false;

    /// True if the call was lowered as a tail call. This is consumed by the
    /// legalizer, allowing it to lower libcalls as tail calls.
    bool LoweredTailCall = false;

    /// True if the call is to a vararg function.
    bool IsVarArg = false;

    /// True if the function's return value can be lowered to registers.
    bool CanLowerReturn = true;

    /// VReg to hold the hidden sret parameter.
    Register DemoteRegister;

    /// The stack index for sret demotion.
    int DemoteStackIndex;
  };
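
  // Example (illustrative sketch only; CLI, MIRBuilder, the registers, the IR
  // types and the runtime symbol name are all assumptions, not part of this
  // interface): callers such as the IRTranslator or the legalizer's libcall
  // handling populate this struct and hand it to lowerCall():
  //
  //   CallLowering::CallLoweringInfo Info;
  //   Info.CallConv = CallingConv::C;
  //   Info.Callee = MachineOperand::CreateES("__my_runtime_helper");
  //   Info.OrigRet = CallLowering::ArgInfo({ResultReg}, RetTy, 0);
  //   Info.OrigArgs.push_back(CallLowering::ArgInfo({Arg0Reg}, Arg0Ty, 0));
  //   Info.OrigArgs.push_back(CallLowering::ArgInfo({Arg1Reg}, Arg1Ty, 1));
  //   if (!CLI.lowerCall(MIRBuilder, Info))
  //     return false;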

  /// Argument handling is mostly uniform between the four places that
  /// make these decisions: function formal arguments, call
  /// instruction args, call instruction returns and function
  /// returns. However, once a decision has been made on where an
  /// argument should go, exactly what happens can vary slightly. This
  /// class abstracts the differences.
  ///
  /// ValueAssigner should not depend on any specific function state, and
  /// should only determine the types and locations for arguments.
  struct ValueAssigner {
    ValueAssigner(bool IsIncoming, CCAssignFn *AssignFn_,
                  CCAssignFn *AssignFnVarArg_ = nullptr)
        : AssignFn(AssignFn_), AssignFnVarArg(AssignFnVarArg_),
          IsIncomingArgumentHandler(IsIncoming) {

      // Some targets use a different assignment function depending on whether
      // the call is varargs or not. If no separate vararg assignment function
      // was provided, fall back to the regular one.
      if (!AssignFnVarArg)
        AssignFnVarArg = AssignFn;
    }

    virtual ~ValueAssigner() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    bool isIncomingArgumentHandler() const {
      return IsIncomingArgumentHandler;
    }

    /// Wrap a call to the (typically TableGen-generated) CCAssignFn. This may
    /// be overridden to track additional state information as arguments are
    /// assigned, or to apply target-specific hacks around the legacy
    /// infrastructure.
    virtual bool assignArg(unsigned ValNo, EVT OrigVT, MVT ValVT, MVT LocVT,
                           CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
                           ISD::ArgFlagsTy Flags, CCState &State) {
      if (getAssignFn(State.isVarArg())(ValNo, ValVT, LocVT, LocInfo, Flags,
                                        State))
        return true;
      StackOffset = State.getNextStackOffset();
      return false;
    }

    /// Assignment function to use for a general call.
    CCAssignFn *AssignFn;

    /// Assignment function to use for a variadic call. On most targets this is
    /// the same as AssignFn.
    CCAssignFn *AssignFnVarArg;

    /// Stack offset for next argument. At the end of argument evaluation, this
    /// is typically the total stack size.
    uint64_t StackOffset = 0;

    /// Select the appropriate assignment function depending on whether this is
    /// a variadic call.
    CCAssignFn *getAssignFn(bool IsVarArg) const {
      return IsVarArg ? AssignFnVarArg : AssignFn;
    }

  private:
    const bool IsIncomingArgumentHandler;
    virtual void anchor();
  };
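
  // Example (illustrative sketch only; the target name and the byval
  // bookkeeping are hypothetical): a target can subclass a value assigner and
  // override assignArg to record extra per-argument state while still
  // delegating to the TableGen-generated CCAssignFn:
  //
  //   struct MyTargetOutgoingAssigner : CallLowering::OutgoingValueAssigner {
  //     SmallVector<unsigned, 8> ByValArgs;
  //     using OutgoingValueAssigner::OutgoingValueAssigner;
  //
  //     bool assignArg(unsigned ValNo, EVT OrigVT, MVT ValVT, MVT LocVT,
  //                    CCValAssign::LocInfo LocInfo, const ArgInfo &Info,
  //                    ISD::ArgFlagsTy Flags, CCState &State) override {
  //       if (Flags.isByVal())
  //         ByValArgs.push_back(ValNo); // Remember byval args for later copies.
  //       return OutgoingValueAssigner::assignArg(ValNo, OrigVT, ValVT, LocVT,
  //                                               LocInfo, Info, Flags, State);
  //     }
  //   };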

  struct IncomingValueAssigner : public ValueAssigner {
    IncomingValueAssigner(CCAssignFn *AssignFn_,
                          CCAssignFn *AssignFnVarArg_ = nullptr)
        : ValueAssigner(true, AssignFn_, AssignFnVarArg_) {}
  };

  struct OutgoingValueAssigner : public ValueAssigner {
    OutgoingValueAssigner(CCAssignFn *AssignFn_,
                          CCAssignFn *AssignFnVarArg_ = nullptr)
        : ValueAssigner(false, AssignFn_, AssignFnVarArg_) {}
  };
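
  // Example (illustrative sketch only; CC_MyTarget and RetCC_MyTarget stand in
  // for a target's TableGen-generated calling convention functions): the
  // assigners simply wrap those functions for the incoming and outgoing
  // directions:
  //
  //   IncomingValueAssigner ArgAssigner(CC_MyTarget);
  //   OutgoingValueAssigner RetAssigner(RetCC_MyTarget);
  //
  // A separate vararg function may be passed as the second constructor
  // argument when the target uses a different convention for variadic calls.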

  struct ValueHandler {
    MachineIRBuilder &MIRBuilder;
    MachineRegisterInfo &MRI;
    const bool IsIncomingArgumentHandler;

    ValueHandler(bool IsIncoming, MachineIRBuilder &MIRBuilder,
                 MachineRegisterInfo &MRI)
        : MIRBuilder(MIRBuilder), MRI(MRI),
          IsIncomingArgumentHandler(IsIncoming) {}

    virtual ~ValueHandler() = default;

    /// Returns true if the handler is dealing with incoming arguments,
    /// i.e. those that move values from some physical location to vregs.
    bool isIncomingArgumentHandler() const {
      return IsIncomingArgumentHandler;
    }

    /// Materialize a VReg containing the address of the specified
    /// stack-based object. This is either based on a FrameIndex or
    /// direct SP manipulation, depending on the context. \p MPO
    /// should be initialized to an appropriate description of the
    /// address created.
    virtual Register getStackAddress(uint64_t MemSize, int64_t Offset,
                                     MachinePointerInfo &MPO,
                                     ISD::ArgFlagsTy Flags) = 0;

    /// Return the in-memory type to write for the argument at \p VA. Its size
    /// may be smaller than the allocated stack slot size.
    ///
    /// This is overridable primarily for targets to maintain compatibility with
    /// hacks around the existing DAG call lowering infrastructure.
    virtual LLT getStackValueStoreType(const DataLayout &DL,
                                       const CCValAssign &VA,
                                       ISD::ArgFlagsTy Flags) const;

    /// The specified value has been assigned to a physical register,
    /// handle the appropriate COPY (either to or from) and mark any
    /// relevant uses/defines as needed.
    virtual void assignValueToReg(Register ValVReg, Register PhysReg,
                                  CCValAssign VA) = 0;

    /// The specified value has been assigned to a stack
    /// location. Load or store it there, with appropriate extension
    /// if necessary.
    virtual void assignValueToAddress(Register ValVReg, Register Addr,
                                      LLT MemTy, MachinePointerInfo &MPO,
                                      CCValAssign &VA) = 0;

    /// An overload which takes an ArgInfo if additional information about the
    /// arg is needed. \p ValRegIndex is the index in \p Arg.Regs for the value
    /// to store.
    virtual void assignValueToAddress(const ArgInfo &Arg, unsigned ValRegIndex,
                                      Register Addr, LLT MemTy,
                                      MachinePointerInfo &MPO,
                                      CCValAssign &VA) {
      assignValueToAddress(Arg.Regs[ValRegIndex], Addr, MemTy, MPO, VA);
    }

    /// Handle custom values, which may be passed into one or more of \p VAs.
    /// If the handler wants the assignments to be delayed until after the
    /// memory location assignments, then it sets \p Thunk to the thunk that
    /// will perform the assignment.
    /// \return The number of \p VAs that have been assigned after the first
    ///         one, and which should therefore be skipped from further
    ///         processing.
    virtual unsigned assignCustomValue(ArgInfo &Arg, ArrayRef<CCValAssign> VAs,
                                       std::function<void()> *Thunk = nullptr) {
      // This is not a pure virtual method because not all targets need to worry
      // about custom values.
      llvm_unreachable("Custom values not supported");
    }

    /// Do a memory copy of \p MemSize bytes from \p SrcPtr to \p DstPtr. This
    /// is necessary for outgoing stack-passed byval arguments.
    void
    copyArgumentMemory(const ArgInfo &Arg, Register DstPtr, Register SrcPtr,
                       const MachinePointerInfo &DstPtrInfo, Align DstAlign,
                       const MachinePointerInfo &SrcPtrInfo, Align SrcAlign,
                       uint64_t MemSize, CCValAssign &VA) const;

    /// Extend a register to the location type given in \p VA, capped at
    /// extending to at most \p MaxSizeBits bits. If \p MaxSizeBits is 0 then no
    /// maximum is set.
    Register extendRegister(Register ValReg, CCValAssign &VA,
                            unsigned MaxSizeBits = 0);
  };

  /// Base class for ValueHandlers used for arguments coming into the current
  /// function, or for return values received from a call.
  struct IncomingValueHandler : public ValueHandler {
    IncomingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI)
        : ValueHandler(/*IsIncoming*/ true, MIRBuilder, MRI) {}

    /// Insert G_ASSERT_ZEXT/G_ASSERT_SEXT or other hint instruction based on \p
    /// VA, returning the new register if a hint was inserted.
    Register buildExtensionHint(CCValAssign &VA, Register SrcReg, LLT NarrowTy);

    /// Provides a default implementation for argument handling.
    void assignValueToReg(Register ValVReg, Register PhysReg,
                          CCValAssign VA) override;
  };

  /// Base class for ValueHandlers used for arguments passed to a function call,
  /// or for return values.
  struct OutgoingValueHandler : public ValueHandler {
    OutgoingValueHandler(MachineIRBuilder &MIRBuilder, MachineRegisterInfo &MRI)
        : ValueHandler(/*IsIncoming*/ false, MIRBuilder, MRI) {}
  };
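
  // Example (illustrative sketch only; the frame lowering details, the pointer
  // width, and the FormalArgHandler name are target-dependent assumptions): an
  // incoming handler for formal arguments typically copies from physical
  // registers and loads stack-passed values from fixed frame objects:
  //
  //   struct FormalArgHandler : CallLowering::IncomingValueHandler {
  //     using IncomingValueHandler::IncomingValueHandler;
  //
  //     Register getStackAddress(uint64_t MemSize, int64_t Offset,
  //                              MachinePointerInfo &MPO,
  //                              ISD::ArgFlagsTy Flags) override {
  //       auto &MF = MIRBuilder.getMF();
  //       int FI = MF.getFrameInfo().CreateFixedObject(MemSize, Offset, true);
  //       MPO = MachinePointerInfo::getFixedStack(MF, FI);
  //       return MIRBuilder.buildFrameIndex(LLT::pointer(0, 64), FI).getReg(0);
  //     }
  //
  //     void assignValueToReg(Register ValVReg, Register PhysReg,
  //                           CCValAssign VA) override {
  //       MIRBuilder.getMBB().addLiveIn(PhysReg);
  //       // The base class emits the copy plus any G_ASSERT_* extension hint.
  //       IncomingValueHandler::assignValueToReg(ValVReg, PhysReg, VA);
  //     }
  //
  //     void assignValueToAddress(Register ValVReg, Register Addr, LLT MemTy,
  //                               MachinePointerInfo &MPO,
  //                               CCValAssign &VA) override {
  //       auto *MMO = MIRBuilder.getMF().getMachineMemOperand(
  //           MPO, MachineMemOperand::MOLoad, MemTy,
  //           inferAlignFromPtrInfo(MIRBuilder.getMF(), MPO));
  //       MIRBuilder.buildLoad(ValVReg, Addr, *MMO);
  //     }
  //   };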

protected:
  /// Getter for generic TargetLowering class.
  const TargetLowering *getTLI() const {
    return TLI;
  }

  /// Getter for target specific TargetLowering class.
  template <class XXXTargetLowering>
  const XXXTargetLowering *getTLI() const {
    return static_cast<const XXXTargetLowering *>(TLI);
  }

  /// \returns Flags corresponding to the attributes on the \p ArgIdx-th
  /// parameter of \p Call.
  ISD::ArgFlagsTy getAttributesForArgIdx(const CallBase &Call,
                                         unsigned ArgIdx) const;

  /// Adds flags to \p Flags based off of the attributes in \p Attrs.
  /// \p OpIdx is the index in \p Attrs to add flags from.
  void addArgFlagsFromAttributes(ISD::ArgFlagsTy &Flags,
                                 const AttributeList &Attrs,
                                 unsigned OpIdx) const;

  template <typename FuncInfoTy>
  void setArgFlags(ArgInfo &Arg, unsigned OpIdx, const DataLayout &DL,
                   const FuncInfoTy &FuncInfo) const;

  /// Break \p OrigArgInfo into one or more pieces the calling convention can
  /// process, returned in \p SplitArgs. For example, this should break structs
  /// down into individual fields.
  ///
  /// If \p Offsets is non-null, it points to a vector to be filled in
  /// with the in-memory offsets of each of the individual values.
  void splitToValueTypes(const ArgInfo &OrigArgInfo,
                         SmallVectorImpl<ArgInfo> &SplitArgs,
                         const DataLayout &DL, CallingConv::ID CallConv,
                         SmallVectorImpl<uint64_t> *Offsets = nullptr) const;

  /// Analyze the argument list in \p Args, using \p Assigner to populate \p
  /// CCInfo. This will determine the types and locations to use for passed or
  /// returned values. This may resize fields in \p Args if the value is split
  /// across multiple registers or stack slots.
  ///
  /// This is independent of the function state and can be used to determine
  /// how a call would pass arguments without needing to change the function,
  /// e.g. to check whether arguments are suitable for tail call lowering.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool determineAssignments(ValueAssigner &Assigner,
                            SmallVectorImpl<ArgInfo> &Args,
                            CCState &CCInfo) const;

  /// Invoke ValueAssigner::assignArg on each of the given \p Args and then use
  /// \p Handler to move them to the assigned locations.
  ///
  /// \return True if everything has succeeded, false otherwise.
  bool
  determineAndHandleAssignments(ValueHandler &Handler, ValueAssigner &Assigner,
                                SmallVectorImpl<ArgInfo> &Args,
                                MachineIRBuilder &MIRBuilder,
                                CallingConv::ID CallConv, bool IsVarArg,
                                ArrayRef<Register> ThisReturnRegs = None) const;

  /// Use \p Handler to insert code to handle the argument/return values
  /// represented by \p Args. It is expected that determineAssignments has
  /// previously processed these arguments to populate \p CCState and
  /// \p ArgLocs.
  bool handleAssignments(ValueHandler &Handler, SmallVectorImpl<ArgInfo> &Args,
                         CCState &CCState,
                         SmallVectorImpl<CCValAssign> &ArgLocs,
                         MachineIRBuilder &MIRBuilder,
                         ArrayRef<Register> ThisReturnRegs = None) const;
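
  // Example (illustrative sketch only; OutArgs, Assigner and Handler are
  // assumed to have been set up as in the sketches above): the combined helper
  // is equivalent to running the two phases by hand, which is useful when the
  // CCValAssign results must be inspected (e.g. for tail call checks) before
  // any code is emitted:
  //
  //   SmallVector<CCValAssign, 16> ArgLocs;
  //   CCState CCInfo(CallConv, IsVarArg, MIRBuilder.getMF(), ArgLocs,
  //                  MIRBuilder.getMF().getFunction().getContext());
  //   if (!determineAssignments(Assigner, OutArgs, CCInfo))
  //     return false;                      // Inspect ArgLocs here if needed.
  //   if (!handleAssignments(Handler, OutArgs, CCInfo, ArgLocs, MIRBuilder))
  //     return false;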

  /// Check whether parameters to a call that are passed in callee saved
  /// registers are the same as from the calling function. This needs to be
  /// checked for tail call eligibility.
  bool parametersInCSRMatch(const MachineRegisterInfo &MRI,
                            const uint32_t *CallerPreservedMask,
                            const SmallVectorImpl<CCValAssign> &ArgLocs,
                            const SmallVectorImpl<ArgInfo> &OutVals) const;

  /// \returns True if the calling conventions of the callee and its caller pass
  /// results in the same way. Typically used for tail call eligibility checks.
  ///
  /// \p Info is the CallLoweringInfo for the call.
  /// \p MF is the MachineFunction for the caller.
  /// \p InArgs contains the results of the call.
  /// \p CalleeAssigner specifies the target's handling of the argument types
  /// for the callee.
  /// \p CallerAssigner specifies the target's handling of the
  /// argument types for the caller.
  bool resultsCompatible(CallLoweringInfo &Info, MachineFunction &MF,
                         SmallVectorImpl<ArgInfo> &InArgs,
                         ValueAssigner &CalleeAssigner,
                         ValueAssigner &CallerAssigner) const;

public:
  CallLowering(const TargetLowering *TLI) : TLI(TLI) {}
  virtual ~CallLowering() = default;

  /// \return true if the target is capable of handling swifterror values that
  /// have been promoted to a specified register. The extended versions of
  /// lowerReturn and lowerCall should be implemented.
  virtual bool supportSwiftError() const {
    return false;
  }

  /// Load the returned value from the stack into virtual registers in \p VRegs.
  /// It uses the frame index \p FI and the base address in \p DemoteReg.
  /// The loaded data size will be determined from \p RetTy.
  void insertSRetLoads(MachineIRBuilder &MIRBuilder, Type *RetTy,
                       ArrayRef<Register> VRegs, Register DemoteReg,
                       int FI) const;

  /// Store the return value given by \p VRegs into the stack, starting at the
  /// address held in \p DemoteReg.
  void insertSRetStores(MachineIRBuilder &MIRBuilder, Type *RetTy,
                        ArrayRef<Register> VRegs, Register DemoteReg) const;

  /// Insert the hidden sret ArgInfo to the beginning of \p SplitArgs.
  /// This function should be called from the target specific
  /// lowerFormalArguments when \p F requires the sret demotion.
  void insertSRetIncomingArgument(const Function &F,
                                  SmallVectorImpl<ArgInfo> &SplitArgs,
                                  Register &DemoteReg, MachineRegisterInfo &MRI,
                                  const DataLayout &DL) const;

  /// For the call-base described by \p CB, insert the hidden sret ArgInfo to
  /// the OrigArgs field of \p Info.
  void insertSRetOutgoingArgument(MachineIRBuilder &MIRBuilder,
                                  const CallBase &CB,
                                  CallLoweringInfo &Info) const;
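
  // Example (illustrative sketch only; the exact placement of these calls is
  // target dependent): when the return value cannot be lowered to registers,
  // the sret-demotion helpers are used in pairs. On the call side, the hidden
  // pointer argument is added before the call and the result is read back
  // afterwards:
  //
  //   if (!Info.CanLowerReturn) {
  //     insertSRetOutgoingArgument(MIRBuilder, *Info.CB, Info);
  //     // ... lower and emit the call itself ...
  //     insertSRetLoads(MIRBuilder, Info.OrigRet.Ty, Info.OrigRet.Regs,
  //                     Info.DemoteRegister, Info.DemoteStackIndex);
  //   }
  //
  // The callee side mirrors this with insertSRetIncomingArgument in
  // lowerFormalArguments and insertSRetStores in lowerReturn.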

  /// \return True if the return type described by \p Outs can be returned
  /// without performing sret demotion.
  bool checkReturn(CCState &CCInfo, SmallVectorImpl<BaseArgInfo> &Outs,
                   CCAssignFn *Fn) const;

  /// Get the type and the ArgFlags for the split components of \p RetTy as
  /// returned by \c ComputeValueVTs.
  void getReturnInfo(CallingConv::ID CallConv, Type *RetTy, AttributeList Attrs,
                     SmallVectorImpl<BaseArgInfo> &Outs,
                     const DataLayout &DL) const;

  /// Top-level function to check the return type based on the target calling
  /// convention. \return True if the return value of \p MF can be returned
  /// without performing sret demotion.
  bool checkReturnTypeForCallConv(MachineFunction &MF) const;

  /// This hook must be implemented to check whether the return values
  /// described by \p Outs can fit into the return registers. If false
  /// is returned, an sret-demotion is performed.
  virtual bool canLowerReturn(MachineFunction &MF, CallingConv::ID CallConv,
                              SmallVectorImpl<BaseArgInfo> &Outs,
                              bool IsVarArg) const {
    return true;
  }

  /// This hook must be implemented to lower outgoing return values, described
  /// by \p Val, into the specified virtual registers \p VRegs.
  /// This hook is used by GlobalISel.
  ///
  /// \p FLI is required for sret demotion.
  ///
  /// \p SwiftErrorVReg is non-zero if the function has a swifterror parameter
  /// that needs to be implicitly returned.
  ///
  /// \return True if the lowering succeeds, false otherwise.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs, FunctionLoweringInfo &FLI,
                           Register SwiftErrorVReg) const {
    if (!supportSwiftError()) {
      assert(SwiftErrorVReg == 0 && "attempt to use unsupported swifterror");
      return lowerReturn(MIRBuilder, Val, VRegs, FLI);
    }
    return false;
  }

  /// This hook behaves as the extended lowerReturn function, but for targets
  /// that do not support swifterror value promotion.
  virtual bool lowerReturn(MachineIRBuilder &MIRBuilder, const Value *Val,
                           ArrayRef<Register> VRegs,
                           FunctionLoweringInfo &FLI) const {
    return false;
  }

  virtual bool fallBackToDAGISel(const MachineFunction &MF) const {
    return false;
  }

  /// This hook must be implemented to lower the incoming (formal)
  /// arguments, described by \p VRegs, for GlobalISel. Each argument
  /// must end up in its related virtual registers: the first argument
  /// should end up in \c VRegs[0], the second in \c VRegs[1], and so on.
  /// For each argument, there will be one register for each non-aggregate
  /// type, as returned by \c computeValueLLTs.
  /// \p MIRBuilder is set to the proper insertion point for the argument
  /// lowering. \p FLI is required for sret demotion.
  ///
  /// \return True if the lowering succeeded, false otherwise.
  virtual bool lowerFormalArguments(MachineIRBuilder &MIRBuilder,
                                    const Function &F,
                                    ArrayRef<ArrayRef<Register>> VRegs,
                                    FunctionLoweringInfo &FLI) const {
    return false;
  }
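
  // Example (illustrative sketch only; MyTargetCallLowering, CC_MyTarget and
  // a handler like the FormalArgHandler sketched above are hypothetical): a
  // typical override splits each IR argument into CC-compatible pieces and
  // then lets the common code assign and emit them:
  //
  //   bool MyTargetCallLowering::lowerFormalArguments(
  //       MachineIRBuilder &MIRBuilder, const Function &F,
  //       ArrayRef<ArrayRef<Register>> VRegs,
  //       FunctionLoweringInfo &FLI) const {
  //     MachineFunction &MF = MIRBuilder.getMF();
  //     const DataLayout &DL = F.getParent()->getDataLayout();
  //
  //     SmallVector<ArgInfo, 8> SplitArgs;
  //     unsigned Idx = 0;
  //     for (const Argument &Arg : F.args()) {
  //       ArgInfo OrigArg{VRegs[Idx], Arg.getType(), Idx};
  //       setArgFlags(OrigArg, Idx + AttributeList::FirstArgIndex, DL, F);
  //       splitToValueTypes(OrigArg, SplitArgs, DL, F.getCallingConv());
  //       ++Idx;
  //     }
  //
  //     IncomingValueAssigner Assigner(CC_MyTarget);
  //     FormalArgHandler Handler(MIRBuilder, MF.getRegInfo());
  //     return determineAndHandleAssignments(Handler, Assigner, SplitArgs,
  //                                          MIRBuilder, F.getCallingConv(),
  //                                          F.isVarArg());
  //   }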

  /// This hook must be implemented to lower the given call instruction,
  /// including argument and return value marshalling.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  virtual bool lowerCall(MachineIRBuilder &MIRBuilder,
                         CallLoweringInfo &Info) const {
    return false;
  }
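
  // Example (illustrative sketch only; MyTargetCallLowering, MyTarget::CALL,
  // CC_MyTarget, RetCC_MyTarget, OutgoingArgHandler and CallReturnHandler are
  // hypothetical): a minimal override marshals the outgoing arguments, emits
  // the call, and then marshals the returned value:
  //
  //   bool MyTargetCallLowering::lowerCall(MachineIRBuilder &MIRBuilder,
  //                                        CallLoweringInfo &Info) const {
  //     MachineFunction &MF = MIRBuilder.getMF();
  //     const DataLayout &DL = MF.getDataLayout();
  //
  //     SmallVector<ArgInfo, 8> OutArgs;
  //     for (const ArgInfo &OrigArg : Info.OrigArgs)
  //       splitToValueTypes(OrigArg, OutArgs, DL, Info.CallConv);
  //
  //     auto Call =
  //         MIRBuilder.buildInstrNoInsert(MyTarget::CALL).add(Info.Callee);
  //
  //     OutgoingValueAssigner ArgAssigner(CC_MyTarget);
  //     OutgoingArgHandler ArgHandler(MIRBuilder, MF.getRegInfo(), Call);
  //     if (!determineAndHandleAssignments(ArgHandler, ArgAssigner, OutArgs,
  //                                        MIRBuilder, Info.CallConv,
  //                                        Info.IsVarArg))
  //       return false;
  //
  //     MIRBuilder.insertInstr(Call);
  //
  //     if (!Info.OrigRet.Ty->isVoidTy()) {
  //       SmallVector<ArgInfo, 8> InArgs;
  //       splitToValueTypes(Info.OrigRet, InArgs, DL, Info.CallConv);
  //       // Return values received from the call are incoming values.
  //       IncomingValueAssigner RetAssigner(RetCC_MyTarget);
  //       CallReturnHandler RetHandler(MIRBuilder, MF.getRegInfo(), Call);
  //       if (!determineAndHandleAssignments(RetHandler, RetAssigner, InArgs,
  //                                          MIRBuilder, Info.CallConv,
  //                                          Info.IsVarArg))
  //         return false;
  //     }
  //     return true;
  //   }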

  /// Lower the given call instruction, including argument and return value
  /// marshalling.
  ///
  /// \p Call is the call/invoke instruction.
  ///
  /// \p ResRegs are the registers where the call's return value should be
  /// stored (or empty if there is no return value). There will be one register
  /// for each non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p ArgRegs is a list of lists of virtual registers containing each
  /// argument that needs to be passed (argument \c i should be placed in \c
  /// ArgRegs[i]). For each argument, there will be one register for each
  /// non-aggregate type, as returned by \c computeValueLLTs.
  ///
  /// \p SwiftErrorVReg is non-zero if the call has a swifterror inout
  /// parameter, and contains the vreg that the swifterror should be copied into
  /// after the call.
  ///
  /// \p GetCalleeReg is a callback to materialize a register for the callee if
  /// the target determines it cannot jump to the destination based purely on \p
  /// Call. This might be because \p Call is indirect, or because of the limited
  /// range of an immediate jump.
  ///
  /// \return true if the lowering succeeded, false otherwise.
  bool lowerCall(MachineIRBuilder &MIRBuilder, const CallBase &Call,
                 ArrayRef<Register> ResRegs,
                 ArrayRef<ArrayRef<Register>> ArgRegs, Register SwiftErrorVReg,
                 std::function<unsigned()> GetCalleeReg) const;

  /// Targets which want to use big-endian lowering can enable it by overriding
  /// this hook.
  virtual bool enableBigEndian() const { return false; }

  /// For targets which support the "returned" parameter attribute, returns
  /// true if the given type is a valid one to use with "returned".
  virtual bool isTypeIsValidForThisReturn(EVT Ty) const { return false; }
};

} // end namespace llvm

#endif // LLVM_CODEGEN_GLOBALISEL_CALLLOWERING_H