//===- CoroInternal.h - Internal Coroutine interfaces ---------*- C++ -*---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// Common definitions/declarations used internally by coroutine lowering passes.
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TRANSFORMS_COROUTINES_COROINTERNAL_H
#define LLVM_LIB_TRANSFORMS_COROUTINES_COROINTERNAL_H

#include "CoroInstr.h"
#include "llvm/IR/IRBuilder.h"

namespace llvm {

class CallGraph;

namespace coro {

/// Returns true if \p M declares any of the coroutine intrinsics
/// (implementation lives in the corresponding .cpp; see declaration only
/// here).
bool declaresAnyIntrinsic(const Module &M);
/// Returns true if \p M declares any of the intrinsics named in the given
/// list. Used by the coroutine passes to cheaply skip modules that contain
/// no relevant intrinsics.
bool declaresIntrinsics(const Module &M,
                        const std::initializer_list<StringRef>);
/// Rewrites all llvm.coro.free users of \p CoroId; \p Elide selects the
/// elided (heap-allocation-free) form.
void replaceCoroFree(CoroIdInst *CoroId, bool Elide);

/// Attempts to rewrite the location operand of debug intrinsics in terms of
/// the coroutine frame pointer, folding pointer offsets into the DIExpression
/// of the intrinsic.
/// If the frame pointer is an Argument, store it into an alloca if
/// OptimizeFrame is false.
void salvageDebugInfo(
    SmallDenseMap<Argument *, AllocaInst *, 4> &ArgToAllocaMap,
    DbgVariableIntrinsic *DVI, bool OptimizeFrame);

// Keeps data and helper functions for lowering coroutine intrinsics.
struct LowererBase {
  Module &TheModule;         // Module being lowered.
  LLVMContext &Context;      // Context of TheModule.
  PointerType *const Int8Ptr;       // Cached i8* type.
  FunctionType *const ResumeFnType; // Type of the resume/destroy functions.
  ConstantPointerNull *const NullPtr; // Cached null of Int8Ptr's type.

  LowererBase(Module &M);
  /// Creates a call to the coro.subfn.addr helper selecting sub-function
  /// \p Index of \p Arg, inserted before \p InsertPt.
  Value *makeSubFnCall(Value *Arg, int Index, Instruction *InsertPt);
};

/// The coroutine lowering strategy; selects which member of Shape's
/// anonymous union below is active.
enum class ABI {
  /// The "resume-switch" lowering, where there are separate resume and
  /// destroy functions that are shared between all suspend points. The
  /// coroutine frame implicitly stores the resume and destroy functions,
  /// the current index, and any promise value.
  Switch,

  /// The "returned-continuation" lowering, where each suspend point creates a
  /// single continuation function that is used for both resuming and
  /// destroying. Does not support promises.
  Retcon,

  /// The "unique returned-continuation" lowering, where each suspend point
  /// creates a single continuation function that is used for both resuming
  /// and destroying. Does not support promises. The function is known to
  /// suspend at most once during its execution, and the return value of
  /// the continuation is void.
  RetconOnce,

  /// The "async continuation" lowering, where each suspend point creates a
  /// single continuation function. The continuation function is available as an
  /// intrinsic.
  Async,
};

// Holds structural Coroutine Intrinsics for a particular function and other
// values used during CoroSplit pass.
struct LLVM_LIBRARY_VISIBILITY Shape {
  CoroBeginInst *CoroBegin;                     // The coro.begin intrinsic.
  SmallVector<AnyCoroEndInst *, 4> CoroEnds;    // All coro.end intrinsics.
  SmallVector<CoroSizeInst *, 2> CoroSizes;     // All coro.size intrinsics.
  SmallVector<CoroAlignInst *, 2> CoroAligns;   // All coro.align intrinsics.
  SmallVector<AnyCoroSuspendInst *, 4> CoroSuspends; // All suspend points.
  SmallVector<CallInst*, 2> SwiftErrorOps;      // Swift-error related calls.

  // Field indexes for special fields in the switch lowering.
  struct SwitchFieldIndex {
    enum {
      Resume,
      Destroy

      // The promise field is always at a fixed offset from the start of
      // frame given its type, but the index isn't a constant for all
      // possible frames.

      // The switch-index field isn't at a fixed offset or index, either;
      // we just work it in where it fits best.
    };
  };

  coro::ABI ABI;                 // Active lowering; guards the union below.

  StructType *FrameTy;           // Layout of the coroutine frame.
  Align FrameAlign;              // Alignment of the frame.
  uint64_t FrameSize;            // Size of the frame in bytes.
  Value *FramePtr;               // Pointer to the frame (Instruction or Argument).
  BasicBlock *AllocaSpillBlock;  // Block holding spilled allocas.

  /// This would only be true if optimization are enabled.
  bool OptimizeFrame;

  // State specific to the switch (resume/destroy pair) lowering.
  struct SwitchLoweringStorage {
    SwitchInst *ResumeSwitch;        // Dispatch over the suspend index.
    AllocaInst *PromiseAlloca;       // Alloca backing the promise, if any.
    BasicBlock *ResumeEntryBlock;    // Entry block of the resume function.
    unsigned IndexField;             // Frame field index of the suspend index.
    unsigned IndexAlign;             // Alignment of the index field.
    unsigned IndexOffset;            // Byte offset of the index field.
    bool HasFinalSuspend;            // Whether a final suspend point exists.
    bool HasUnwindCoroEnd;           // Whether any coro.end is on an unwind path.
  };

  // State specific to the returned-continuation lowerings.
  struct RetconLoweringStorage {
    Function *ResumePrototype;       // Prototype for continuation functions.
    Function *Alloc;                 // Frame allocation function.
    Function *Dealloc;               // Frame deallocation function.
    BasicBlock *ReturnBlock;         // Unified return block.
    bool IsFrameInlineInStorage;     // Frame fits in caller-provided storage.
  };

  // State specific to the async lowering.
  struct AsyncLoweringStorage {
    Value *Context;                  // Async context pointer.
    CallingConv::ID AsyncCC;         // Calling convention of continuations.
    unsigned ContextArgNo;           // Argument index of the context.
    uint64_t ContextHeaderSize;
    uint64_t ContextAlignment;
    uint64_t FrameOffset; // Start of the frame.
    uint64_t ContextSize; // Includes frame size.
    GlobalVariable *AsyncFuncPointer; // Associated async function pointer.

    Align getContextAlignment() const { return Align(ContextAlignment); }
  };

  // Exactly one member is active, chosen by the ABI field above.
  union {
    SwitchLoweringStorage SwitchLowering;
    RetconLoweringStorage RetconLowering;
    AsyncLoweringStorage AsyncLowering;
  };

  // ABI-checked accessors for the coro.id intrinsic; each asserts the
  // matching lowering is active before casting.
  CoroIdInst *getSwitchCoroId() const {
    assert(ABI == coro::ABI::Switch);
    return cast<CoroIdInst>(CoroBegin->getId());
  }

  AnyCoroIdRetconInst *getRetconCoroId() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);
    return cast<AnyCoroIdRetconInst>(CoroBegin->getId());
  }

  CoroIdAsyncInst *getAsyncCoroId() const {
    assert(ABI == coro::ABI::Async);
    return cast<CoroIdAsyncInst>(CoroBegin->getId());
  }

  /// Frame field index of the suspend index (switch lowering only; requires
  /// the frame type to have been built).
  unsigned getSwitchIndexField() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return SwitchLowering.IndexField;
  }
  /// Integer type of the suspend-index frame field.
  IntegerType *getIndexType() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return cast<IntegerType>(FrameTy->getElementType(getSwitchIndexField()));
  }
  /// Constant of the index type with the given value.
  ConstantInt *getIndex(uint64_t Value) const {
    return ConstantInt::get(getIndexType(), Value);
  }

  /// Pointer type of the resume-function frame field (switch lowering only).
  PointerType *getSwitchResumePointerType() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return cast<PointerType>(FrameTy->getElementType(SwitchFieldIndex::Resume));
  }

  /// Type of the resume function for the active lowering; null for Async,
  /// where it depends on the active suspend instead.
  FunctionType *getResumeFunctionType() const {
    switch (ABI) {
    case coro::ABI::Switch:
      return FunctionType::get(Type::getVoidTy(FrameTy->getContext()),
                               FrameTy->getPointerTo(), /*IsVarArg*/false);
    case coro::ABI::Retcon:
    case coro::ABI::RetconOnce:
      return RetconLowering.ResumePrototype->getFunctionType();
    case coro::ABI::Async:
      // Not used. The function type depends on the active suspend.
      return nullptr;
    }

    llvm_unreachable("Unknown coro::ABI enum");
  }

  /// Result types of the ramp function beyond the continuation pointer
  /// (element 0 of the returned struct), or empty if the return type is not
  /// a struct.
  ArrayRef<Type*> getRetconResultTypes() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);
    auto FTy = CoroBegin->getFunction()->getFunctionType();

    // The safety of all this is checked by checkWFRetconPrototype.
    if (auto STy = dyn_cast<StructType>(FTy->getReturnType())) {
      return STy->elements().slice(1);
    } else {
      return ArrayRef<Type*>();
    }
  }

  /// Parameter types of the continuation beyond the frame pointer
  /// (parameter 0 of the prototype).
  ArrayRef<Type*> getRetconResumeTypes() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);

    // The safety of all this is checked by checkWFRetconPrototype.
    auto FTy = RetconLowering.ResumePrototype->getFunctionType();
    return FTy->params().slice(1);
  }

  /// Calling convention used for the resume/continuation functions of the
  /// active lowering.
  CallingConv::ID getResumeFunctionCC() const {
    switch (ABI) {
    case coro::ABI::Switch:
      return CallingConv::Fast;

    case coro::ABI::Retcon:
    case coro::ABI::RetconOnce:
      return RetconLowering.ResumePrototype->getCallingConv();
    case coro::ABI::Async:
      return AsyncLowering.AsyncCC;
    }
    llvm_unreachable("Unknown coro::ABI enum");
  }

  /// Promise alloca, if any; only the switch lowering supports promises.
  AllocaInst *getPromiseAlloca() const {
    if (ABI == coro::ABI::Switch)
      return SwitchLowering.PromiseAlloca;
    return nullptr;
  }

  /// First valid insertion point after the frame pointer is defined: right
  /// after FramePtr when it is an instruction, otherwise (FramePtr is an
  /// Argument) the front of the function's entry block.
  Instruction *getInsertPtAfterFramePtr() const {
    if (auto *I = dyn_cast<Instruction>(FramePtr))
      return I->getNextNode();
    return &cast<Argument>(FramePtr)->getParent()->getEntryBlock().front();
  }

  /// Allocate memory according to the rules of the active lowering.
  ///
  /// \param CG - if non-null, will be updated for the new call
  Value *emitAlloc(IRBuilder<> &Builder, Value *Size, CallGraph *CG) const;

  /// Deallocate memory according to the rules of the active lowering.
  ///
  /// \param CG - if non-null, will be updated for the new call
  void emitDealloc(IRBuilder<> &Builder, Value *Ptr, CallGraph *CG) const;

  Shape() = default;
  explicit Shape(Function &F, bool OptimizeFrame = false)
      : OptimizeFrame(OptimizeFrame) {
    buildFrom(F);
  }
  /// Populates this Shape by scanning \p F for coroutine intrinsics.
  void buildFrom(Function &F);
};

/// Default predicate for whether an instruction may be rematerialized
/// rather than spilled to the frame.
bool defaultMaterializable(Instruction &V);
/// Builds the coroutine frame for \p F, spilling values that live across
/// suspends; \p MaterializableCallback decides what may be rematerialized.
void buildCoroutineFrame(
    Function &F, Shape &Shape,
    const std::function<bool(Instruction &)> &MaterializableCallback);
/// Creates a musttail call to \p MustTailCallFn with the given arguments
/// and debug location.
CallInst *createMustTailCall(DebugLoc Loc, Function *MustTailCallFn,
                             ArrayRef<Value *> Arguments, IRBuilder<> &);
} // End namespace coro.
} // End namespace llvm

#endif