//===- CoroElide.cpp - Coroutine Frame Allocation Elision Pass ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "llvm/Transforms/Coroutines/CoroElide.h"
#include "CoroInternal.h"
#include "llvm/ADT/DenseMap.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/ErrorHandling.h"

using namespace llvm;

#define DEBUG_TYPE "coro-elide"

namespace {
// Created on demand if the coro-elide pass has work to do.
struct Lowerer : coro::LowererBase {
  SmallVector<CoroIdInst *, 4> CoroIds;
  SmallVector<CoroBeginInst *, 1> CoroBegins;
  SmallVector<CoroAllocInst *, 1> CoroAllocs;
  SmallVector<CoroSubFnInst *, 4> ResumeAddr;
  DenseMap<CoroBeginInst *, SmallVector<CoroSubFnInst *, 4>> DestroyAddr;
  SmallVector<CoroFreeInst *, 1> CoroFrees;
  SmallPtrSet<const SwitchInst *, 4> CoroSuspendSwitches;

  Lowerer(Module &M) : LowererBase(M) {}

  void elideHeapAllocations(Function *F, uint64_t FrameSize, Align FrameAlign,
                            AAResults &AA);
  bool shouldElide(Function *F, DominatorTree &DT) const;
  void collectPostSplitCoroIds(Function *F);
  bool processCoroId(CoroIdInst *, AAResults &AA, DominatorTree &DT);
  bool hasEscapePath(const CoroBeginInst *,
                     const SmallPtrSetImpl<BasicBlock *> &) const;
};
} // end anonymous namespace

// Go through the list of coro.subfn.addr intrinsics and replace them with the
// provided constant.
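// For example (an illustrative sketch; %hdl and @f.resume are hypothetical
// names), a devirtualizable use such as:
//   %addr = call i8* @llvm.coro.subfn.addr(i8* %hdl, i8 0)
//   %fn = bitcast i8* %addr to void (i8*)*
//   call fastcc void %fn(i8* %hdl)
// collapses to a direct call to @f.resume once %addr is replaced with the
// constant and recursively simplified.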
static void replaceWithConstant(Constant *Value,
                                SmallVectorImpl<CoroSubFnInst *> &Users) {
  if (Users.empty())
    return;

  // See if we need to bitcast the constant to match the type of the intrinsic
  // being replaced. Note: All coro.subfn.addr intrinsics return the same type,
  // so we only need to examine the type of the first one in the list.
  Type *IntrTy = Users.front()->getType();
  Type *ValueTy = Value->getType();
  if (ValueTy != IntrTy) {
    // May need to tweak the function type to match the type expected at the
    // use site.
    assert(ValueTy->isPointerTy() && IntrTy->isPointerTy());
    Value = ConstantExpr::getBitCast(Value, IntrTy);
  }

  // Now the value type matches the type of the intrinsic. Replace them all!
  for (CoroSubFnInst *I : Users)
    replaceAndRecursivelySimplify(I, Value);
}

// See if any operand of the call instruction references the coroutine frame.
static bool operandReferences(CallInst *CI, AllocaInst *Frame, AAResults &AA) {
  for (Value *Op : CI->operand_values())
    if (AA.alias(Op, Frame) != NoAlias)
      return true;
  return false;
}
// Look for any tail calls referencing the coroutine frame and remove the tail
// attribute from them, since the coroutine frame now resides on the stack and
// a tail call implies that the function does not reference anything on the
// stack.
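// For example (illustrative; @consume and %frame.ptr are hypothetical), a
// call such as:
//   tail call void @consume(i8* %frame.ptr)
// must become a plain call when %frame.ptr may alias the new frame alloca,
// since a tail call is permitted to outlive the caller's stack frame.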
static void removeTailCallAttribute(AllocaInst *Frame, AAResults &AA) {
  Function &F = *Frame->getFunction();
  for (Instruction &I : instructions(F))
    if (auto *Call = dyn_cast<CallInst>(&I))
      if (Call->isTailCall() && operandReferences(Call, Frame, AA))
        Call->setTailCall(false);
}

// Given a resume function @f.resume(%f.frame* %frame), returns the size
// and expected alignment of the %f.frame type.
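// For example (an illustrative signature; the attribute values are
// hypothetical), a resume function such as:
//   define internal fastcc void @f.resume(
//       %f.Frame* noalias nonnull align 8 dereferenceable(40) %frame)
// yields a frame size of 40 bytes and an alignment of 8 directly from the
// parameter attributes, without inspecting the frame type.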
static std::pair<uint64_t, Align> getFrameLayout(Function *Resume) {
  // Prefer to pull information from the function attributes.
  auto Size = Resume->getParamDereferenceableBytes(0);
  auto Align = Resume->getParamAlign(0);

  // If those aren't given, extract them from the type.
  if (Size == 0 || !Align) {
    auto *FrameTy = Resume->arg_begin()->getType()->getPointerElementType();

    const DataLayout &DL = Resume->getParent()->getDataLayout();
    if (!Size) Size = DL.getTypeAllocSize(FrameTy);
    if (!Align) Align = DL.getABITypeAlign(FrameTy);
  }

  return std::make_pair(Size, *Align);
}

// Finds the first non-alloca instruction in the entry block of a function.
static Instruction *getFirstNonAllocaInTheEntryBlock(Function *F) {
  for (Instruction &I : F->getEntryBlock())
    if (!isa<AllocaInst>(&I))
      return &I;
  llvm_unreachable("no terminator in the entry block");
}

// To elide heap allocations, we need to suppress the code blocks guarded by
// llvm.coro.alloc and llvm.coro.free instructions.
void Lowerer::elideHeapAllocations(Function *F, uint64_t FrameSize,
                                   Align FrameAlign, AAResults &AA) {
  LLVMContext &C = F->getContext();
  auto *InsertPt =
      getFirstNonAllocaInTheEntryBlock(CoroIds.front()->getFunction());

  // Replacing llvm.coro.alloc with false will suppress the dynamic
  // allocation, since the frontend is expected to generate code that
  // looks like:
  //   id = coro.id(...)
  //   mem = coro.alloc(id) ? malloc(coro.size()) : 0;
  //   coro.begin(id, mem)
  auto *False = ConstantInt::getFalse(C);
  for (auto *CA : CoroAllocs) {
    CA->replaceAllUsesWith(False);
    CA->eraseFromParent();
  }

  // FIXME: Design how to transmit alignment information for every alloca that
  // is spilled into the coroutine frame and recreate the alignment information
  // here. Possibly we will need to do a mini SROA here and break the coroutine
  // frame into individual AllocaInst recreating the original alignment.
  const DataLayout &DL = F->getParent()->getDataLayout();
  auto FrameTy = ArrayType::get(Type::getInt8Ty(C), FrameSize);
  auto *Frame = new AllocaInst(FrameTy, DL.getAllocaAddrSpace(), "", InsertPt);
  Frame->setAlignment(FrameAlign);
  auto *FrameVoidPtr =
      new BitCastInst(Frame, Type::getInt8PtrTy(C), "vFrame", InsertPt);

  for (auto *CB : CoroBegins) {
    CB->replaceAllUsesWith(FrameVoidPtr);
    CB->eraseFromParent();
  }

  // Since the coroutine frame now lives on the stack, any tail call that
  // references it must be made a non-tail call.
  removeTailCallAttribute(Frame, AA);
}

bool Lowerer::hasEscapePath(const CoroBeginInst *CB,
                            const SmallPtrSetImpl<BasicBlock *> &TIs) const {
  const auto &It = DestroyAddr.find(CB);
  assert(It != DestroyAddr.end());

  // Limit the number of blocks we visit.
  unsigned Limit = 32 * (1 + It->second.size());

  SmallVector<const BasicBlock *, 32> Worklist;
  Worklist.push_back(CB->getParent());

  SmallPtrSet<const BasicBlock *, 32> Visited;
  // Consider the basic block of each coro.destroy as visited, so that we
  // skip any path that passes through a coro.destroy.
  for (auto *DA : It->second)
    Visited.insert(DA->getParent());

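  // Depth-first walk over the CFG, starting at the block containing
  // coro.begin; reaching any block in TIs means an escape path exists.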
  do {
    const auto *BB = Worklist.pop_back_val();
    if (!Visited.insert(BB).second)
      continue;
    if (TIs.count(BB))
      return true;

    // Conservatively say that there is potentially a path.
    if (!--Limit)
      return true;

    auto TI = BB->getTerminator();
    // Although the default dest of coro.suspend switches is the suspend
    // block, which would mean an escape path to a normal terminator, it is
    // reasonable to skip it since the coroutine frame doesn't change outside
    // the coroutine body.
    if (isa<SwitchInst>(TI) &&
        CoroSuspendSwitches.count(cast<SwitchInst>(TI))) {
      Worklist.push_back(cast<SwitchInst>(TI)->getSuccessor(1));
      Worklist.push_back(cast<SwitchInst>(TI)->getSuccessor(2));
    } else
      Worklist.append(succ_begin(BB), succ_end(BB));

  } while (!Worklist.empty());

  // We have exhausted all possible paths and are certain that coro.begin
  // cannot reach any of the terminators.
  return false;
}

bool Lowerer::shouldElide(Function *F, DominatorTree &DT) const {
  // If there are no coro.allocs, we cannot suppress allocation, so elision
  // is not possible.
  if (CoroAllocs.empty())
    return false;

  // Check that for every coro.begin there is at least one coro.destroy
  // directly referencing the SSA value of that coro.begin along each
  // non-exceptional path.
  // If the value escaped, then coro.destroy would have been referencing a
  // memory location storing that value and not the virtual register.
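  // An illustrative shape of what we look for (%hdl is a hypothetical name):
  //   %hdl = call i8* @llvm.coro.begin(token %id, i8* %mem)
  //   ...
  //   %d = call i8* @llvm.coro.subfn.addr(i8* %hdl, i8 1) ; DestroyIndex
  // where such a destroy reference dominates every non-exceptional
  // terminator, or no terminator is reachable from coro.begin without
  // passing through a coro.destroy.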

  SmallPtrSet<BasicBlock *, 8> Terminators;
  // First gather all of the non-exceptional terminators for the function.
  // Consider the final coro.suspend as the real terminator when the current
  // function is a coroutine.
  for (BasicBlock &B : *F) {
    auto *TI = B.getTerminator();
    if (TI->getNumSuccessors() == 0 && !TI->isExceptionalTerminator() &&
        !isa<UnreachableInst>(TI))
      Terminators.insert(&B);
  }

  // Filter out the coro.destroys that lie along exceptional paths.
  SmallPtrSet<CoroBeginInst *, 8> ReferencedCoroBegins;
  for (auto &It : DestroyAddr) {
    for (Instruction *DA : It.second) {
      for (BasicBlock *TI : Terminators) {
        if (DT.dominates(DA, TI->getTerminator())) {
          ReferencedCoroBegins.insert(It.first);
          break;
        }
      }
    }

    // Also check whether there is any path from coro.begin to the
    // terminators that does not pass through any of the coro.destroys.
    if (!ReferencedCoroBegins.count(It.first) &&
        !hasEscapePath(It.first, Terminators))
      ReferencedCoroBegins.insert(It.first);
  }

  // If the size of the set equals the total number of coro.begins, that means
  // we found a coro.free or coro.destroy referencing each coro.begin, so we
  // can perform heap elision.
  if (ReferencedCoroBegins.size() != CoroBegins.size())
    return false;

  // If any call in the function is a musttail call, it usually won't work
  // because we cannot drop the tailcall attribute, and a tail call will reuse
  // the entire stack where we are going to put the new frame. In theory a more
  // precise analysis can be done to check whether the new frame aliases with
  // the call, however it's challenging to do so before the elision actually
  // happened.
  for (BasicBlock &BB : *F)
    if (BB.getTerminatingMustTailCall())
      return false;

  return true;
}

void Lowerer::collectPostSplitCoroIds(Function *F) {
  CoroIds.clear();
  CoroSuspendSwitches.clear();
  for (auto &I : instructions(F)) {
    if (auto *CII = dyn_cast<CoroIdInst>(&I))
      if (CII->getInfo().isPostSplit())
        // If it is the coroutine itself, don't touch it.
        if (CII->getCoroutine() != CII->getFunction())
          CoroIds.push_back(CII);

    // Consider a case like:
    // %0 = call i8 @llvm.coro.suspend(...)
    // switch i8 %0, label %suspend [i8 0, label %resume
    //                              i8 1, label %cleanup]
    // and collect the SwitchInsts which are used by escape analysis later.
    if (auto *CSI = dyn_cast<CoroSuspendInst>(&I))
      if (CSI->hasOneUse() && isa<SwitchInst>(CSI->use_begin()->getUser())) {
        SwitchInst *SWI = cast<SwitchInst>(CSI->use_begin()->getUser());
        if (SWI->getNumCases() == 2)
          CoroSuspendSwitches.insert(SWI);
      }
  }
}

bool Lowerer::processCoroId(CoroIdInst *CoroId, AAResults &AA,
                            DominatorTree &DT) {
  CoroBegins.clear();
  CoroAllocs.clear();
  CoroFrees.clear();
  ResumeAddr.clear();
  DestroyAddr.clear();

  // Collect all coro.begins, coro.allocs, and coro.frees associated with this
  // coro.id.
  for (User *U : CoroId->users()) {
    if (auto *CB = dyn_cast<CoroBeginInst>(U))
      CoroBegins.push_back(CB);
    else if (auto *CA = dyn_cast<CoroAllocInst>(U))
      CoroAllocs.push_back(CA);
    else if (auto *CF = dyn_cast<CoroFreeInst>(U))
      CoroFrees.push_back(CF);
  }

  // Collect all coro.subfn.addrs associated with coro.begin.
  // Note, we only devirtualize the calls if their coro.subfn.addr refers to
  // coro.begin directly. If we run into cases where this check is too
  // conservative, we can consider relaxing the check.
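  // That is, we only handle the direct form (names hypothetical):
  //   %addr = call i8* @llvm.coro.subfn.addr(i8* %begin, i8 0)
  // and not cases where the coroutine handle is first stored to memory and
  // reloaded before reaching coro.subfn.addr.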
  for (CoroBeginInst *CB : CoroBegins) {
    for (User *U : CB->users())
      if (auto *II = dyn_cast<CoroSubFnInst>(U))
        switch (II->getIndex()) {
        case CoroSubFnInst::ResumeIndex:
          ResumeAddr.push_back(II);
          break;
        case CoroSubFnInst::DestroyIndex:
          DestroyAddr[CB].push_back(II);
          break;
        default:
          llvm_unreachable("unexpected coro.subfn.addr constant");
        }
  }

  // PostSplit coro.id refers to an array of subfunctions in its Info
  // argument.
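  // For example (an illustrative constant; names are hypothetical):
  //   @f.resumers = private constant [3 x void (%f.Frame*)*]
  //       [void (%f.Frame*)* @f.resume, void (%f.Frame*)* @f.destroy,
  //        void (%f.Frame*)* @f.cleanup]
  // indexed by ResumeIndex, DestroyIndex, and CleanupIndex respectively.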
  ConstantArray *Resumers = CoroId->getInfo().Resumers;
  assert(Resumers && "PostSplit coro.id Info argument must refer to an array "
                     "of coroutine subfunctions");
  auto *ResumeAddrConstant =
      ConstantExpr::getExtractValue(Resumers, CoroSubFnInst::ResumeIndex);

  replaceWithConstant(ResumeAddrConstant, ResumeAddr);

  bool ShouldElide = shouldElide(CoroId->getFunction(), DT);

  auto *DestroyAddrConstant = ConstantExpr::getExtractValue(
      Resumers,
      ShouldElide ? CoroSubFnInst::CleanupIndex : CoroSubFnInst::DestroyIndex);

  for (auto &It : DestroyAddr)
    replaceWithConstant(DestroyAddrConstant, It.second);

  if (ShouldElide) {
    auto FrameSizeAndAlign = getFrameLayout(cast<Function>(ResumeAddrConstant));
    elideHeapAllocations(CoroId->getFunction(), FrameSizeAndAlign.first,
                         FrameSizeAndAlign.second, AA);
    coro::replaceCoroFree(CoroId, /*Elide=*/true);
  }

  return true;
}

// See if there are any coro.subfn.addr instructions referring to the
// coro.devirt trigger; if so, replace them with a direct call to the devirt
// trigger function.
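// A minimal sketch of such a reference (illustrative; assumes RestartTrigger
// lowers to the reserved index -1):
//   %fn = call i8* @llvm.coro.subfn.addr(i8* null, i8 -1)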
static bool replaceDevirtTrigger(Function &F) {
  SmallVector<CoroSubFnInst *, 1> DevirtAddr;
  for (auto &I : instructions(F))
    if (auto *SubFn = dyn_cast<CoroSubFnInst>(&I))
      if (SubFn->getIndex() == CoroSubFnInst::RestartTrigger)
        DevirtAddr.push_back(SubFn);

  if (DevirtAddr.empty())
    return false;

  Module &M = *F.getParent();
  Function *DevirtFn = M.getFunction(CORO_DEVIRT_TRIGGER_FN);
  assert(DevirtFn && "coro.devirt.fn not found");
  replaceWithConstant(DevirtFn, DevirtAddr);

  return true;
}

static bool declaresCoroElideIntrinsics(Module &M) {
  return coro::declaresIntrinsics(M, {"llvm.coro.id", "llvm.coro.id.async"});
}

PreservedAnalyses CoroElidePass::run(Function &F, FunctionAnalysisManager &AM) {
  auto &M = *F.getParent();
  if (!declaresCoroElideIntrinsics(M))
    return PreservedAnalyses::all();

  Lowerer L(M);
  L.CoroIds.clear();
  L.collectPostSplitCoroIds(&F);
  // If we did not find any coro.id, there is nothing to do.
  if (L.CoroIds.empty())
    return PreservedAnalyses::all();

  AAResults &AA = AM.getResult<AAManager>(F);
  DominatorTree &DT = AM.getResult<DominatorTreeAnalysis>(F);

  bool Changed = false;
  for (auto *CII : L.CoroIds)
    Changed |= L.processCoroId(CII, AA, DT);

  return Changed ? PreservedAnalyses::none() : PreservedAnalyses::all();
}

namespace {
struct CoroElideLegacy : FunctionPass {
  static char ID;
  CoroElideLegacy() : FunctionPass(ID) {
    initializeCoroElideLegacyPass(*PassRegistry::getPassRegistry());
  }

  std::unique_ptr<Lowerer> L;

  bool doInitialization(Module &M) override {
    if (declaresCoroElideIntrinsics(M))
      L = std::make_unique<Lowerer>(M);
    return false;
  }

  bool runOnFunction(Function &F) override {
    if (!L)
      return false;

    bool Changed = false;

    if (F.hasFnAttribute(CORO_PRESPLIT_ATTR))
      Changed = replaceDevirtTrigger(F);

    L->CoroIds.clear();
    L->collectPostSplitCoroIds(&F);
    // If we did not find any coro.id, there is nothing to do.
    if (L->CoroIds.empty())
      return Changed;

    AAResults &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
    DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();

    for (auto *CII : L->CoroIds)
      Changed |= L->processCoroId(CII, AA, DT);

    return Changed;
  }
  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<AAResultsWrapperPass>();
    AU.addRequired<DominatorTreeWrapperPass>();
  }
  StringRef getPassName() const override { return "Coroutine Elision"; }
};
} // end anonymous namespace

char CoroElideLegacy::ID = 0;
INITIALIZE_PASS_BEGIN(
    CoroElideLegacy, "coro-elide",
    "Coroutine frame allocation elision and indirect calls replacement", false,
    false)
INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
INITIALIZE_PASS_END(
    CoroElideLegacy, "coro-elide",
    "Coroutine frame allocation elision and indirect calls replacement", false,
    false)

Pass *llvm::createCoroElideLegacyPass() { return new CoroElideLegacy(); }