//===- CoroElide.cpp - Coroutine Frame Allocation Elision Pass ------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
// This pass replaces dynamic allocation of coroutine frame with alloca and
// replaces calls to llvm.coro.resume and llvm.coro.destroy with direct calls
// to coroutine sub-functions.
//===----------------------------------------------------------------------===//
#include "CoroInternal.h"
#include "llvm/Analysis/AliasAnalysis.h"
#include "llvm/Analysis/InstructionSimplify.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/Pass.h"
#include "llvm/Support/ErrorHandling.h"

using namespace llvm;

#define DEBUG_TYPE "coro-elide"
27 // Created on demand if CoroElide pass has work to do.
28 struct Lowerer : coro::LowererBase {
29 SmallVector<CoroIdInst *, 4> CoroIds;
30 SmallVector<CoroBeginInst *, 1> CoroBegins;
31 SmallVector<CoroAllocInst *, 1> CoroAllocs;
32 SmallVector<CoroSubFnInst *, 4> ResumeAddr;
33 SmallVector<CoroSubFnInst *, 4> DestroyAddr;
34 SmallVector<CoroFreeInst *, 1> CoroFrees;
36 Lowerer(Module &M) : LowererBase(M) {}
38 void elideHeapAllocations(Function *F, Type *FrameTy, AAResults &AA);
39 bool shouldElide(Function *F, DominatorTree &DT) const;
40 bool processCoroId(CoroIdInst *, AAResults &AA, DominatorTree &DT);
42 } // end anonymous namespace
44 // Go through the list of coro.subfn.addr intrinsics and replace them with the
46 static void replaceWithConstant(Constant *Value,
47 SmallVectorImpl<CoroSubFnInst *> &Users) {
51 // See if we need to bitcast the constant to match the type of the intrinsic
52 // being replaced. Note: All coro.subfn.addr intrinsics return the same type,
53 // so we only need to examine the type of the first one in the list.
54 Type *IntrTy = Users.front()->getType();
55 Type *ValueTy = Value->getType();
56 if (ValueTy != IntrTy) {
57 // May need to tweak the function type to match the type expected at the
59 assert(ValueTy->isPointerTy() && IntrTy->isPointerTy());
60 Value = ConstantExpr::getBitCast(Value, IntrTy);
63 // Now the value type matches the type of the intrinsic. Replace them all!
64 for (CoroSubFnInst *I : Users)
65 replaceAndRecursivelySimplify(I, Value);
68 // See if any operand of the call instruction references the coroutine frame.
69 static bool operandReferences(CallInst *CI, AllocaInst *Frame, AAResults &AA) {
70 for (Value *Op : CI->operand_values())
71 if (AA.alias(Op, Frame) != NoAlias)
76 // Look for any tail calls referencing the coroutine frame and remove tail
77 // attribute from them, since now coroutine frame resides on the stack and tail
78 // call implies that the function does not references anything on the stack.
79 static void removeTailCallAttribute(AllocaInst *Frame, AAResults &AA) {
80 Function &F = *Frame->getFunction();
81 for (Instruction &I : instructions(F))
82 if (auto *Call = dyn_cast<CallInst>(&I))
83 if (Call->isTailCall() && operandReferences(Call, Frame, AA)) {
84 // FIXME: If we ever hit this check. Evaluate whether it is more
85 // appropriate to retain musttail and allow the code to compile.
86 if (Call->isMustTailCall())
87 report_fatal_error("Call referring to the coroutine frame cannot be "
88 "marked as musttail");
89 Call->setTailCall(false);
93 // Given a resume function @f.resume(%f.frame* %frame), returns %f.frame type.
94 static Type *getFrameType(Function *Resume) {
95 auto *ArgType = Resume->arg_begin()->getType();
96 return cast<PointerType>(ArgType)->getElementType();
99 // Finds first non alloca instruction in the entry block of a function.
100 static Instruction *getFirstNonAllocaInTheEntryBlock(Function *F) {
101 for (Instruction &I : F->getEntryBlock())
102 if (!isa<AllocaInst>(&I))
104 llvm_unreachable("no terminator in the entry block");
107 // To elide heap allocations we need to suppress code blocks guarded by
108 // llvm.coro.alloc and llvm.coro.free instructions.
109 void Lowerer::elideHeapAllocations(Function *F, Type *FrameTy, AAResults &AA) {
110 LLVMContext &C = FrameTy->getContext();
112 getFirstNonAllocaInTheEntryBlock(CoroIds.front()->getFunction());
114 // Replacing llvm.coro.alloc with false will suppress dynamic
115 // allocation as it is expected for the frontend to generate the code that
118 // mem = coro.alloc(id) ? malloc(coro.size()) : 0;
119 // coro.begin(id, mem)
120 auto *False = ConstantInt::getFalse(C);
121 for (auto *CA : CoroAllocs) {
122 CA->replaceAllUsesWith(False);
123 CA->eraseFromParent();
126 // FIXME: Design how to transmit alignment information for every alloca that
127 // is spilled into the coroutine frame and recreate the alignment information
128 // here. Possibly we will need to do a mini SROA here and break the coroutine
129 // frame into individual AllocaInst recreating the original alignment.
130 const DataLayout &DL = F->getParent()->getDataLayout();
131 auto *Frame = new AllocaInst(FrameTy, DL.getAllocaAddrSpace(), "", InsertPt);
133 new BitCastInst(Frame, Type::getInt8PtrTy(C), "vFrame", InsertPt);
135 for (auto *CB : CoroBegins) {
136 CB->replaceAllUsesWith(FrameVoidPtr);
137 CB->eraseFromParent();
140 // Since now coroutine frame lives on the stack we need to make sure that
141 // any tail call referencing it, must be made non-tail call.
142 removeTailCallAttribute(Frame, AA);
145 bool Lowerer::shouldElide(Function *F, DominatorTree &DT) const {
146 // If no CoroAllocs, we cannot suppress allocation, so elision is not
148 if (CoroAllocs.empty())
151 // Check that for every coro.begin there is a coro.destroy directly
152 // referencing the SSA value of that coro.begin along a non-exceptional path.
153 // If the value escaped, then coro.destroy would have been referencing a
154 // memory location storing that value and not the virtual register.
156 // First gather all of the non-exceptional terminators for the function.
157 SmallPtrSet<Instruction *, 8> Terminators;
158 for (BasicBlock &B : *F) {
159 auto *TI = B.getTerminator();
160 if (TI->getNumSuccessors() == 0 && !TI->isExceptionalTerminator() &&
161 !isa<UnreachableInst>(TI))
162 Terminators.insert(TI);
165 // Filter out the coro.destroy that lie along exceptional paths.
166 SmallPtrSet<CoroSubFnInst *, 4> DAs;
167 for (CoroSubFnInst *DA : DestroyAddr) {
168 for (Instruction *TI : Terminators) {
169 if (DT.dominates(DA, TI)) {
176 // Find all the coro.begin referenced by coro.destroy along happy paths.
177 SmallPtrSet<CoroBeginInst *, 8> ReferencedCoroBegins;
178 for (CoroSubFnInst *DA : DAs) {
179 if (auto *CB = dyn_cast<CoroBeginInst>(DA->getFrame()))
180 ReferencedCoroBegins.insert(CB);
185 // If size of the set is the same as total number of coro.begin, that means we
186 // found a coro.free or coro.destroy referencing each coro.begin, so we can
187 // perform heap elision.
188 return ReferencedCoroBegins.size() == CoroBegins.size();
191 bool Lowerer::processCoroId(CoroIdInst *CoroId, AAResults &AA,
199 // Collect all coro.begin and coro.allocs associated with this coro.id.
200 for (User *U : CoroId->users()) {
201 if (auto *CB = dyn_cast<CoroBeginInst>(U))
202 CoroBegins.push_back(CB);
203 else if (auto *CA = dyn_cast<CoroAllocInst>(U))
204 CoroAllocs.push_back(CA);
205 else if (auto *CF = dyn_cast<CoroFreeInst>(U))
206 CoroFrees.push_back(CF);
209 // Collect all coro.subfn.addrs associated with coro.begin.
210 // Note, we only devirtualize the calls if their coro.subfn.addr refers to
211 // coro.begin directly. If we run into cases where this check is too
212 // conservative, we can consider relaxing the check.
213 for (CoroBeginInst *CB : CoroBegins) {
214 for (User *U : CB->users())
215 if (auto *II = dyn_cast<CoroSubFnInst>(U))
216 switch (II->getIndex()) {
217 case CoroSubFnInst::ResumeIndex:
218 ResumeAddr.push_back(II);
220 case CoroSubFnInst::DestroyIndex:
221 DestroyAddr.push_back(II);
224 llvm_unreachable("unexpected coro.subfn.addr constant");
228 // PostSplit coro.id refers to an array of subfunctions in its Info
230 ConstantArray *Resumers = CoroId->getInfo().Resumers;
231 assert(Resumers && "PostSplit coro.id Info argument must refer to an array"
232 "of coroutine subfunctions");
233 auto *ResumeAddrConstant =
234 ConstantExpr::getExtractValue(Resumers, CoroSubFnInst::ResumeIndex);
236 replaceWithConstant(ResumeAddrConstant, ResumeAddr);
238 bool ShouldElide = shouldElide(CoroId->getFunction(), DT);
240 auto *DestroyAddrConstant = ConstantExpr::getExtractValue(
242 ShouldElide ? CoroSubFnInst::CleanupIndex : CoroSubFnInst::DestroyIndex);
244 replaceWithConstant(DestroyAddrConstant, DestroyAddr);
247 auto *FrameTy = getFrameType(cast<Function>(ResumeAddrConstant));
248 elideHeapAllocations(CoroId->getFunction(), FrameTy, AA);
249 coro::replaceCoroFree(CoroId, /*Elide=*/true);
255 // See if there are any coro.subfn.addr instructions referring to coro.devirt
256 // trigger, if so, replace them with a direct call to devirt trigger function.
257 static bool replaceDevirtTrigger(Function &F) {
258 SmallVector<CoroSubFnInst *, 1> DevirtAddr;
259 for (auto &I : instructions(F))
260 if (auto *SubFn = dyn_cast<CoroSubFnInst>(&I))
261 if (SubFn->getIndex() == CoroSubFnInst::RestartTrigger)
262 DevirtAddr.push_back(SubFn);
264 if (DevirtAddr.empty())
267 Module &M = *F.getParent();
268 Function *DevirtFn = M.getFunction(CORO_DEVIRT_TRIGGER_FN);
269 assert(DevirtFn && "coro.devirt.fn not found");
270 replaceWithConstant(DevirtFn, DevirtAddr);
//===----------------------------------------------------------------------===//
//                              Top Level Driver
//===----------------------------------------------------------------------===//
280 struct CoroElide : FunctionPass {
282 CoroElide() : FunctionPass(ID) {
283 initializeCoroElidePass(*PassRegistry::getPassRegistry());
286 std::unique_ptr<Lowerer> L;
288 bool doInitialization(Module &M) override {
289 if (coro::declaresIntrinsics(M, {"llvm.coro.id"}))
290 L = llvm::make_unique<Lowerer>(M);
294 bool runOnFunction(Function &F) override {
298 bool Changed = false;
300 if (F.hasFnAttribute(CORO_PRESPLIT_ATTR))
301 Changed = replaceDevirtTrigger(F);
305 // Collect all PostSplit coro.ids.
306 for (auto &I : instructions(F))
307 if (auto *CII = dyn_cast<CoroIdInst>(&I))
308 if (CII->getInfo().isPostSplit())
309 // If it is the coroutine itself, don't touch it.
310 if (CII->getCoroutine() != CII->getFunction())
311 L->CoroIds.push_back(CII);
313 // If we did not find any coro.id, there is nothing to do.
314 if (L->CoroIds.empty())
317 AAResults &AA = getAnalysis<AAResultsWrapperPass>().getAAResults();
318 DominatorTree &DT = getAnalysis<DominatorTreeWrapperPass>().getDomTree();
320 for (auto *CII : L->CoroIds)
321 Changed |= L->processCoroId(CII, AA, DT);
325 void getAnalysisUsage(AnalysisUsage &AU) const override {
326 AU.addRequired<AAResultsWrapperPass>();
327 AU.addRequired<DominatorTreeWrapperPass>();
329 StringRef getPassName() const override { return "Coroutine Elision"; }
333 char CoroElide::ID = 0;
334 INITIALIZE_PASS_BEGIN(
335 CoroElide, "coro-elide",
336 "Coroutine frame allocation elision and indirect calls replacement", false,
338 INITIALIZE_PASS_DEPENDENCY(AAResultsWrapperPass)
340 CoroElide, "coro-elide",
341 "Coroutine frame allocation elision and indirect calls replacement", false,
344 Pass *llvm::createCoroElidePass() { return new CoroElide(); }