1 //===- CoroInternal.h - Internal Coroutine interfaces ---------*- C++ -*---===//
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
8 // Common definitions/declarations used internally by coroutine lowering passes.
9 //===----------------------------------------------------------------------===//
11 #ifndef LLVM_LIB_TRANSFORMS_COROUTINES_COROINTERNAL_H
12 #define LLVM_LIB_TRANSFORMS_COROUTINES_COROINTERNAL_H
14 #include "CoroInstr.h"
15 #include "llvm/IR/IRBuilder.h"
16 #include "llvm/Transforms/Coroutines.h"
// Legacy pass-manager registration hooks for the four coroutine lowering
// passes (CoroEarly, CoroSplit, CoroElide, CoroCleanup). Declarations only;
// the definitions live in the corresponding pass .cpp files.
24 void initializeCoroEarlyLegacyPass(PassRegistry &);
25 void initializeCoroSplitLegacyPass(PassRegistry &);
26 void initializeCoroElideLegacyPass(PassRegistry &);
27 void initializeCoroCleanupLegacyPass(PassRegistry &);
29 // CoroEarly pass marks every function that has coro.begin with a string
30 // attribute "coroutine.presplit"="0". CoroSplit pass processes the coroutine
31 // twice. First, it lets it go through complete IPO optimization pipeline as a
32 // single function. It forces restart of the pipeline by inserting an indirect
33 // call to an empty function "coro.devirt.trigger" which is devirtualized by
34 // CoroElide pass that triggers a restart of the pipeline by CGPassManager.
35 // When CoroSplit pass sees the same coroutine the second time, it splits it up,
36 // adds coroutine subfunctions to the SCC to be processed by IPO pipeline.
38 #define CORO_PRESPLIT_ATTR "coroutine.presplit"
39 #define UNPREPARED_FOR_SPLIT "0"
40 #define PREPARED_FOR_SPLIT "1"
42 #define CORO_DEVIRT_TRIGGER_FN "coro.devirt.trigger"
// Returns true if the module declares any intrinsic with one of the given
// names. NOTE(review): the second parameter is unnamed and passed by value;
// semantics inferred from the name — confirm against the definition.
46 bool declaresIntrinsics(const Module &M,
47 const std::initializer_list<StringRef>);
// Rewrites every llvm.coro.alloc tied to CB to the constant Replacement,
// and every llvm.coro.free tied to CB to the Value Replacement, respectively.
// NOTE(review): exact replacement semantics live in the .cpp definitions.
48 void replaceAllCoroAllocs(CoroBeginInst *CB, bool Replacement);
49 void replaceAllCoroFrees(CoroBeginInst *CB, Value *Replacement);
// Lowers llvm.coro.free calls associated with CoroId; Elide selects the
// elided (no heap) form.
50 void replaceCoroFree(CoroIdInst *CoroId, bool Elide);
// Adds the freshly created coroutine subfunctions Funcs to the call graph /
// SCC on behalf of Caller so the CGSCC pipeline revisits them.
51 void updateCallGraph(Function &Caller, ArrayRef<Function *> Funcs,
52 CallGraph &CG, CallGraphSCC &SCC);
54 // Keeps data and helper functions for lowering coroutine intrinsics.
// NOTE(review): the enclosing class declaration (class LLVM_LIBRARY_VISIBILITY
// LowererBase { ... with a Module reference member) is elided in this view;
// the members below belong to it.
58 PointerType *const Int8Ptr;       // cached i8* type used by the lowerings
59 FunctionType *const ResumeFnType; // signature shared by resume/destroy fns
60 ConstantPointerNull *const NullPtr; // cached null i8* constant
// Caches the types/constants above for module M.
62 LowererBase(Module &M);
// Builds the call that fetches and invokes subfunction Index (resume/destroy)
// of the coroutine identified by Arg, inserted before InsertPt.
63 Value *makeSubFnCall(Value *Arg, int Index, Instruction *InsertPt);
67 /// The "resume-switch" lowering, where there are separate resume and
68 /// destroy functions that are shared between all suspend points. The
69 /// coroutine frame implicitly stores the resume and destroy functions,
70 /// the current index, and any promise value.
73 /// The "returned-continuation" lowering, where each suspend point creates a
74 /// single continuation function that is used for both resuming and
75 /// destroying. Does not support promises.
78 /// The "unique returned-continuation" lowering, where each suspend point
79 /// creates a single continuation function that is used for both resuming
80 /// and destroying. Does not support promises. The function is known to
81 /// suspend at most once during its execution, and the return value of
82 /// the continuation is void.
86 // Holds structural Coroutine Intrinsics for a particular function and other
87 // values used during CoroSplit pass.
// NOTE(review): this excerpt is elided — the ABI discriminator member, the
// SwitchFieldIndex enumerators (Resume, Destroy, ...), FrameTy, the
// switch (ABI) lines, and most closing braces are not shown here.
88 struct LLVM_LIBRARY_VISIBILITY Shape {
// The structural intrinsics collected from the function being split.
89 CoroBeginInst *CoroBegin;
90 SmallVector<CoroEndInst *, 4> CoroEnds;
91 SmallVector<CoroSizeInst *, 2> CoroSizes;
92 SmallVector<AnyCoroSuspendInst *, 4> CoroSuspends;
// Calls that use swifterror semantics and need special frame handling.
93 SmallVector<CallInst*, 2> SwiftErrorOps;
95 // Field indexes for special fields in the switch lowering.
96 struct SwitchFieldIndex {
101 // The promise field is always at a fixed offset from the start of
102 // frame given its type, but the index isn't a constant for all
// frames.
105 // The switch-index field isn't at a fixed offset or index, either;
106 // we just work it in where it fits best.
// Pointer to the materialized coroutine frame and the block holding the
// spills of allocas moved into it.
115 Instruction *FramePtr;
116 BasicBlock *AllocaSpillBlock;
// State used only by the switched-resume (C++ coroutine) lowering.
118 struct SwitchLoweringStorage {
119 SwitchInst *ResumeSwitch;
120 AllocaInst *PromiseAlloca;
121 BasicBlock *ResumeEntryBlock;
123 unsigned PromiseField;
124 bool HasFinalSuspend;
// State used only by the returned-continuation lowerings.
127 struct RetconLoweringStorage {
128 Function *ResumePrototype;
131 BasicBlock *ReturnBlock;
// True when the frame fits inside the caller-provided storage and no
// separate allocation is needed.
132 bool IsFrameInlineInStorage;
136 SwitchLoweringStorage SwitchLowering;
137 RetconLoweringStorage RetconLowering;
// Accessor for the coro.id under the switch ABI; asserts the active ABI.
140 CoroIdInst *getSwitchCoroId() const {
141 assert(ABI == coro::ABI::Switch);
142 return cast<CoroIdInst>(CoroBegin->getId());
// Accessor for the coro.id under either retcon ABI; asserts the active ABI.
145 AnyCoroIdRetconInst *getRetconCoroId() const {
146 assert(ABI == coro::ABI::Retcon ||
147 ABI == coro::ABI::RetconOnce);
148 return cast<AnyCoroIdRetconInst>(CoroBegin->getId());
// Index of the switch-index field in FrameTy; valid only after the frame
// type has been built.
151 unsigned getSwitchIndexField() const {
152 assert(ABI == coro::ABI::Switch);
153 assert(FrameTy && "frame type not assigned");
154 return SwitchLowering.IndexField;
// Integer type of the switch-index frame field.
156 IntegerType *getIndexType() const {
157 assert(ABI == coro::ABI::Switch);
158 assert(FrameTy && "frame type not assigned");
159 return cast<IntegerType>(FrameTy->getElementType(getSwitchIndexField()));
// Convenience: Value as a constant of the switch-index type.
161 ConstantInt *getIndex(uint64_t Value) const {
162 return ConstantInt::get(getIndexType(), Value);
// Pointer type of the resume-function slot in the frame (switch ABI only).
165 PointerType *getSwitchResumePointerType() const {
166 assert(ABI == coro::ABI::Switch);
167 assert(FrameTy && "frame type not assigned");
168 return cast<PointerType>(FrameTy->getElementType(SwitchFieldIndex::Resume));
// Signature of the resume/destroy subfunctions for the active ABI: derived
// from the frame's resume slot (switch) or the resume prototype (retcon).
171 FunctionType *getResumeFunctionType() const {
173 case coro::ABI::Switch: {
174 auto *FnPtrTy = getSwitchResumePointerType();
175 return cast<FunctionType>(FnPtrTy->getPointerElementType());
177 case coro::ABI::Retcon:
178 case coro::ABI::RetconOnce:
179 return RetconLowering.ResumePrototype->getFunctionType();
181 llvm_unreachable("Unknown coro::ABI enum");
// Non-continuation result types of a retcon coroutine: everything after the
// leading continuation slot of the function's aggregate return (or none).
184 ArrayRef<Type*> getRetconResultTypes() const {
185 assert(ABI == coro::ABI::Retcon ||
186 ABI == coro::ABI::RetconOnce);
187 auto FTy = CoroBegin->getFunction()->getFunctionType();
189 // The safety of all this is checked by checkWFRetconPrototype.
190 if (auto STy = dyn_cast<StructType>(FTy->getReturnType())) {
191 return STy->elements().slice(1);
193 return ArrayRef<Type*>();
// Parameter types a retcon continuation takes on resume, skipping the
// leading frame/storage pointer parameter of the prototype.
197 ArrayRef<Type*> getRetconResumeTypes() const {
198 assert(ABI == coro::ABI::Retcon ||
199 ABI == coro::ABI::RetconOnce);
201 // The safety of all this is checked by checkWFRetconPrototype.
202 auto FTy = RetconLowering.ResumePrototype->getFunctionType();
203 return FTy->params().slice(1);
// Calling convention for resume/destroy: fastcc under the switch ABI,
// otherwise whatever the user's resume prototype declares.
206 CallingConv::ID getResumeFunctionCC() const {
208 case coro::ABI::Switch:
209 return CallingConv::Fast;
211 case coro::ABI::Retcon:
212 case coro::ABI::RetconOnce:
213 return RetconLowering.ResumePrototype->getCallingConv();
215 llvm_unreachable("Unknown coro::ABI enum");
// Promise alloca, if any; only the switch lowering supports promises
// (the non-switch return path is elided from this view).
218 AllocaInst *getPromiseAlloca() const {
219 if (ABI == coro::ABI::Switch)
220 return SwitchLowering.PromiseAlloca;
// Frame field index of the promise; requires the frame to be built and a
// promise to exist.
223 unsigned getPromiseField() const {
224 assert(ABI == coro::ABI::Switch);
225 assert(FrameTy && "frame type not assigned");
226 assert(SwitchLowering.PromiseAlloca && "no promise alloca");
227 return SwitchLowering.PromiseField;
230 /// Allocate memory according to the rules of the active lowering.
232 /// \param CG - if non-null, will be updated for the new call
233 Value *emitAlloc(IRBuilder<> &Builder, Value *Size, CallGraph *CG) const;
235 /// Deallocate memory according to the rules of the active lowering.
237 /// \param CG - if non-null, will be updated for the new call
238 void emitDealloc(IRBuilder<> &Builder, Value *Ptr, CallGraph *CG) const;
// Populates all of the above by scanning F for coroutine intrinsics.
241 explicit Shape(Function &F) { buildFrom(F); }
242 void buildFrom(Function &F);
// Materializes the coroutine frame for F: decides which values live across
// suspend points and spills them into the frame described by Shape.
// Declaration only; the definition lives elsewhere in the Coroutines passes.
245 void buildCoroutineFrame(Function &F, Shape &Shape);
247 } // End namespace coro.
248 } // End namespace llvm