73#include <initializer_list>
78#define DEBUG_TYPE "coro-split"
110 Value *NewFramePtr =
nullptr;
120 : OrigF(OrigF), NewF(
nullptr), Suffix(Suffix), Shape(Shape),
128 : OrigF(OrigF), NewF(NewF), Suffix(Suffix), Shape(Shape),
129 FKind(Shape.
ABI == coro::ABI::Async ? Kind::Async : Kind::Continuation),
132 Shape.
ABI == coro::ABI::RetconOnce || Shape.
ABI == coro::ABI::Async);
133 assert(NewF &&
"need existing function for continuation");
134 assert(ActiveSuspend &&
"need active suspend point for continuation");
138 assert(NewF !=
nullptr &&
"declaration not yet set");
145 bool isSwitchDestroyFunction() {
148 case Kind::Continuation:
149 case Kind::SwitchResume:
151 case Kind::SwitchUnwind:
152 case Kind::SwitchCleanup:
158 void replaceEntryBlock();
159 Value *deriveNewFramePointer();
160 void replaceRetconOrAsyncSuspendUses();
161 void replaceCoroSuspends();
162 void replaceCoroEnds();
165 void handleFinalSuspend();
174 Shape.
ABI == coro::ABI::RetconOnce);
187 auto *EndAsync = dyn_cast<CoroAsyncEndInst>(
End);
193 auto *MustTailCallFunc = EndAsync->getMustTailCallFunction();
194 if (!MustTailCallFunc) {
200 auto *CoroEndBlock =
End->getParent();
201 auto *MustTailCallFuncBlock = CoroEndBlock->getSinglePredecessor();
202 assert(MustTailCallFuncBlock &&
"Must have a single predecessor block");
203 auto It = MustTailCallFuncBlock->getTerminator()->getIterator();
204 auto *MustTailCall = cast<CallInst>(&*std::prev(It));
205 CoroEndBlock->splice(
End->getIterator(), MustTailCallFuncBlock,
206 MustTailCall->getIterator());
214 auto *BB =
End->getParent();
215 BB->splitBasicBlock(
End);
216 BB->getTerminator()->eraseFromParent();
219 assert(InlineRes.isSuccess() &&
"Expected inlining to succeed");
236 case coro::ABI::Switch:
237 assert(!cast<CoroEndInst>(
End)->hasResults() &&
238 "switch coroutine should not return any values");
247 case coro::ABI::Async: {
249 if (!CoroEndBlockNeedsCleanup)
256 case coro::ABI::RetconOnce: {
258 auto *CoroEnd = cast<CoroEndInst>(
End);
261 if (!CoroEnd->hasResults()) {
267 auto *CoroResults = CoroEnd->getResults();
268 unsigned NumReturns = CoroResults->numReturns();
270 if (
auto *RetStructTy = dyn_cast<StructType>(
RetTy)) {
271 assert(RetStructTy->getNumElements() == NumReturns &&
272 "numbers of returns should match resume function singature");
275 for (
Value *RetValEl : CoroResults->return_values())
276 ReturnValue =
Builder.CreateInsertValue(ReturnValue, RetValEl,
Idx++);
277 Builder.CreateRet(ReturnValue);
278 }
else if (NumReturns == 0) {
283 Builder.CreateRet(*CoroResults->retval_begin());
286 CoroResults->eraseFromParent();
292 case coro::ABI::Retcon: {
293 assert(!cast<CoroEndInst>(
End)->hasResults() &&
294 "retcon coroutine should not return any values");
297 auto RetStructTy = dyn_cast<StructType>(
RetTy);
299 cast<PointerType>(RetStructTy ? RetStructTy->getElementType(0) :
RetTy);
306 Builder.CreateRet(ReturnValue);
312 auto *BB =
End->getParent();
313 BB->splitBasicBlock(
End);
314 BB->getTerminator()->eraseFromParent();
328 Shape.
ABI == coro::ABI::Switch &&
329 "markCoroutineAsDone is only supported for Switch-Resumed ABI for now.");
330 auto *GepIndex =
Builder.CreateStructGEP(
335 Builder.CreateStore(NullPtr, GepIndex);
348 "The final suspend should only live in the last position of "
351 auto *FinalIndex =
Builder.CreateStructGEP(
354 Builder.CreateStore(IndexVal, FinalIndex);
366 case coro::ABI::Switch: {
379 case coro::ABI::Async:
382 case coro::ABI::Retcon:
383 case coro::ABI::RetconOnce:
390 auto *FromPad = cast<CleanupPadInst>(Bundle->Inputs[0]);
391 auto *CleanupRet =
Builder.CreateCleanupRet(FromPad,
nullptr);
392 End->getParent()->splitBasicBlock(
End);
393 CleanupRet->getParent()->getTerminator()->eraseFromParent();
407 End->eraseFromParent();
432 auto *GepIndex =
Builder.CreateStructGEP(
439 size_t SuspendIndex = 0;
441 auto *S = cast<CoroSuspendInst>(AnyS);
447 auto *Save = S->getCoroSave();
454 auto *GepIndex =
Builder.CreateStructGEP(
456 Builder.CreateStore(IndexVal, GepIndex);
460 Save->eraseFromParent();
485 auto *SuspendBB = S->getParent();
487 SuspendBB->splitBasicBlock(S,
"resume." +
Twine(SuspendIndex));
488 auto *LandingBB = ResumeBB->splitBasicBlock(
489 S->getNextNode(), ResumeBB->getName() +
Twine(
".landing"));
490 Switch->addCase(IndexVal, ResumeBB);
492 cast<BranchInst>(SuspendBB->getTerminator())->setSuccessor(0, LandingBB);
494 PN->insertBefore(LandingBB->begin());
495 S->replaceAllUsesWith(PN);
496 PN->addIncoming(
Builder.getInt8(-1), SuspendBB);
497 PN->addIncoming(S, ResumeBB);
502 Builder.SetInsertPoint(UnreachBB);
517void CoroCloner::handleFinalSuspend() {
518 assert(Shape.ABI == coro::ABI::Switch &&
519 Shape.SwitchLowering.HasFinalSuspend);
521 if (isSwitchDestroyFunction() && Shape.SwitchLowering.HasUnwindCoroEnd)
524 auto *
Switch = cast<SwitchInst>(VMap[Shape.SwitchLowering.ResumeSwitch]);
525 auto FinalCaseIt = std::prev(
Switch->case_end());
526 BasicBlock *ResumeBB = FinalCaseIt->getCaseSuccessor();
527 Switch->removeCase(FinalCaseIt);
528 if (isSwitchDestroyFunction()) {
532 auto *GepIndex =
Builder.CreateStructGEP(Shape.FrameTy, NewFramePtr,
535 auto *
Load =
Builder.CreateLoad(Shape.getSwitchResumePointerType(),
545 auto *AsyncSuspend = cast<CoroSuspendAsyncInst>(Suspend);
546 auto *StructTy = cast<StructType>(AsyncSuspend->getType());
549 return FunctionType::get(VoidTy, StructTy->elements(),
false);
557 auto *FnTy = (Shape.
ABI != coro::ABI::Async)
565 M->getFunctionList().insert(InsertBefore, NewF);
574void CoroCloner::replaceRetconOrAsyncSuspendUses() {
575 assert(Shape.ABI == coro::ABI::Retcon || Shape.ABI == coro::ABI::RetconOnce ||
576 Shape.ABI == coro::ABI::Async);
578 auto NewS = VMap[ActiveSuspend];
579 if (NewS->use_empty())
return;
585 bool IsAsyncABI = Shape.ABI == coro::ABI::Async;
586 for (
auto I = IsAsyncABI ? NewF->arg_begin() : std::next(NewF->arg_begin()),
593 if (!isa<StructType>(NewS->getType())) {
595 NewS->replaceAllUsesWith(
Args.front());
601 auto *EVI = dyn_cast<ExtractValueInst>(
U.getUser());
602 if (!EVI || EVI->getNumIndices() != 1)
605 EVI->replaceAllUsesWith(Args[EVI->getIndices().front()]);
606 EVI->eraseFromParent();
610 if (NewS->use_empty())
return;
614 for (
size_t I = 0,
E =
Args.size();
I !=
E; ++
I)
615 Agg =
Builder.CreateInsertValue(Agg, Args[
I],
I);
617 NewS->replaceAllUsesWith(Agg);
620void CoroCloner::replaceCoroSuspends() {
621 Value *SuspendResult;
630 case coro::ABI::Switch:
631 SuspendResult =
Builder.getInt8(isSwitchDestroyFunction() ? 1 : 0);
635 case coro::ABI::Async:
641 case coro::ABI::RetconOnce:
642 case coro::ABI::Retcon:
648 if (CS == ActiveSuspend)
continue;
650 auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[CS]);
651 MappedCS->replaceAllUsesWith(SuspendResult);
652 MappedCS->eraseFromParent();
656void CoroCloner::replaceCoroEnds() {
660 auto *NewCE = cast<AnyCoroEndInst>(VMap[CE]);
669 Value *CachedSlot =
nullptr;
670 auto getSwiftErrorSlot = [&](
Type *ValueTy) ->
Value * {
675 for (
auto &Arg :
F.args()) {
676 if (Arg.isSwiftError()) {
684 auto Alloca =
Builder.CreateAlloca(ValueTy);
685 Alloca->setSwiftError(
true);
692 auto MappedOp = VMap ? cast<CallInst>((*VMap)[
Op]) :
Op;
697 if (
Op->arg_empty()) {
698 auto ValueTy =
Op->getType();
699 auto Slot = getSwiftErrorSlot(ValueTy);
700 MappedResult =
Builder.CreateLoad(ValueTy, Slot);
703 auto Value = MappedOp->getArgOperand(0);
705 auto Slot = getSwiftErrorSlot(ValueTy);
710 MappedOp->replaceAllUsesWith(MappedResult);
711 MappedOp->eraseFromParent();
715 if (VMap ==
nullptr) {
725 if (
auto *DVI = dyn_cast<DbgVariableIntrinsic>(&
I))
730void CoroCloner::replaceSwiftErrorOps() {
734void CoroCloner::salvageDebugInfo() {
749 auto IsUnreachableBlock = [&](
BasicBlock *BB) {
754 if (IsUnreachableBlock(DVI->getParent()))
755 DVI->eraseFromParent();
756 else if (isa_and_nonnull<AllocaInst>(DVI->getVariableLocationOp(0))) {
759 for (
auto *
User : DVI->getVariableLocationOp(0)->
users())
760 if (
auto *
I = dyn_cast<Instruction>(
User))
761 if (!isa<AllocaInst>(
I) && !IsUnreachableBlock(
I->getParent()))
764 DVI->eraseFromParent();
769void CoroCloner::replaceEntryBlock() {
775 auto *Entry = cast<BasicBlock>(VMap[Shape.AllocaSpillBlock]);
776 auto *OldEntry = &NewF->getEntryBlock();
777 Entry->setName(
"entry" + Suffix);
778 Entry->moveBefore(OldEntry);
779 Entry->getTerminator()->eraseFromParent();
784 assert(Entry->hasOneUse());
785 auto BranchToEntry = cast<BranchInst>(Entry->user_back());
786 assert(BranchToEntry->isUnconditional());
787 Builder.SetInsertPoint(BranchToEntry);
789 BranchToEntry->eraseFromParent();
794 case coro::ABI::Switch: {
798 cast<BasicBlock>(VMap[Shape.SwitchLowering.ResumeEntryBlock]);
802 case coro::ABI::Async:
803 case coro::ABI::Retcon:
804 case coro::ABI::RetconOnce: {
808 assert((Shape.ABI == coro::ABI::Async &&
809 isa<CoroSuspendAsyncInst>(ActiveSuspend)) ||
810 ((Shape.ABI == coro::ABI::Retcon ||
811 Shape.ABI == coro::ABI::RetconOnce) &&
812 isa<CoroSuspendRetconInst>(ActiveSuspend)));
813 auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[ActiveSuspend]);
814 auto Branch = cast<BranchInst>(MappedCS->getNextNode());
826 auto *Alloca = dyn_cast<AllocaInst>(&
I);
827 if (!Alloca ||
I.use_empty())
829 if (DT.isReachableFromEntry(
I.getParent()) ||
830 !isa<ConstantInt>(Alloca->getArraySize()))
832 I.moveBefore(*Entry, Entry->getFirstInsertionPt());
837Value *CoroCloner::deriveNewFramePointer() {
842 case coro::ABI::Switch:
843 return &*NewF->arg_begin();
849 case coro::ABI::Async: {
850 auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
851 auto ContextIdx = ActiveAsyncSuspend->getStorageArgumentIndex() & 0xff;
852 auto *CalleeContext = NewF->getArg(ContextIdx);
853 auto *ProjectionFunc =
854 ActiveAsyncSuspend->getAsyncContextProjectionFunction();
856 cast<CoroSuspendAsyncInst>(VMap[ActiveSuspend])->getDebugLoc();
858 auto *CallerContext =
Builder.CreateCall(ProjectionFunc->getFunctionType(),
859 ProjectionFunc, CalleeContext);
860 CallerContext->setCallingConv(ProjectionFunc->getCallingConv());
861 CallerContext->setDebugLoc(DbgLoc);
864 auto *FramePtrAddr =
Builder.CreateConstInBoundsGEP1_32(
866 Shape.AsyncLowering.FrameOffset,
"async.ctx.frameptr");
870 assert(InlineRes.isSuccess());
875 case coro::ABI::Retcon:
876 case coro::ABI::RetconOnce: {
877 Argument *NewStorage = &*NewF->arg_begin();
878 auto FramePtrTy = PointerType::getUnqual(Shape.FrameTy->getContext());
881 if (Shape.RetconLowering.IsFrameInlineInStorage)
885 return Builder.CreateLoad(FramePtrTy, NewStorage);
893 Align Alignment,
bool NoAlias) {
903 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
907 unsigned ParamIndex) {
910 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
914 unsigned ParamIndex) {
917 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
922void CoroCloner::create() {
926 OrigF.getParent()->end(), ActiveSuspend);
937 VMap[&
A] = DummyArgs.
back();
944 auto savedVisibility = NewF->getVisibility();
945 auto savedUnnamedAddr = NewF->getUnnamedAddr();
946 auto savedDLLStorageClass = NewF->getDLLStorageClass();
951 auto savedLinkage = NewF->getLinkage();
955 CloneFunctionChangeType::LocalChangesOnly, Returns);
957 auto &
Context = NewF->getContext();
966 assert(SP != OrigF.getSubprogram() && SP->isDistinct());
968 if (
auto DL = ActiveSuspend->getDebugLoc())
969 if (SP->getFile() ==
DL->getFile())
970 SP->setScopeLine(
DL->getLine());
980 SP->getUnit()->getSourceLanguage() == dwarf::DW_LANG_Swift) {
981 SP->replaceLinkageName(
MDString::get(Context, NewF->getName()));
982 if (
auto *Decl = SP->getDeclaration()) {
983 auto *NewDecl = DISubprogram::get(
984 Decl->getContext(), Decl->getScope(), Decl->getName(),
985 NewF->getName(), Decl->getFile(), Decl->getLine(), Decl->getType(),
986 Decl->getScopeLine(), Decl->getContainingType(),
987 Decl->getVirtualIndex(), Decl->getThisAdjustment(),
988 Decl->getFlags(), Decl->getSPFlags(), Decl->getUnit(),
989 Decl->getTemplateParams(),
nullptr, Decl->getRetainedNodes(),
990 Decl->getThrownTypes(), Decl->getAnnotations(),
991 Decl->getTargetFuncName());
992 SP->replaceDeclaration(NewDecl);
997 NewF->setLinkage(savedLinkage);
998 NewF->setVisibility(savedVisibility);
999 NewF->setUnnamedAddr(savedUnnamedAddr);
1000 NewF->setDLLStorageClass(savedDLLStorageClass);
1004 if (Shape.ABI == coro::ABI::Switch &&
1005 NewF->hasMetadata(LLVMContext::MD_func_sanitize))
1006 NewF->eraseMetadata(LLVMContext::MD_func_sanitize);
1009 auto OrigAttrs = NewF->getAttributes();
1012 switch (Shape.ABI) {
1013 case coro::ABI::Switch:
1016 NewAttrs = NewAttrs.addFnAttributes(
1017 Context,
AttrBuilder(Context, OrigAttrs.getFnAttrs()));
1020 Shape.FrameAlign,
false);
1022 case coro::ABI::Async: {
1023 auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
1024 if (OrigF.hasParamAttribute(Shape.AsyncLowering.ContextArgNo,
1025 Attribute::SwiftAsync)) {
1027 ActiveAsyncSuspend->getStorageArgumentIndex();
1028 auto ContextArgIndex = ArgAttributeIndices & 0xff;
1033 auto SwiftSelfIndex = ArgAttributeIndices >> 8;
1039 auto FnAttrs = OrigF.getAttributes().getFnAttrs();
1040 NewAttrs = NewAttrs.addFnAttributes(Context,
AttrBuilder(Context, FnAttrs));
1043 case coro::ABI::Retcon:
1044 case coro::ABI::RetconOnce:
1047 NewAttrs = Shape.RetconLowering.ResumePrototype->getAttributes();
1051 Shape.getRetconCoroId()->getStorageSize(),
1052 Shape.getRetconCoroId()->getStorageAlignment(),
1058 switch (Shape.ABI) {
1063 case coro::ABI::Switch:
1064 case coro::ABI::RetconOnce:
1073 case coro::ABI::Retcon:
1079 case coro::ABI::Async:
1083 NewF->setAttributes(NewAttrs);
1084 NewF->setCallingConv(Shape.getResumeFunctionCC());
1087 replaceEntryBlock();
1089 Builder.SetInsertPoint(&NewF->getEntryBlock().front());
1090 NewFramePtr = deriveNewFramePointer();
1093 Value *OldFramePtr = VMap[Shape.FramePtr];
1094 NewFramePtr->
takeName(OldFramePtr);
1098 auto *NewVFrame =
Builder.CreateBitCast(
1100 Value *OldVFrame = cast<Value>(VMap[Shape.CoroBegin]);
1101 if (OldVFrame != NewVFrame)
1108 DummyArg->deleteValue();
1111 switch (Shape.ABI) {
1112 case coro::ABI::Switch:
1116 if (Shape.SwitchLowering.HasFinalSuspend)
1117 handleFinalSuspend();
1119 case coro::ABI::Async:
1120 case coro::ABI::Retcon:
1121 case coro::ABI::RetconOnce:
1124 assert(ActiveSuspend !=
nullptr &&
1125 "no active suspend when lowering a continuation-style coroutine");
1126 replaceRetconOrAsyncSuspendUses();
1131 replaceCoroSuspends();
1144 if (Shape.ABI == coro::ABI::Switch)
1146 FKind == CoroCloner::Kind::SwitchCleanup);
1154 CoroCloner Cloner(
F, Suffix, Shape, FKind);
1156 return Cloner.getFunction();
1162 auto *FuncPtrStruct = cast<ConstantStruct>(
1164 auto *OrigRelativeFunOffset = FuncPtrStruct->getOperand(0);
1165 auto *OrigContextSize = FuncPtrStruct->getOperand(1);
1169 FuncPtrStruct->getType(), OrigRelativeFunOffset, NewContextSize);
1175 if (Shape.
ABI == coro::ABI::Async)
1188 auto *SizeIntrin = Shape.
CoroSizes.back();
1189 Module *M = SizeIntrin->getModule();
1221 auto *ArrTy = ArrayType::get(Part->
getType(), Args.size());
1225 GlobalVariable::PrivateLinkage, ConstVal,
1226 F.getName() +
Twine(
".resumers"));
1241 auto *ResumeAddr =
Builder.CreateStructGEP(
1244 Builder.CreateStore(ResumeFn, ResumeAddr);
1246 Value *DestroyOrCleanupFn = DestroyFn;
1252 DestroyOrCleanupFn =
Builder.CreateSelect(CA, DestroyFn, CleanupFn);
1255 auto *DestroyAddr =
Builder.CreateStructGEP(
1258 Builder.CreateStore(DestroyOrCleanupFn, DestroyAddr);
1280 auto V = PN.getIncomingValueForBlock(PrevBB);
1282 auto VI = ResolvedValues.
find(V);
1283 if (VI != ResolvedValues.
end())
1286 ResolvedValues[&PN] = V;
1295 if (isa<ReturnInst>(InitialInst))
1305 if (isa<BitCastInst>(
I) ||
I->isDebugOrPseudoInst() ||
1306 I->isLifetimeStartOrEnd())
1307 I =
I->getNextNode();
1311 I = &*
I->eraseFromParent();
1318 auto TryResolveConstant = [&ResolvedValues](
Value *V) {
1319 auto It = ResolvedValues.
find(V);
1320 if (It != ResolvedValues.
end())
1322 return dyn_cast<ConstantInt>(V);
1326 while (
I->isTerminator() || isa<CmpInst>(
I)) {
1327 if (isa<ReturnInst>(
I)) {
1332 if (
auto *BR = dyn_cast<BranchInst>(
I)) {
1333 unsigned SuccIndex = 0;
1334 if (BR->isConditional()) {
1346 SuccIndex =
Cond->isOne() ? 0 : 1;
1349 BasicBlock *Succ = BR->getSuccessor(SuccIndex);
1356 if (
auto *CondCmp = dyn_cast<CmpInst>(
I)) {
1359 auto *BR = dyn_cast<BranchInst>(
1360 GetFirstValidInstruction(CondCmp->getNextNode()));
1361 if (!BR || !BR->isConditional() || CondCmp != BR->getCondition())
1367 ConstantInt *Cond0 = TryResolveConstant(CondCmp->getOperand(0));
1368 auto *Cond1 = dyn_cast<ConstantInt>(CondCmp->getOperand(1));
1369 if (!Cond0 || !Cond1)
1376 CondCmp->getPredicate(), Cond0, Cond1,
DL));
1380 ResolvedValues[BR->getCondition()] = ConstResult;
1387 if (
auto *SI = dyn_cast<SwitchInst>(
I)) {
1411 if (!CalleeTy->getReturnType()->isVoidTy() || (CalleeTy->getNumParams() != 1))
1414 Type *CalleeParmTy = CalleeTy->getParamType(0);
1424 Attribute::StructRet, Attribute::ByVal, Attribute::InAlloca,
1425 Attribute::Preallocated, Attribute::InReg, Attribute::Returned,
1426 Attribute::SwiftSelf, Attribute::SwiftError};
1428 for (
auto AK : ABIAttrs)
1429 if (Attrs.hasParamAttr(0, AK))
1441 bool changed =
false;
1446 if (
auto *Call = dyn_cast<CallInst>(&
I))
1467 auto *CoroId = CoroBegin->
getId();
1469 switch (Shape.
ABI) {
1470 case coro::ABI::Switch: {
1471 auto SwitchId = cast<CoroIdInst>(CoroId);
1477 auto *VFrame =
Builder.CreateBitCast(Frame,
Builder.getInt8PtrTy());
1478 AllocInst->replaceAllUsesWith(
Builder.getFalse());
1479 AllocInst->eraseFromParent();
1480 CoroBegin->replaceAllUsesWith(VFrame);
1482 CoroBegin->replaceAllUsesWith(CoroBegin->getMem());
1487 case coro::ABI::Async:
1488 case coro::ABI::Retcon:
1489 case coro::ABI::RetconOnce:
1494 CoroBegin->eraseFromParent();
1503 if (isa<IntrinsicInst>(
I))
1506 if (isa<CallBase>(
I))
1522 while (!Worklist.
empty()) {
1532 Set.
erase(ResDesBB);
1534 for (
auto *BB : Set)
1543 auto *ResumeOrDestroyBB = ResumeOrDestroy->
getParent();
1545 if (SaveBB == ResumeOrDestroyBB)
1573 Prev = Pred->getTerminator();
1576 CallBase *CB = dyn_cast<CallBase>(Prev);
1583 auto *SubFn = dyn_cast<CoroSubFnInst>(Callee);
1588 if (SubFn->getFrame() != CoroBegin)
1602 Save->eraseFromParent();
1605 if (
auto *Invoke = dyn_cast<InvokeInst>(CB)) {
1614 if (CalledValue != SubFn && CalledValue->user_empty())
1615 if (
auto *
I = dyn_cast<Instruction>(CalledValue))
1616 I->eraseFromParent();
1619 if (SubFn->user_empty())
1620 SubFn->eraseFromParent();
1628 if (Shape.
ABI != coro::ABI::Switch)
1632 size_t I = 0,
N = S.size();
1636 size_t ChangedFinalIndex = std::numeric_limits<size_t>::max();
1638 auto SI = cast<CoroSuspendInst>(S[
I]);
1647 if (cast<CoroSuspendInst>(S[
I])->isFinal()) {
1649 ChangedFinalIndex =
I;
1661 if (ChangedFinalIndex <
N) {
1662 assert(cast<CoroSuspendInst>(S[ChangedFinalIndex])->isFinal());
1663 std::swap(S[ChangedFinalIndex], S.back());
1674 CoroCloner::Kind::SwitchResume);
1676 CoroCloner::Kind::SwitchUnwind);
1678 CoroCloner::Kind::SwitchCleanup);
1707 Value *Continuation) {
1713 auto *Val =
Builder.CreateBitOrPointerCast(Continuation, Int8PtrTy);
1714 ResumeIntrinsic->replaceAllUsesWith(Val);
1715 ResumeIntrinsic->eraseFromParent();
1725 for (
auto *paramTy : FnTy->params()) {
1727 if (paramTy != FnArgs[ArgIdx]->
getType())
1729 Builder.CreateBitOrPointerCast(FnArgs[ArgIdx], paramTy));
1745 auto *TailCall =
Builder.CreateCall(FnTy, MustTailCallFn, CallArgs);
1747 TailCall->setDebugLoc(Loc);
1758 F.removeFnAttr(Attribute::NoReturn);
1759 F.removeRetAttr(Attribute::NoAlias);
1760 F.removeRetAttr(Attribute::NonNull);
1772 "async.ctx.frameptr");
1783 auto NextF = std::next(
F.getIterator());
1791 auto ResumeNameSuffix =
".resume.";
1792 auto ProjectionFunctionName =
1793 Suspend->getAsyncContextProjectionFunction()->getName();
1794 bool UseSwiftMangling =
false;
1795 if (ProjectionFunctionName.equals(
"__swift_async_resume_project_context")) {
1796 ResumeNameSuffix =
"TQ";
1797 UseSwiftMangling =
true;
1798 }
else if (ProjectionFunctionName.equals(
1799 "__swift_async_resume_get_context")) {
1800 ResumeNameSuffix =
"TY";
1801 UseSwiftMangling =
true;
1805 UseSwiftMangling ? ResumeNameSuffix +
Twine(
Idx) +
"_"
1812 auto *SuspendBB = Suspend->getParent();
1813 auto *NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
1814 auto *Branch = cast<BranchInst>(SuspendBB->getTerminator());
1819 Branch->setSuccessor(0, ReturnBB);
1824 auto *Fn = Suspend->getMustTailCallFunction();
1833 assert(InlineRes.isSuccess() &&
"Expected inlining to succeed");
1843 auto *Clone = Clones[
Idx];
1845 CoroCloner(
F,
"resume." +
Twine(
Idx), Shape, Clone, Suspend).create();
1851 assert(Shape.
ABI == coro::ABI::Retcon ||
1852 Shape.
ABI == coro::ABI::RetconOnce);
1857 F.removeFnAttr(Attribute::NoReturn);
1858 F.removeRetAttr(Attribute::NoAlias);
1859 F.removeRetAttr(Attribute::NonNull);
1865 RawFramePtr = Id->getStorage();
1881 Builder.CreateStore(RawFramePtr, Id->getStorage());
1897 auto NextF = std::next(
F.getIterator());
1901 for (
size_t i = 0, e = Shape.
CoroSuspends.size(); i != e; ++i) {
1902 auto Suspend = cast<CoroSuspendRetconInst>(Shape.
CoroSuspends[i]);
1911 auto SuspendBB = Suspend->getParent();
1912 auto NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
1913 auto Branch = cast<BranchInst>(SuspendBB->getTerminator());
1937 auto RetTy =
F.getReturnType();
1942 auto CastedContinuationTy =
1943 (ReturnPHIs.
size() == 1 ?
RetTy :
RetTy->getStructElementType(0));
1944 auto *CastedContinuation =
1945 Builder.CreateBitCast(ReturnPHIs[0], CastedContinuationTy);
1948 if (ReturnPHIs.
size() == 1) {
1949 RetV = CastedContinuation;
1952 RetV =
Builder.CreateInsertValue(RetV, CastedContinuation, 0);
1953 for (
size_t I = 1,
E = ReturnPHIs.
size();
I !=
E; ++
I)
1954 RetV =
Builder.CreateInsertValue(RetV, ReturnPHIs[
I],
I);
1961 Branch->setSuccessor(0, ReturnBB);
1962 ReturnPHIs[0]->addIncoming(Continuation, SuspendBB);
1963 size_t NextPHIIndex = 1;
1964 for (
auto &VUse : Suspend->value_operands())
1965 ReturnPHIs[NextPHIIndex++]->addIncoming(&*VUse, SuspendBB);
1970 for (
size_t i = 0, e = Shape.
CoroSuspends.size(); i != e; ++i) {
1972 auto Clone = Clones[i];
1974 CoroCloner(
F,
"resume." +
Twine(i), Shape, Clone, Suspend).create();
1982 PrettyStackTraceFunction(
Function &F) :
F(
F) {}
1984 OS <<
"While splitting coroutine ";
1985 F.printAsOperand(
OS,
false,
F.getParent());
1994 std::function<
bool(
Instruction &)> MaterializableCallback) {
1995 PrettyStackTraceFunction prettyStackTrace(
F);
2006 buildCoroutineFrame(
F, Shape, MaterializableCallback);
2014 switch (Shape.
ABI) {
2015 case coro::ABI::Switch:
2018 case coro::ABI::Async:
2021 case coro::ABI::Retcon:
2022 case coro::ABI::RetconOnce:
2058 if (Shape.
ABI != coro::ABI::Switch)
2064 End->eraseFromParent();
2068 if (!Clones.
empty()) {
2069 switch (Shape.
ABI) {
2070 case coro::ABI::Switch:
2076 case coro::ABI::Async:
2077 case coro::ABI::Retcon:
2078 case coro::ABI::RetconOnce:
2081 if (!Clones.empty())
2110 auto *Cast = dyn_cast<BitCastInst>(U.getUser());
2111 if (!Cast || Cast->getType() != Fn->getType())
2115 Cast->replaceAllUsesWith(Fn);
2116 Cast->eraseFromParent();
2125 while (
auto *Cast = dyn_cast<BitCastInst>(CastFn)) {
2126 if (!Cast->use_empty())
2128 CastFn = Cast->getOperand(0);
2129 Cast->eraseFromParent();
2135 bool Changed =
false;
2138 auto *Prepare = cast<CallInst>(
P.getUser());
2149 auto *PrepareFn = M.getFunction(
Name);
2150 if (PrepareFn && !PrepareFn->use_empty())
2155 : MaterializableCallback(coro::defaultMaterializable),
2156 OptimizeFrame(OptimizeFrame) {}
2164 Module &M = *
C.begin()->getFunction().getParent();
2176 if (
N.getFunction().isPresplitCoroutine())
2179 if (Coroutines.
empty() && PrepareFns.
empty())
2182 if (Coroutines.
empty()) {
2183 for (
auto *PrepareFn : PrepareFns) {
2191 LLVM_DEBUG(
dbgs() <<
"CoroSplit: Processing coroutine '" <<
F.getName()
2193 F.setSplittedCoroutine();
2204 <<
"Split '" <<
ore::NV(
"function",
F.getName())
2205 <<
"' (frame_size=" <<
ore::NV(
"frame_size", Shape.FrameSize)
2206 <<
", align=" <<
ore::NV(
"align", Shape.FrameAlign.value()) <<
")";
2209 if (!Shape.CoroSuspends.empty()) {
2217 if (!PrepareFns.
empty()) {
2218 for (
auto *PrepareFn : PrepareFns) {
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
AMDGPU Lower Kernel Arguments
This file contains the simple types necessary to represent the attributes associated with functions a...
BlockVerifier::State From
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
This file provides interfaces used to manipulate a call graph, regardless if it is a "old style" Call...
This file provides interfaces used to build and manipulate a call graph, which is a very useful tool ...
This file contains the declarations for the subclasses of Constant, which represent the different fla...
static void removeCoroEnds(const coro::Shape &Shape) — Remove calls to llvm.coro.end in the original function.
static void addSwiftSelfAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex)
static void splitSwitchCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI)
static bool hasCallsBetween(Instruction *Save, Instruction *ResumeOrDestroy)
static void replaceSwiftErrorOps(Function &F, coro::Shape &Shape, ValueToValueMapTy *VMap)
static void addAsyncContextAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex)
static void addMustTailToCoroResumes(Function &F, TargetTransformInfo &TTI)
static void maybeFreeRetconStorage(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr, CallGraph *CG)
static bool hasCallsInBlocksBetween(BasicBlock *SaveBB, BasicBlock *ResDesBB)
static Function * createCloneDeclaration(Function &OrigF, coro::Shape &Shape, const Twine &Suffix, Module::iterator InsertBefore, AnyCoroSuspendInst *ActiveSuspend)
static FunctionType * getFunctionTypeFromAsyncSuspend(AnyCoroSuspendInst *Suspend)
static void addPrepareFunction(const Module &M, SmallVectorImpl< Function * > &Fns, StringRef Name)
static void updateCallGraphAfterCoroutineSplit(LazyCallGraph::Node &N, const coro::Shape &Shape, const SmallVectorImpl< Function * > &Clones, LazyCallGraph::SCC &C, LazyCallGraph &CG, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
static void simplifySuspendPoints(coro::Shape &Shape)
static void addFramePointerAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex, uint64_t Size, Align Alignment, bool NoAlias)
static bool replaceAllPrepares(Function *PrepareFn, LazyCallGraph &CG, LazyCallGraph::SCC &C)
static void replaceFallthroughCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
Replace a non-unwind call to llvm.coro.end.
static void replaceFrameSizeAndAlignment(coro::Shape &Shape)
static SmallVector< DbgVariableIntrinsic *, 8 > collectDbgVariableIntrinsics(Function &F)
Returns all DbgVariableIntrinsic in F.
static bool replaceCoroEndAsync(AnyCoroEndInst *End)
Replace an llvm.coro.end.async.
static void replacePrepare(CallInst *Prepare, LazyCallGraph &CG, LazyCallGraph::SCC &C) — Replace a call to llvm.coro.prepare.retcon.
static void replaceUnwindCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
Replace an unwind call to llvm.coro.end.
static bool simplifySuspendPoint(CoroSuspendInst *Suspend, CoroBeginInst *CoroBegin)
static bool hasCallsInBlockBetween(Instruction *From, Instruction *To)
static void markCoroutineAsDone(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr)
static void splitAsyncCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones)
static void updateAsyncFuncPointerContextSize(coro::Shape &Shape)
static void replaceCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
static void setCoroInfo(Function &F, coro::Shape &Shape, ArrayRef< Function * > Fns)
static void handleNoSuspendCoroutine(coro::Shape &Shape)
static void updateCoroFrame(coro::Shape &Shape, Function *ResumeFn, Function *DestroyFn, Function *CleanupFn)
static void createResumeEntryBlock(Function &F, coro::Shape &Shape)
static coro::Shape splitCoroutine(Function &F, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI, bool OptimizeFrame, std::function< bool(Instruction &)> MaterializableCallback)
static void postSplitCleanup(Function &F)
static bool simplifyTerminatorLeadingToRet(Instruction *InitialInst)
static void splitRetconCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones)
static void scanPHIsAndUpdateValueMap(Instruction *Prev, BasicBlock *NewBlock, DenseMap< Value *, Value * > &ResolvedValues)
static void coerceArguments(IRBuilder<> &Builder, FunctionType *FnTy, ArrayRef< Value * > FnArgs, SmallVectorImpl< Value * > &CallArgs) — Coerce the arguments in \p FnArgs according to \p FnTy into \p CallArgs.
static void replaceAsyncResumeFunction(CoroSuspendAsyncInst *Suspend, Value *Continuation)
static bool shouldBeMustTail(const CallInst &CI, const Function &F)
static Function * createClone(Function &F, const Twine &Suffix, coro::Shape &Shape, CoroCloner::Kind FKind)
Returns the sub-type a function will return at a given \p Idx. This should correspond to the result type of an ExtractValue instruction executed with just that one index \p Idx.
This file defines the DenseMap class.
This file contains constants used for implementing Dwarf debug support.
static Function * getFunction(Constant *C)
Rewrite Partial Register Uses
This file provides various utilities for inspecting and working with the control flow graph in LLVM I...
Select target instructions out of generic instructions
Implements a lazy call graph analysis and related passes for the new pass manager.
Module.h This file contains the declarations for the Module class.
FunctionAnalysisManager FAM
This file provides a priority worklist.
const SmallVectorImpl< MachineOperand > & Cond
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
static SymbolRef::Type getType(const Symbol *Sym)
static const unsigned FramePtr
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
CoroAllocInst * getCoroAlloc()
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
ArrayRef< T > drop_front(size_t N=1) const
Drop the first N elements of the array.
size_t size() const
size - Get the array size.
AttrBuilder & addAlignmentAttr(MaybeAlign Align)
This turns an alignment into the form used internally in Attribute.
AttrBuilder & addAttribute(Attribute::AttrKind Val)
Add an attribute to the builder.
AttrBuilder & addDereferenceableAttr(uint64_t Bytes)
This turns the number of dereferenceable bytes into the form used internally in Attribute.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
LLVM Basic Block Representation.
iterator_range< const_phi_iterator > phis() const
Returns a range that iterates over the phis in the basic block.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
BasicBlock * splitBasicBlock(iterator I, const Twine &BBName="", bool Before=false)
Split the basic block into two basic blocks at the specified instruction.
const BasicBlock * getSinglePredecessor() const
Return the predecessor of this block if it has a single predecessor block.
const Instruction * getFirstNonPHIOrDbgOrLifetime(bool SkipPseudoOp=true) const
Returns a pointer to the first instruction in this block that is not a PHINode, a debug intrinsic,...
const Function * getParent() const
Return the enclosing method, or null if none.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
static BranchInst * Create(BasicBlock *IfTrue, Instruction *InsertBefore=nullptr)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
CallingConv::ID getCallingConv() const
Value * getCalledOperand() const
Value * getArgOperand(unsigned i) const
FunctionType * getFunctionType() const
AttributeList getAttributes() const
Return the parameter attributes for this call.
The basic data container for the call graph of a Module of IR.
This class represents a function call, abstracting a target machine's calling convention.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
static Constant * get(Type *Ty, uint64_t V, bool IsSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
static ConstantInt * getFalse(LLVMContext &Context)
static ConstantPointerNull * get(PointerType *T)
Static factory methods - Return objects of the specified value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
static ConstantTokenNone * get(LLVMContext &Context)
Return the ConstantTokenNone.
This represents the llvm.coro.align instruction.
This represents the llvm.coro.alloc instruction.
This class represents the llvm.coro.begin instruction.
AnyCoroIdInst * getId() const
This represents the llvm.coro.id instruction.
void setInfo(Constant *C)
This represents the llvm.coro.size instruction.
This represents the llvm.coro.suspend.async instruction.
CoroAsyncResumeInst * getResumeFunction() const
This represents the llvm.coro.suspend instruction.
CoroSaveInst * getCoroSave() const
DISubprogram * getSubprogram() const
Get the subprogram for this scope.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
This is the common base class for debug info intrinsics for variables.
iterator find(const_arg_type_t< KeyT > Val)
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
This class represents a freeze function that returns random concrete value if an operand is either a ...
A proxy from a FunctionAnalysisManager to an SCC.
Type * getReturnType() const
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
FunctionType * getFunctionType() const
Returns the FunctionType for me.
CallingConv::ID getCallingConv() const
getCallingConv()/setCallingConv(CC) - These method get and set the calling convention of this functio...
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Module * getParent()
Get the module that this global value is contained inside of...
PointerType * getType() const
Global values are always pointers.
@ ExternalLinkage
Externally visible function.
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void setInitializer(Constant *InitVal)
setInitializer - Sets the initializer for this global variable, removing any existing initializer if ...
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
This class captures the data input to the InlineFunction call, and records the auxiliary results prod...
const Module * getModule() const
Return the module owning the function this instruction belongs to or nullptr it the function does not...
const BasicBlock * getParent() const
SymbolTableList< Instruction >::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
This is an important class for using LLVM in a threaded context.
A node in the call graph.
An SCC of the call graph.
A lazily constructed view of the call graph of a module.
void addSplitFunction(Function &OriginalFunction, Function &NewFunction)
Add a new function split/outlined from an existing function.
void addSplitRefRecursiveFunctions(Function &OriginalFunction, ArrayRef< Function * > NewFunctions)
Add new ref-recursive functions split/outlined from an existing function.
Node & get(Function &F)
Get a graph node for a given function, scanning it to populate the graph data as necessary.
SCC * lookupSCC(Node &N) const
Lookup a function's SCC in the graph.
static MDString * get(LLVMContext &Context, StringRef Str)
A Module instance is used to store all the information related to an LLVM module.
FunctionListType::iterator iterator
The Function iterators.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
PrettyStackTraceEntry - This class is used to represent a frame of the "pretty" stack trace that is d...
virtual void print(raw_ostream &OS) const =0
print - Emit information about this stack frame to OS.
Return a value (possibly void), from a function.
bool erase(PtrType Ptr)
erase - If the set contains the specified pointer, remove it and return true, otherwise return false.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
StringRef - Represent a constant reference to a string, i.e.
Type * getTypeAtIndex(const Value *V) const
Given an index value into the type, return the type of the element.
Analysis pass providing the TargetTransformInfo.
Value handle that tracks a Value across RAUW.
ValueTy * getValPtr() const
Triple - Helper class for working with autoconf configuration names.
bool isArch64Bit() const
Test whether the architecture is 64-bit.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
bool isPointerTy() const
True if this is an instance of PointerType.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static Type * getVoidTy(LLVMContext &C)
static IntegerType * getInt8Ty(LLVMContext &C)
static PointerType * getInt8PtrTy(LLVMContext &C, unsigned AS=0)
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
void setOperand(unsigned i, Value *Val)
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs and address space casts.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ C
The default llvm calling convention, compatible with C.
@ Switch
The "resume-switch" lowering, where there are separate resume and destroy functions that are shared b...
void salvageDebugInfo(SmallDenseMap< Argument *, AllocaInst *, 4 > &ArgToAllocaMap, DbgVariableIntrinsic *DVI, bool OptimizeFrame, bool IsEntryPoint)
Attempts to rewrite the location operand of debug intrinsics in terms of the coroutine frame pointer,...
void replaceCoroFree(CoroIdInst *CoroId, bool Elide)
CallInst * createMustTailCall(DebugLoc Loc, Function *MustTailCallFn, ArrayRef< Value * > Arguments, IRBuilder<> &)
DiagnosticInfoOptimizationBase::Argument NV
This is an optimization pass for GlobalISel generic memory operations.
void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
bool verifyFunction(const Function &F, raw_ostream *OS=nullptr)
Check a function for errors, useful for use when debugging a pass.
void salvageDebugInfo(const MachineRegisterInfo &MRI, MachineInstr &MI)
Assuming the instruction MI is going to be deleted, attempt to salvage debug users of MI by writing t...
LazyCallGraph::SCC & updateCGAndAnalysisManagerForFunctionPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
Helper to update the call graph after running a function pass.
LazyCallGraph::SCC & updateCGAndAnalysisManagerForCGSCCPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
Helper to update the call graph after running a CGSCC pass.
Constant * ConstantFoldCompareInstOperands(unsigned Predicate, Constant *LHS, Constant *RHS, const DataLayout &DL, const TargetLibraryInfo *TLI=nullptr, const Instruction *I=nullptr)
Attempt to constant fold a compare instruction (icmp/fcmp) with the specified operands.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
bool isInstructionTriviallyDead(Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction is not used, and the instruction will return.
@ Async
"Asynchronous" unwind tables (instr precise)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
unsigned changeToUnreachable(Instruction *I, bool PreserveLCSSA=false, DomTreeUpdater *DTU=nullptr, MemorySSAUpdater *MSSAU=nullptr)
Insert an unreachable instruction before the specified instruction, making it and the rest of the cod...
raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
DWARFExpression::Operation Op
InlineResult InlineFunction(CallBase &CB, InlineFunctionInfo &IFI, bool MergeAttributes=false, AAResults *CalleeAAR=nullptr, bool InsertLifetime=true, Function *ForwardVarArgsTo=nullptr)
This function inlines the called function into the basic block of the caller.
void CloneFunctionInto(Function *NewFunc, const Function *OldFunc, ValueToValueMapTy &VMap, CloneFunctionChangeType Changes, SmallVectorImpl< ReturnInst * > &Returns, const char *NameSuffix="", ClonedCodeInfo *CodeInfo=nullptr, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Clone OldFunc into NewFunc, transforming the old arguments into references to VMap values.
auto predecessors(const MachineBasicBlock *BB)
bool removeUnreachableBlocks(Function &F, DomTreeUpdater *DTU=nullptr, MemorySSAUpdater *MSSAU=nullptr)
Remove all blocks that can not be reached from the function's entry.
bool isPotentiallyReachable(const Instruction *From, const Instruction *To, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet=nullptr, const DominatorTree *DT=nullptr, const LoopInfo *LI=nullptr)
Determine whether instruction 'To' is reachable from 'From', without passing through any blocks in Ex...
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Support structure for SCC passes to communicate updates the call graph back to the CGSCC pass manager...
SmallPriorityWorklist< LazyCallGraph::SCC *, 1 > & CWorklist
Worklist of the SCCs queued for processing.
const std::function< bool(Instruction &)> MaterializableCallback
PreservedAnalyses run(LazyCallGraph::SCC &C, CGSCCAnalysisManager &AM, LazyCallGraph &CG, CGSCCUpdateResult &UR)
CoroSplitPass(bool OptimizeFrame=false)
GlobalVariable * AsyncFuncPointer
bool IsFrameInlineInStorage
SwitchInst * ResumeSwitch
BasicBlock * ResumeEntryBlock
AsyncLoweringStorage AsyncLowering
FunctionType * getResumeFunctionType() const
IntegerType * getIndexType() const
CoroIdInst * getSwitchCoroId() const
Instruction * getInsertPtAfterFramePtr() const
SmallVector< CoroSizeInst *, 2 > CoroSizes
SmallVector< AnyCoroSuspendInst *, 4 > CoroSuspends
Value * emitAlloc(IRBuilder<> &Builder, Value *Size, CallGraph *CG) const
Allocate memory according to the rules of the active lowering.
SmallVector< CallInst *, 2 > SwiftErrorOps
ConstantInt * getIndex(uint64_t Value) const
bool OptimizeFrame
This would only be true if optimization are enabled.
SwitchLoweringStorage SwitchLowering
CoroBeginInst * CoroBegin
ArrayRef< Type * > getRetconResultTypes() const
void emitDealloc(IRBuilder<> &Builder, Value *Ptr, CallGraph *CG) const
Deallocate memory according to the rules of the active lowering.
RetconLoweringStorage RetconLowering
SmallVector< CoroAlignInst *, 2 > CoroAligns
SmallVector< AnyCoroEndInst *, 4 > CoroEnds
unsigned getSwitchIndexField() const