73#include <initializer_list>
78#define DEBUG_TYPE "coro-split"
110 Value *NewFramePtr =
nullptr;
120 : OrigF(OrigF), NewF(
nullptr), Suffix(Suffix), Shape(Shape),
128 : OrigF(OrigF), NewF(NewF), Suffix(Suffix), Shape(Shape),
129 FKind(Shape.
ABI == coro::ABI::Async ? Kind::Async : Kind::Continuation),
132 Shape.
ABI == coro::ABI::RetconOnce || Shape.
ABI == coro::ABI::Async);
133 assert(NewF &&
"need existing function for continuation");
134 assert(ActiveSuspend &&
"need active suspend point for continuation");
138 assert(NewF !=
nullptr &&
"declaration not yet set");
145 bool isSwitchDestroyFunction() {
148 case Kind::Continuation:
149 case Kind::SwitchResume:
151 case Kind::SwitchUnwind:
152 case Kind::SwitchCleanup:
158 void replaceEntryBlock();
159 Value *deriveNewFramePointer();
160 void replaceRetconOrAsyncSuspendUses();
161 void replaceCoroSuspends();
162 void replaceCoroEnds();
165 void handleFinalSuspend();
174 Shape.
ABI == coro::ABI::RetconOnce);
187 auto *EndAsync = dyn_cast<CoroAsyncEndInst>(End);
193 auto *MustTailCallFunc = EndAsync->getMustTailCallFunction();
194 if (!MustTailCallFunc) {
200 auto *CoroEndBlock = End->getParent();
201 auto *MustTailCallFuncBlock = CoroEndBlock->getSinglePredecessor();
202 assert(MustTailCallFuncBlock &&
"Must have a single predecessor block");
203 auto It = MustTailCallFuncBlock->getTerminator()->getIterator();
204 auto *MustTailCall = cast<CallInst>(&*std::prev(It));
205 CoroEndBlock->splice(End->getIterator(), MustTailCallFuncBlock,
206 MustTailCall->getIterator());
214 auto *BB = End->getParent();
215 BB->splitBasicBlock(End);
216 BB->getTerminator()->eraseFromParent();
219 assert(InlineRes.isSuccess() &&
"Expected inlining to succeed");
236 case coro::ABI::Switch:
245 case coro::ABI::Async: {
247 if (!CoroEndBlockNeedsCleanup)
254 case coro::ABI::RetconOnce:
261 case coro::ABI::Retcon: {
264 auto RetStructTy = dyn_cast<StructType>(
RetTy);
266 cast<PointerType>(RetStructTy ? RetStructTy->getElementType(0) :
RetTy);
273 Builder.CreateRet(ReturnValue);
279 auto *BB = End->getParent();
280 BB->splitBasicBlock(End);
281 BB->getTerminator()->eraseFromParent();
295 Shape.
ABI == coro::ABI::Switch &&
296 "markCoroutineAsDone is only supported for Switch-Resumed ABI for now.");
297 auto *GepIndex =
Builder.CreateStructGEP(
302 Builder.CreateStore(NullPtr, GepIndex);
313 case coro::ABI::Switch: {
326 case coro::ABI::Async:
329 case coro::ABI::Retcon:
330 case coro::ABI::RetconOnce:
337 auto *FromPad = cast<CleanupPadInst>(Bundle->Inputs[0]);
338 auto *CleanupRet =
Builder.CreateCleanupRet(FromPad,
nullptr);
339 End->getParent()->splitBasicBlock(End);
340 CleanupRet->getParent()->getTerminator()->eraseFromParent();
351 auto &
Context = End->getContext();
354 End->eraseFromParent();
379 auto *GepIndex =
Builder.CreateStructGEP(
386 size_t SuspendIndex = 0;
388 auto *S = cast<CoroSuspendInst>(AnyS);
394 auto *Save = S->getCoroSave();
411 auto *GepIndex =
Builder.CreateStructGEP(
413 Builder.CreateStore(IndexVal, GepIndex);
417 Save->eraseFromParent();
442 auto *SuspendBB = S->getParent();
444 SuspendBB->splitBasicBlock(S,
"resume." +
Twine(SuspendIndex));
445 auto *LandingBB = ResumeBB->splitBasicBlock(
446 S->getNextNode(), ResumeBB->getName() +
Twine(
".landing"));
447 Switch->addCase(IndexVal, ResumeBB);
449 cast<BranchInst>(SuspendBB->getTerminator())->setSuccessor(0, LandingBB);
451 S->replaceAllUsesWith(PN);
452 PN->addIncoming(
Builder.getInt8(-1), SuspendBB);
453 PN->addIncoming(S, ResumeBB);
458 Builder.SetInsertPoint(UnreachBB);
473void CoroCloner::handleFinalSuspend() {
474 assert(Shape.ABI == coro::ABI::Switch &&
475 Shape.SwitchLowering.HasFinalSuspend);
477 if (isSwitchDestroyFunction() && Shape.SwitchLowering.HasUnwindCoroEnd)
480 auto *
Switch = cast<SwitchInst>(VMap[Shape.SwitchLowering.ResumeSwitch]);
481 auto FinalCaseIt = std::prev(
Switch->case_end());
482 BasicBlock *ResumeBB = FinalCaseIt->getCaseSuccessor();
483 Switch->removeCase(FinalCaseIt);
484 if (isSwitchDestroyFunction()) {
488 auto *GepIndex =
Builder.CreateStructGEP(Shape.FrameTy, NewFramePtr,
491 auto *
Load =
Builder.CreateLoad(Shape.getSwitchResumePointerType(),
501 auto *AsyncSuspend = cast<CoroSuspendAsyncInst>(Suspend);
502 auto *StructTy = cast<StructType>(AsyncSuspend->getType());
505 return FunctionType::get(VoidTy, StructTy->elements(),
false);
513 auto *FnTy = (Shape.
ABI != coro::ABI::Async)
521 M->getFunctionList().insert(InsertBefore, NewF);
530void CoroCloner::replaceRetconOrAsyncSuspendUses() {
531 assert(Shape.ABI == coro::ABI::Retcon || Shape.ABI == coro::ABI::RetconOnce ||
532 Shape.ABI == coro::ABI::Async);
534 auto NewS = VMap[ActiveSuspend];
535 if (NewS->use_empty())
return;
541 bool IsAsyncABI = Shape.ABI == coro::ABI::Async;
542 for (
auto I = IsAsyncABI ? NewF->arg_begin() : std::next(NewF->arg_begin()),
549 if (!isa<StructType>(NewS->getType())) {
551 NewS->replaceAllUsesWith(
Args.front());
557 auto *EVI = dyn_cast<ExtractValueInst>(
U.getUser());
558 if (!EVI || EVI->getNumIndices() != 1)
561 EVI->replaceAllUsesWith(Args[EVI->getIndices().front()]);
562 EVI->eraseFromParent();
566 if (NewS->use_empty())
return;
570 for (
size_t I = 0,
E =
Args.size();
I !=
E; ++
I)
571 Agg =
Builder.CreateInsertValue(Agg, Args[
I],
I);
573 NewS->replaceAllUsesWith(Agg);
576void CoroCloner::replaceCoroSuspends() {
577 Value *SuspendResult;
586 case coro::ABI::Switch:
587 SuspendResult =
Builder.getInt8(isSwitchDestroyFunction() ? 1 : 0);
591 case coro::ABI::Async:
597 case coro::ABI::RetconOnce:
598 case coro::ABI::Retcon:
604 if (CS == ActiveSuspend)
continue;
606 auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[CS]);
607 MappedCS->replaceAllUsesWith(SuspendResult);
608 MappedCS->eraseFromParent();
612void CoroCloner::replaceCoroEnds() {
616 auto *NewCE = cast<AnyCoroEndInst>(VMap[CE]);
625 Value *CachedSlot =
nullptr;
626 auto getSwiftErrorSlot = [&](
Type *ValueTy) ->
Value * {
629 ->isOpaqueOrPointeeTypeMatches(ValueTy) &&
630 "multiple swifterror slots in function with different types");
635 for (
auto &
Arg :
F.args()) {
636 if (
Arg.isSwiftError()) {
639 ->isOpaqueOrPointeeTypeMatches(ValueTy) &&
640 "swifterror argument does not have expected type");
647 auto Alloca =
Builder.CreateAlloca(ValueTy);
648 Alloca->setSwiftError(
true);
655 auto MappedOp = VMap ? cast<CallInst>((*VMap)[Op]) : Op;
660 if (Op->arg_empty()) {
661 auto ValueTy = Op->getType();
662 auto Slot = getSwiftErrorSlot(ValueTy);
663 MappedResult =
Builder.CreateLoad(ValueTy, Slot);
665 assert(Op->arg_size() == 1);
666 auto Value = MappedOp->getArgOperand(0);
668 auto Slot = getSwiftErrorSlot(ValueTy);
673 MappedOp->replaceAllUsesWith(MappedResult);
674 MappedOp->eraseFromParent();
678 if (VMap ==
nullptr) {
683void CoroCloner::replaceSwiftErrorOps() {
687void CoroCloner::salvageDebugInfo() {
690 for (
auto &BB : *NewF)
692 if (
auto *DVI = dyn_cast<DbgVariableIntrinsic>(&
I))
700 auto IsUnreachableBlock = [&](
BasicBlock *BB) {
705 if (IsUnreachableBlock(DVI->getParent()))
706 DVI->eraseFromParent();
707 else if (isa_and_nonnull<AllocaInst>(DVI->getVariableLocationOp(0))) {
710 for (
auto *
User : DVI->getVariableLocationOp(0)->
users())
711 if (
auto *
I = dyn_cast<Instruction>(
User))
712 if (!isa<AllocaInst>(
I) && !IsUnreachableBlock(
I->getParent()))
715 DVI->eraseFromParent();
720void CoroCloner::replaceEntryBlock() {
726 auto *Entry = cast<BasicBlock>(VMap[Shape.AllocaSpillBlock]);
727 auto *OldEntry = &NewF->getEntryBlock();
728 Entry->setName(
"entry" + Suffix);
729 Entry->moveBefore(OldEntry);
730 Entry->getTerminator()->eraseFromParent();
735 assert(Entry->hasOneUse());
736 auto BranchToEntry = cast<BranchInst>(Entry->user_back());
737 assert(BranchToEntry->isUnconditional());
738 Builder.SetInsertPoint(BranchToEntry);
740 BranchToEntry->eraseFromParent();
745 case coro::ABI::Switch: {
749 cast<BasicBlock>(VMap[Shape.SwitchLowering.ResumeEntryBlock]);
753 case coro::ABI::Async:
754 case coro::ABI::Retcon:
755 case coro::ABI::RetconOnce: {
759 assert((Shape.ABI == coro::ABI::Async &&
760 isa<CoroSuspendAsyncInst>(ActiveSuspend)) ||
761 ((Shape.ABI == coro::ABI::Retcon ||
762 Shape.ABI == coro::ABI::RetconOnce) &&
763 isa<CoroSuspendRetconInst>(ActiveSuspend)));
764 auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[ActiveSuspend]);
765 auto Branch = cast<BranchInst>(MappedCS->getNextNode());
777 auto *Alloca = dyn_cast<AllocaInst>(&
I);
778 if (!Alloca ||
I.use_empty())
780 if (DT.isReachableFromEntry(
I.getParent()) ||
781 !isa<ConstantInt>(Alloca->getArraySize()))
783 I.moveBefore(*Entry, Entry->getFirstInsertionPt());
788Value *CoroCloner::deriveNewFramePointer() {
793 case coro::ABI::Switch:
794 return &*NewF->arg_begin();
800 case coro::ABI::Async: {
801 auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
802 auto ContextIdx = ActiveAsyncSuspend->getStorageArgumentIndex() & 0xff;
803 auto *CalleeContext = NewF->getArg(ContextIdx);
804 auto *FramePtrTy = Shape.FrameTy->getPointerTo();
805 auto *ProjectionFunc =
806 ActiveAsyncSuspend->getAsyncContextProjectionFunction();
808 cast<CoroSuspendAsyncInst>(VMap[ActiveSuspend])->getDebugLoc();
810 auto *CallerContext =
Builder.CreateCall(ProjectionFunc->getFunctionType(),
811 ProjectionFunc, CalleeContext);
812 CallerContext->setCallingConv(ProjectionFunc->getCallingConv());
813 CallerContext->setDebugLoc(DbgLoc);
816 auto *FramePtrAddr =
Builder.CreateConstInBoundsGEP1_32(
818 Shape.AsyncLowering.FrameOffset,
"async.ctx.frameptr");
822 assert(InlineRes.isSuccess());
824 return Builder.CreateBitCast(FramePtrAddr, FramePtrTy);
827 case coro::ABI::Retcon:
828 case coro::ABI::RetconOnce: {
829 Argument *NewStorage = &*NewF->arg_begin();
830 auto FramePtrTy = Shape.FrameTy->getPointerTo();
833 if (Shape.RetconLowering.IsFrameInlineInStorage)
834 return Builder.CreateBitCast(NewStorage, FramePtrTy);
838 Builder.CreateBitCast(NewStorage, FramePtrTy->getPointerTo());
839 return Builder.CreateLoad(FramePtrTy, FramePtrPtr);
847 Align Alignment,
bool NoAlias) {
857 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
861 unsigned ParamIndex) {
864 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
868 unsigned ParamIndex) {
871 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
876void CoroCloner::create() {
880 OrigF.getParent()->end(), ActiveSuspend);
891 VMap[&
A] = DummyArgs.
back();
898 auto savedVisibility = NewF->getVisibility();
899 auto savedUnnamedAddr = NewF->getUnnamedAddr();
900 auto savedDLLStorageClass = NewF->getDLLStorageClass();
905 auto savedLinkage = NewF->getLinkage();
909 CloneFunctionChangeType::LocalChangesOnly, Returns);
911 auto &
Context = NewF->getContext();
920 assert(SP != OrigF.getSubprogram() && SP->isDistinct());
922 if (
auto DL = ActiveSuspend->getDebugLoc())
923 if (SP->getFile() ==
DL->getFile())
924 SP->setScopeLine(
DL->getLine());
933 if (!SP->getDeclaration() && SP->getUnit() &&
934 SP->getUnit()->getSourceLanguage() == dwarf::DW_LANG_Swift)
935 SP->replaceLinkageName(
MDString::get(Context, NewF->getName()));
938 NewF->setLinkage(savedLinkage);
939 NewF->setVisibility(savedVisibility);
940 NewF->setUnnamedAddr(savedUnnamedAddr);
941 NewF->setDLLStorageClass(savedDLLStorageClass);
945 if (Shape.ABI == coro::ABI::Switch &&
946 NewF->hasMetadata(LLVMContext::MD_func_sanitize))
947 NewF->eraseMetadata(LLVMContext::MD_func_sanitize);
950 auto OrigAttrs = NewF->getAttributes();
954 case coro::ABI::Switch:
957 NewAttrs = NewAttrs.addFnAttributes(
958 Context,
AttrBuilder(Context, OrigAttrs.getFnAttrs()));
961 Shape.FrameAlign,
false);
963 case coro::ABI::Async: {
964 auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
965 if (OrigF.hasParamAttribute(Shape.AsyncLowering.ContextArgNo,
966 Attribute::SwiftAsync)) {
968 ActiveAsyncSuspend->getStorageArgumentIndex();
969 auto ContextArgIndex = ArgAttributeIndices & 0xff;
974 auto SwiftSelfIndex = ArgAttributeIndices >> 8;
980 auto FnAttrs = OrigF.getAttributes().getFnAttrs();
981 NewAttrs = NewAttrs.addFnAttributes(Context,
AttrBuilder(Context, FnAttrs));
984 case coro::ABI::Retcon:
985 case coro::ABI::RetconOnce:
988 NewAttrs = Shape.RetconLowering.ResumePrototype->getAttributes();
992 Shape.getRetconCoroId()->getStorageSize(),
993 Shape.getRetconCoroId()->getStorageAlignment(),
1004 case coro::ABI::Switch:
1005 case coro::ABI::RetconOnce:
1014 case coro::ABI::Retcon:
1020 case coro::ABI::Async:
1024 NewF->setAttributes(NewAttrs);
1025 NewF->setCallingConv(Shape.getResumeFunctionCC());
1028 replaceEntryBlock();
1030 Builder.SetInsertPoint(&NewF->getEntryBlock().front());
1031 NewFramePtr = deriveNewFramePointer();
1034 Value *OldFramePtr = VMap[Shape.FramePtr];
1035 NewFramePtr->
takeName(OldFramePtr);
1039 auto *NewVFrame =
Builder.CreateBitCast(
1041 Value *OldVFrame = cast<Value>(VMap[Shape.CoroBegin]);
1042 if (OldVFrame != NewVFrame)
1049 DummyArg->deleteValue();
1052 switch (Shape.ABI) {
1053 case coro::ABI::Switch:
1057 if (Shape.SwitchLowering.HasFinalSuspend)
1058 handleFinalSuspend();
1060 case coro::ABI::Async:
1061 case coro::ABI::Retcon:
1062 case coro::ABI::RetconOnce:
1065 assert(ActiveSuspend !=
nullptr &&
1066 "no active suspend when lowering a continuation-style coroutine");
1067 replaceRetconOrAsyncSuspendUses();
1072 replaceCoroSuspends();
1085 if (Shape.ABI == coro::ABI::Switch)
1087 FKind == CoroCloner::Kind::SwitchCleanup);
1095 CoroCloner Cloner(
F, Suffix, Shape, FKind);
1097 return Cloner.getFunction();
1103 auto *FuncPtrStruct = cast<ConstantStruct>(
1105 auto *OrigRelativeFunOffset = FuncPtrStruct->getOperand(0);
1106 auto *OrigContextSize = FuncPtrStruct->getOperand(1);
1110 FuncPtrStruct->getType(), OrigRelativeFunOffset, NewContextSize);
1116 if (Shape.
ABI == coro::ABI::Async)
1129 auto *SizeIntrin = Shape.
CoroSizes.back();
1130 Module *M = SizeIntrin->getModule();
1162 auto *ArrTy = ArrayType::get(Part->
getType(), Args.size());
1166 GlobalVariable::PrivateLinkage, ConstVal,
1167 F.getName() +
Twine(
".resumers"));
1182 auto *ResumeAddr =
Builder.CreateStructGEP(
1185 Builder.CreateStore(ResumeFn, ResumeAddr);
1187 Value *DestroyOrCleanupFn = DestroyFn;
1193 DestroyOrCleanupFn =
Builder.CreateSelect(CA, DestroyFn, CleanupFn);
1196 auto *DestroyAddr =
Builder.CreateStructGEP(
1199 Builder.CreateStore(DestroyOrCleanupFn, DestroyAddr);
1221 auto V = PN.getIncomingValueForBlock(PrevBB);
1223 auto VI = ResolvedValues.
find(V);
1224 if (
VI != ResolvedValues.
end())
1227 ResolvedValues[&PN] = V;
1243 if (isa<BitCastInst>(
I) ||
I->isDebugOrPseudoInst() ||
1244 I->isLifetimeStartOrEnd())
1245 I =
I->getNextNode();
1249 I = &*
I->eraseFromParent();
1256 auto TryResolveConstant = [&ResolvedValues](
Value *V) {
1257 auto It = ResolvedValues.
find(V);
1258 if (It != ResolvedValues.
end())
1260 return dyn_cast<ConstantInt>(V);
1264 while (
I->isTerminator() || isa<CmpInst>(
I)) {
1265 if (isa<ReturnInst>(
I)) {
1266 if (
I != InitialInst) {
1269 if (UnconditionalSucc)
1275 if (
auto *BR = dyn_cast<BranchInst>(
I)) {
1276 if (BR->isUnconditional()) {
1278 if (
I == InitialInst)
1279 UnconditionalSucc = Succ;
1298 }
else if (
auto *CondCmp = dyn_cast<CmpInst>(
I)) {
1301 auto *BR = dyn_cast<BranchInst>(
1302 GetFirstValidInstruction(CondCmp->getNextNode()));
1303 if (!BR || !BR->isConditional() || CondCmp != BR->getCondition())
1309 ConstantInt *Cond0 = TryResolveConstant(CondCmp->getOperand(0));
1310 auto *Cond1 = dyn_cast<ConstantInt>(CondCmp->getOperand(1));
1311 if (!Cond0 || !Cond1)
1318 CondCmp->getPredicate(), Cond0, Cond1,
DL));
1322 CondCmp->replaceAllUsesWith(ConstResult);
1323 CondCmp->eraseFromParent();
1328 }
else if (
auto *
SI = dyn_cast<SwitchInst>(
I)) {
1351 if (!CalleeTy->getReturnType()->isVoidTy() || (CalleeTy->getNumParams() != 1))
1354 Type *CalleeParmTy = CalleeTy->getParamType(0);
1364 Attribute::StructRet, Attribute::ByVal, Attribute::InAlloca,
1365 Attribute::Preallocated, Attribute::InReg, Attribute::Returned,
1366 Attribute::SwiftSelf, Attribute::SwiftError};
1368 for (
auto AK : ABIAttrs)
1369 if (Attrs.hasParamAttr(0, AK))
1381 bool changed =
false;
1386 if (
auto *Call = dyn_cast<CallInst>(&
I))
1407 auto *CoroId = CoroBegin->
getId();
1409 switch (Shape.
ABI) {
1410 case coro::ABI::Switch: {
1411 auto SwitchId = cast<CoroIdInst>(CoroId);
1417 auto *VFrame =
Builder.CreateBitCast(Frame,
Builder.getInt8PtrTy());
1418 AllocInst->replaceAllUsesWith(
Builder.getFalse());
1419 AllocInst->eraseFromParent();
1420 CoroBegin->replaceAllUsesWith(VFrame);
1422 CoroBegin->replaceAllUsesWith(CoroBegin->getMem());
1427 case coro::ABI::Async:
1428 case coro::ABI::Retcon:
1429 case coro::ABI::RetconOnce:
1434 CoroBegin->eraseFromParent();
1443 if (isa<IntrinsicInst>(
I))
1446 if (isa<CallBase>(
I))
1462 while (!Worklist.
empty()) {
1472 Set.
erase(ResDesBB);
1474 for (
auto *BB : Set)
1483 auto *ResumeOrDestroyBB = ResumeOrDestroy->
getParent();
1485 if (SaveBB == ResumeOrDestroyBB)
1513 Prev = Pred->getTerminator();
1516 CallBase *CB = dyn_cast<CallBase>(Prev);
1523 auto *SubFn = dyn_cast<CoroSubFnInst>(
Callee);
1528 if (SubFn->getFrame() != CoroBegin)
1542 Save->eraseFromParent();
1545 if (
auto *Invoke = dyn_cast<InvokeInst>(CB)) {
1554 if (CalledValue != SubFn && CalledValue->user_empty())
1555 if (
auto *
I = dyn_cast<Instruction>(CalledValue))
1556 I->eraseFromParent();
1559 if (SubFn->user_empty())
1560 SubFn->eraseFromParent();
1568 if (Shape.
ABI != coro::ABI::Switch)
1572 size_t I = 0,
N = S.size();
1576 size_t ChangedFinalIndex = std::numeric_limits<size_t>::max();
1578 auto SI = cast<CoroSuspendInst>(S[
I]);
1587 if (cast<CoroSuspendInst>(S[
I])->isFinal()) {
1589 ChangedFinalIndex =
I;
1601 if (ChangedFinalIndex <
N) {
1602 assert(cast<CoroSuspendInst>(S[ChangedFinalIndex])->isFinal());
1603 std::swap(S[ChangedFinalIndex], S.back());
1614 CoroCloner::Kind::SwitchResume);
1616 CoroCloner::Kind::SwitchUnwind);
1618 CoroCloner::Kind::SwitchCleanup);
1647 Value *Continuation) {
1653 auto *Val =
Builder.CreateBitOrPointerCast(Continuation, Int8PtrTy);
1654 ResumeIntrinsic->replaceAllUsesWith(Val);
1655 ResumeIntrinsic->eraseFromParent();
1665 for (
auto *paramTy : FnTy->params()) {
1667 if (paramTy != FnArgs[ArgIdx]->
getType())
1669 Builder.CreateBitOrPointerCast(FnArgs[ArgIdx], paramTy));
1685 auto *TailCall =
Builder.CreateCall(FnTy, MustTailCallFn, CallArgs);
1687 TailCall->setDebugLoc(Loc);
1698 F.removeFnAttr(Attribute::NoReturn);
1699 F.removeRetAttr(Attribute::NoAlias);
1700 F.removeRetAttr(Attribute::NonNull);
1712 "async.ctx.frameptr");
1723 auto NextF = std::next(
F.getIterator());
1731 auto ResumeNameSuffix =
".resume.";
1732 auto ProjectionFunctionName =
1733 Suspend->getAsyncContextProjectionFunction()->getName();
1734 bool UseSwiftMangling =
false;
1735 if (ProjectionFunctionName.equals(
"__swift_async_resume_project_context")) {
1736 ResumeNameSuffix =
"TQ";
1737 UseSwiftMangling =
true;
1738 }
else if (ProjectionFunctionName.equals(
1739 "__swift_async_resume_get_context")) {
1740 ResumeNameSuffix =
"TY";
1741 UseSwiftMangling =
true;
1745 UseSwiftMangling ? ResumeNameSuffix +
Twine(
Idx) +
"_"
1752 auto *SuspendBB = Suspend->getParent();
1753 auto *NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
1754 auto *Branch = cast<BranchInst>(SuspendBB->getTerminator());
1759 Branch->setSuccessor(0, ReturnBB);
1764 auto *Fn = Suspend->getMustTailCallFunction();
1773 assert(InlineRes.isSuccess() &&
"Expected inlining to succeed");
1783 auto *Clone = Clones[
Idx];
1785 CoroCloner(
F,
"resume." +
Twine(
Idx), Shape, Clone, Suspend).create();
1791 assert(Shape.
ABI == coro::ABI::Retcon ||
1792 Shape.
ABI == coro::ABI::RetconOnce);
1797 F.removeFnAttr(Attribute::NoReturn);
1798 F.removeRetAttr(Attribute::NoAlias);
1799 F.removeRetAttr(Attribute::NonNull);
1805 RawFramePtr = Id->getStorage();
1821 auto Dest =
Builder.CreateBitCast(Id->getStorage(),
1823 Builder.CreateStore(RawFramePtr, Dest);
1839 auto NextF = std::next(
F.getIterator());
1843 for (
size_t i = 0, e = Shape.
CoroSuspends.size(); i != e; ++i) {
1844 auto Suspend = cast<CoroSuspendRetconInst>(Shape.
CoroSuspends[i]);
1853 auto SuspendBB = Suspend->getParent();
1854 auto NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
1855 auto Branch = cast<BranchInst>(SuspendBB->getTerminator());
1879 auto RetTy =
F.getReturnType();
1884 auto CastedContinuationTy =
1885 (ReturnPHIs.
size() == 1 ?
RetTy :
RetTy->getStructElementType(0));
1886 auto *CastedContinuation =
1887 Builder.CreateBitCast(ReturnPHIs[0], CastedContinuationTy);
1890 if (ReturnPHIs.
size() == 1) {
1891 RetV = CastedContinuation;
1894 RetV =
Builder.CreateInsertValue(RetV, CastedContinuation, 0);
1895 for (
size_t I = 1,
E = ReturnPHIs.
size();
I !=
E; ++
I)
1896 RetV =
Builder.CreateInsertValue(RetV, ReturnPHIs[
I],
I);
1903 Branch->setSuccessor(0, ReturnBB);
1904 ReturnPHIs[0]->addIncoming(Continuation, SuspendBB);
1905 size_t NextPHIIndex = 1;
1906 for (
auto &VUse : Suspend->value_operands())
1907 ReturnPHIs[NextPHIIndex++]->addIncoming(&*VUse, SuspendBB);
1912 for (
size_t i = 0, e = Shape.
CoroSuspends.size(); i != e; ++i) {
1914 auto Clone = Clones[i];
1916 CoroCloner(
F,
"resume." +
Twine(i), Shape, Clone, Suspend).create();
1924 PrettyStackTraceFunction(
Function &F) :
F(
F) {}
1926 OS <<
"While splitting coroutine ";
1927 F.printAsOperand(
OS,
false,
F.getParent());
1936 std::function<
bool(
Instruction &)> MaterializableCallback) {
1937 PrettyStackTraceFunction prettyStackTrace(
F);
1948 buildCoroutineFrame(
F, Shape, MaterializableCallback);
1956 switch (Shape.
ABI) {
1957 case coro::ABI::Switch:
1960 case coro::ABI::Async:
1963 case coro::ABI::Retcon:
1964 case coro::ABI::RetconOnce:
1979 for (
auto &BB :
F) {
1980 for (
auto &
I : BB) {
1981 if (
auto *DDI = dyn_cast<DbgDeclareInst>(&
I)) {
1987 for (
auto *DDI : Worklist)
2008 if (Shape.
ABI != coro::ABI::Switch)
2012 auto &
Context = End->getContext();
2014 End->eraseFromParent();
2018 if (!Clones.
empty()) {
2019 switch (Shape.
ABI) {
2020 case coro::ABI::Switch:
2026 case coro::ABI::Async:
2027 case coro::ABI::Retcon:
2028 case coro::ABI::RetconOnce:
2031 if (!Clones.empty())
2060 auto *Cast = dyn_cast<BitCastInst>(U.getUser());
2061 if (!Cast || Cast->getType() != Fn->getType())
2065 Cast->replaceAllUsesWith(Fn);
2066 Cast->eraseFromParent();
2075 while (
auto *Cast = dyn_cast<BitCastInst>(CastFn)) {
2076 if (!Cast->use_empty())
2078 CastFn = Cast->getOperand(0);
2079 Cast->eraseFromParent();
2085 bool Changed =
false;
2088 auto *Prepare = cast<CallInst>(
P.getUser());
2099 auto *PrepareFn = M.getFunction(
Name);
2100 if (PrepareFn && !PrepareFn->use_empty())
2105 : MaterializableCallback(coro::defaultMaterializable),
2106 OptimizeFrame(OptimizeFrame) {}
2114 Module &M = *
C.begin()->getFunction().getParent();
2126 if (
N.getFunction().isPresplitCoroutine())
2129 if (Coroutines.
empty() && PrepareFns.
empty())
2132 if (Coroutines.
empty()) {
2133 for (
auto *PrepareFn : PrepareFns) {
2141 LLVM_DEBUG(
dbgs() <<
"CoroSplit: Processing coroutine '" <<
F.getName()
2143 F.setSplittedCoroutine();
2154 <<
"Split '" <<
ore::NV(
"function",
F.getName())
2155 <<
"' (frame_size=" <<
ore::NV(
"frame_size", Shape.FrameSize)
2156 <<
", align=" <<
ore::NV(
"align", Shape.FrameAlign.value()) <<
")";
2159 if (!Shape.CoroSuspends.empty()) {
2167 if (!PrepareFns.
empty()) {
2168 for (
auto *PrepareFn : PrepareFns) {
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
amdgpu Simplify well known AMD library false FunctionCallee Callee
amdgpu Simplify well known AMD library false FunctionCallee Value * Arg
AMDGPU Lower Kernel Arguments
SmallPtrSet< MachineInstr *, 2 > Uses
This file contains the simple types necessary to represent the attributes associated with functions a...
SmallVector< MachineOperand, 4 > Cond
BlockVerifier::State From
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
This file provides interfaces used to manipulate a call graph, regardless if it is a "old style" Call...
This file provides interfaces used to build and manipulate a call graph, which is a very useful tool ...
This file contains the declarations for the subclasses of Constant, which represent the different fla...
Remove calls to llvm coro end in the original static function void removeCoroEnds(const coro::Shape &Shape)
static void addSwiftSelfAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex)
static void splitSwitchCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI)
static bool hasCallsBetween(Instruction *Save, Instruction *ResumeOrDestroy)
static void replaceSwiftErrorOps(Function &F, coro::Shape &Shape, ValueToValueMapTy *VMap)
static void addAsyncContextAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex)
static void addMustTailToCoroResumes(Function &F, TargetTransformInfo &TTI)
static void maybeFreeRetconStorage(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr, CallGraph *CG)
static bool hasCallsInBlocksBetween(BasicBlock *SaveBB, BasicBlock *ResDesBB)
static Function * createCloneDeclaration(Function &OrigF, coro::Shape &Shape, const Twine &Suffix, Module::iterator InsertBefore, AnyCoroSuspendInst *ActiveSuspend)
static FunctionType * getFunctionTypeFromAsyncSuspend(AnyCoroSuspendInst *Suspend)
static void addPrepareFunction(const Module &M, SmallVectorImpl< Function * > &Fns, StringRef Name)
static void updateCallGraphAfterCoroutineSplit(LazyCallGraph::Node &N, const coro::Shape &Shape, const SmallVectorImpl< Function * > &Clones, LazyCallGraph::SCC &C, LazyCallGraph &CG, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
static void simplifySuspendPoints(coro::Shape &Shape)
static void addFramePointerAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex, uint64_t Size, Align Alignment, bool NoAlias)
static bool replaceAllPrepares(Function *PrepareFn, LazyCallGraph &CG, LazyCallGraph::SCC &C)
static void replaceFallthroughCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
Replace a non-unwind call to llvm.coro.end.
static void replaceFrameSizeAndAlignment(coro::Shape &Shape)
static bool replaceCoroEndAsync(AnyCoroEndInst *End)
Replace an llvm.coro.end.async.
Replace a call to llvm coro prepare static retcon void replacePrepare(CallInst *Prepare, LazyCallGraph &CG, LazyCallGraph::SCC &C)
static void replaceUnwindCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
Replace an unwind call to llvm.coro.end.
static bool simplifySuspendPoint(CoroSuspendInst *Suspend, CoroBeginInst *CoroBegin)
static bool hasCallsInBlockBetween(Instruction *From, Instruction *To)
static void markCoroutineAsDone(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr)
static void splitAsyncCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones)
static void updateAsyncFuncPointerContextSize(coro::Shape &Shape)
static void replaceCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
static void setCoroInfo(Function &F, coro::Shape &Shape, ArrayRef< Function * > Fns)
static void handleNoSuspendCoroutine(coro::Shape &Shape)
static void updateCoroFrame(coro::Shape &Shape, Function *ResumeFn, Function *DestroyFn, Function *CleanupFn)
static void createResumeEntryBlock(Function &F, coro::Shape &Shape)
static coro::Shape splitCoroutine(Function &F, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI, bool OptimizeFrame, std::function< bool(Instruction &)> MaterializableCallback)
static void postSplitCleanup(Function &F)
static bool simplifyTerminatorLeadingToRet(Instruction *InitialInst)
static void splitRetconCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones)
static void scanPHIsAndUpdateValueMap(Instruction *Prev, BasicBlock *NewBlock, DenseMap< Value *, Value * > &ResolvedValues)
Coerce the arguments in p FnArgs according to p FnTy in p static CallArgs void coerceArguments(IRBuilder<> &Builder, FunctionType *FnTy, ArrayRef< Value * > FnArgs, SmallVectorImpl< Value * > &CallArgs)
static void replaceAsyncResumeFunction(CoroSuspendAsyncInst *Suspend, Value *Continuation)
static bool shouldBeMustTail(const CallInst &CI, const Function &F)
static Function * createClone(Function &F, const Twine &Suffix, coro::Shape &Shape, CoroCloner::Kind FKind)
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
This file defines the DenseMap class.
This file contains constants used for implementing Dwarf debug support.
static Function * getFunction(Constant *C)
This file provides various utilities for inspecting and working with the control flow graph in LLVM I...
Implements a lazy call graph analysis and related passes for the new pass manager.
Module.h This file contains the declarations for the Module class.
print must be executed print the must be executed context for all instructions
FunctionAnalysisManager FAM
This file provides a priority worklist.
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
static SymbolRef::Type getType(const Symbol *Sym)
static const unsigned FramePtr
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
CoroAllocInst * getCoroAlloc()
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
ArrayRef< T > drop_front(size_t N=1) const
Drop the first N elements of the array.
size_t size() const
size - Get the array size.
AttrBuilder & addAlignmentAttr(MaybeAlign Align)
This turns an alignment into the form used internally in Attribute.
AttrBuilder & addAttribute(Attribute::AttrKind Val)
Add an attribute to the builder.
AttrBuilder & addDereferenceableAttr(uint64_t Bytes)
This turns the number of dereferenceable bytes into the form used internally in Attribute.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
LLVM Basic Block Representation.
iterator_range< const_phi_iterator > phis() const
Returns a range that iterates over the phis in the basic block.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
BasicBlock * splitBasicBlock(iterator I, const Twine &BBName="", bool Before=false)
Split the basic block into two basic blocks at the specified instruction.
const BasicBlock * getSinglePredecessor() const
Return the predecessor of this block if it has a single predecessor block.
const Instruction * getFirstNonPHIOrDbgOrLifetime(bool SkipPseudoOp=true) const
Returns a pointer to the first instruction in this block that is not a PHINode, a debug intrinsic,...
const Function * getParent() const
Return the enclosing method, or null if none.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
void removePredecessor(BasicBlock *Pred, bool KeepOneInputPHIs=false)
Update PHI nodes in this BasicBlock before removal of predecessor Pred.
static BranchInst * Create(BasicBlock *IfTrue, Instruction *InsertBefore=nullptr)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
bool isInlineAsm() const
Check if this call is an inline asm statement.
CallingConv::ID getCallingConv() const
Value * getCalledOperand() const
Value * getArgOperand(unsigned i) const
FunctionType * getFunctionType() const
AttributeList getAttributes() const
Return the parameter attributes for this call.
The basic data container for the call graph of a Module of IR.
This class represents a function call, abstracting a target machine's calling convention.
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
static Constant * get(Type *Ty, uint64_t V, bool IsSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
static ConstantInt * getFalse(LLVMContext &Context)
static ConstantPointerNull * get(PointerType *T)
Static factory methods - Return objects of the specified value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
static ConstantTokenNone * get(LLVMContext &Context)
Return the ConstantTokenNone.
This represents the llvm.coro.align instruction.
This represents the llvm.coro.alloc instruction.
This class represents the llvm.coro.begin instruction.
AnyCoroIdInst * getId() const
This represents the llvm.coro.id instruction.
void setInfo(Constant *C)
This represents the llvm.coro.size instruction.
This represents the llvm.coro.suspend.async instruction.
CoroAsyncResumeInst * getResumeFunction() const
This represents the llvm.coro.suspend instruction.
CoroSaveInst * getCoroSave() const
DISubprogram * getSubprogram() const
Get the subprogram for this scope.
A parsed version of the target data layout string in and methods for querying it.
This is the common base class for debug info intrinsics for variables.
iterator find(const_arg_type_t< KeyT > Val)
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
This class represents a freeze function that returns random concrete value if an operand is either a ...
A proxy from a FunctionAnalysisManager to an SCC.
Type * getReturnType() const
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
FunctionType * getFunctionType() const
Returns the FunctionType for me.
CallingConv::ID getCallingConv() const
getCallingConv()/setCallingConv(CC) - These method get and set the calling convention of this functio...
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Module * getParent()
Get the module that this global value is contained inside of...
PointerType * getType() const
Global values are always pointers.
@ ExternalLinkage
Externally visible function.
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void setInitializer(Constant *InitVal)
setInitializer - Sets the initializer for this global variable, removing any existing initializer if ...
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
This class captures the data input to the InlineFunction call, and records the auxiliary results prod...
const Module * getModule() const
Return the module owning the function this instruction belongs to or nullptr if the function does not...
const BasicBlock * getParent() const
SymbolTableList< Instruction >::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
This is an important class for using LLVM in a threaded context.
A node in the call graph.
An SCC of the call graph.
A lazily constructed view of the call graph of a module.
void addSplitFunction(Function &OriginalFunction, Function &NewFunction)
Add a new function split/outlined from an existing function.
void addSplitRefRecursiveFunctions(Function &OriginalFunction, ArrayRef< Function * > NewFunctions)
Add new ref-recursive functions split/outlined from an existing function.
Node & get(Function &F)
Get a graph node for a given function, scanning it to populate the graph data as necessary.
SCC * lookupSCC(Node &N) const
Lookup a function's SCC in the graph.
static MDString * get(LLVMContext &Context, StringRef Str)
A Module instance is used to store all the information related to an LLVM module.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
FunctionListType::iterator iterator
The Function iterators.
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
PrettyStackTraceEntry - This class is used to represent a frame of the "pretty" stack trace that is d...
virtual void print(raw_ostream &OS) const =0
print - Emit information about this stack frame to OS.
Return a value (possibly void), from a function.
bool erase(PtrType Ptr)
erase - If the set contains the specified pointer, remove it and return true, otherwise return false.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
StringRef - Represent a constant reference to a string, i.e.
Type * getTypeAtIndex(const Value *V) const
Given an index value into the type, return the type of the element.
Analysis pass providing the TargetTransformInfo.
Value handle that tracks a Value across RAUW.
ValueTy * getValPtr() const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
PointerType * getPointerTo(unsigned AddrSpace=0) const
Return a pointer to the current type.
bool isPointerTy() const
True if this is an instance of PointerType.
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
static Type * getVoidTy(LLVMContext &C)
static IntegerType * getInt8Ty(LLVMContext &C)
static PointerType * getInt8PtrTy(LLVMContext &C, unsigned AS=0)
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
void setOperand(unsigned i, Value *Val)
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs and address space casts.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ C
The default llvm calling convention, compatible with C.
@ Switch
The "resume-switch" lowering, where there are separate resume and destroy functions that are shared b...
void salvageDebugInfo(SmallDenseMap< llvm::Value *, llvm::AllocaInst *, 4 > &DbgPtrAllocaCache, DbgVariableIntrinsic *DVI, bool OptimizeFrame)
Recover a dbg.declare prepared by the frontend and emit an alloca holding a pointer to the coroutine ...
void replaceCoroFree(CoroIdInst *CoroId, bool Elide)
CallInst * createMustTailCall(DebugLoc Loc, Function *MustTailCallFn, ArrayRef< Value * > Arguments, IRBuilder<> &)
DiagnosticInfoOptimizationBase::Argument NV
This is an optimization pass for GlobalISel generic memory operations.
void ReplaceInstWithInst(BasicBlock *BB, BasicBlock::iterator &BI, Instruction *I)
Replace the instruction specified by BI with the instruction specified by I.
bool ConstantFoldTerminator(BasicBlock *BB, bool DeleteDeadConditions=false, const TargetLibraryInfo *TLI=nullptr, DomTreeUpdater *DTU=nullptr)
If a terminator instruction is predicated on a constant value, convert it into an unconditional branc...
bool verifyFunction(const Function &F, raw_ostream *OS=nullptr)
Check a function for errors, useful for use when debugging a pass.
void salvageDebugInfo(const MachineRegisterInfo &MRI, MachineInstr &MI)
Assuming the instruction MI is going to be deleted, attempt to salvage debug users of MI by writing t...
LazyCallGraph::SCC & updateCGAndAnalysisManagerForFunctionPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
Helper to update the call graph after running a function pass.
LazyCallGraph::SCC & updateCGAndAnalysisManagerForCGSCCPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
Helper to update the call graph after running a CGSCC pass.
Constant * ConstantFoldCompareInstOperands(unsigned Predicate, Constant *LHS, Constant *RHS, const DataLayout &DL, const TargetLibraryInfo *TLI=nullptr, const Instruction *I=nullptr)
Attempt to constant fold a compare instruction (icmp/fcmp) with the specified operands.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting i...
bool isInstructionTriviallyDead(Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction is not used, and the instruction will return.
@ Async
"Asynchronous" unwind tables (instr precise)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
unsigned changeToUnreachable(Instruction *I, bool PreserveLCSSA=false, DomTreeUpdater *DTU=nullptr, MemorySSAUpdater *MSSAU=nullptr)
Insert an unreachable instruction before the specified instruction, making it and the rest of the cod...
raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
InlineResult InlineFunction(CallBase &CB, InlineFunctionInfo &IFI, bool MergeAttributes=false, AAResults *CalleeAAR=nullptr, bool InsertLifetime=true, Function *ForwardVarArgsTo=nullptr)
This function inlines the called function into the basic block of the caller.
void CloneFunctionInto(Function *NewFunc, const Function *OldFunc, ValueToValueMapTy &VMap, CloneFunctionChangeType Changes, SmallVectorImpl< ReturnInst * > &Returns, const char *NameSuffix="", ClonedCodeInfo *CodeInfo=nullptr, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Clone OldFunc into NewFunc, transforming the old arguments into references to VMap values.
auto predecessors(const MachineBasicBlock *BB)
bool removeUnreachableBlocks(Function &F, DomTreeUpdater *DTU=nullptr, MemorySSAUpdater *MSSAU=nullptr)
Remove all blocks that can not be reached from the function's entry.
bool isPotentiallyReachable(const Instruction *From, const Instruction *To, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet=nullptr, const DominatorTree *DT=nullptr, const LoopInfo *LI=nullptr)
Determine whether instruction 'To' is reachable from 'From', without passing through any blocks in Ex...
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Support structure for SCC passes to communicate updates the call graph back to the CGSCC pass manager...
SmallPriorityWorklist< LazyCallGraph::SCC *, 1 > & CWorklist
Worklist of the SCCs queued for processing.
const std::function< bool(Instruction &)> MaterializableCallback
PreservedAnalyses run(LazyCallGraph::SCC &C, CGSCCAnalysisManager &AM, LazyCallGraph &CG, CGSCCUpdateResult &UR)
CoroSplitPass(bool OptimizeFrame=false)
GlobalVariable * AsyncFuncPointer
bool IsFrameInlineInStorage
SwitchInst * ResumeSwitch
BasicBlock * ResumeEntryBlock
AsyncLoweringStorage AsyncLowering
FunctionType * getResumeFunctionType() const
IntegerType * getIndexType() const
CoroIdInst * getSwitchCoroId() const
Instruction * getInsertPtAfterFramePtr() const
SmallVector< CoroSizeInst *, 2 > CoroSizes
SmallVector< AnyCoroSuspendInst *, 4 > CoroSuspends
Value * emitAlloc(IRBuilder<> &Builder, Value *Size, CallGraph *CG) const
Allocate memory according to the rules of the active lowering.
SmallVector< CallInst *, 2 > SwiftErrorOps
ConstantInt * getIndex(uint64_t Value) const
bool OptimizeFrame
This would only be true if optimization are enabled.
SwitchLoweringStorage SwitchLowering
CoroBeginInst * CoroBegin
ArrayRef< Type * > getRetconResultTypes() const
void emitDealloc(IRBuilder<> &Builder, Value *Ptr, CallGraph *CG) const
Deallocate memory according to the rules of the active lowering.
RetconLoweringStorage RetconLowering
SmallVector< CoroAlignInst *, 2 > CoroAligns
SmallVector< AnyCoroEndInst *, 4 > CoroEnds
unsigned getSwitchIndexField() const