#define DEBUG_TYPE "objc-arc-opts"

static cl::opt<unsigned> MaxPtrStates(
    "arc-opt-max-ptr-states", cl::Hidden,
    cl::desc("Maximum number of ptr states the optimizer keeps track of"),
    cl::init(4095));

// From FindSingleUseIdentifiedObject(const Value *Arg):
  if (isa<ConstantData>(Arg))
    // ...

  if (Arg->hasOneUse()) {
    // ...
    if (GEP->hasAllZeroIndices())
      // ...
        cast<CallInst>(Arg)->getArgOperand(0));
    // ...
  }

  for (const User *U : Arg->users())
STATISTIC(NumNoops, "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets, "Number of return value forwarding "
                   "retain+autoreleases eliminated");
STATISTIC(NumRRs, "Number of retain+release paths eliminated");
STATISTIC(NumPeeps, "Number of calls peephole-optimized");
STATISTIC(NumRetainsBeforeOpt, "Number of retains before optimization");
STATISTIC(NumReleasesBeforeOpt, "Number of releases before optimization");
STATISTIC(NumRetainsAfterOpt, "Number of retains after optimization");
STATISTIC(NumReleasesAfterOpt, "Number of releases after optimization");
  unsigned TopDownPathCount = 0;
  unsigned BottomUpPathCount = 0;

  using top_down_ptr_iterator = decltype(PerPtrTopDown)::iterator;
  using const_top_down_ptr_iterator = decltype(PerPtrTopDown)::const_iterator;

  top_down_ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
  top_down_ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
  const_top_down_ptr_iterator top_down_ptr_begin() const {
    return PerPtrTopDown.begin();
  }
  const_top_down_ptr_iterator top_down_ptr_end() const {
    return PerPtrTopDown.end();
  }
  bool hasTopDownPtrs() const {
    return !PerPtrTopDown.empty();
  }
  unsigned top_down_ptr_list_size() const {
    return std::distance(top_down_ptr_begin(), top_down_ptr_end());
  }

  using bottom_up_ptr_iterator = decltype(PerPtrBottomUp)::iterator;
  using const_bottom_up_ptr_iterator =
      decltype(PerPtrBottomUp)::const_iterator;

  bottom_up_ptr_iterator bottom_up_ptr_begin() {
    return PerPtrBottomUp.begin();
  }
  bottom_up_ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
  const_bottom_up_ptr_iterator bottom_up_ptr_begin() const {
    return PerPtrBottomUp.begin();
  }
  const_bottom_up_ptr_iterator bottom_up_ptr_end() const {
    return PerPtrBottomUp.end();
  }
  bool hasBottomUpPtrs() const {
    return !PerPtrBottomUp.empty();
  }
  unsigned bottom_up_ptr_list_size() const {
    return std::distance(bottom_up_ptr_begin(), bottom_up_ptr_end());
  }

  void SetAsEntry() { TopDownPathCount = 1; }
  void SetAsExit() { BottomUpPathCount = 1; }
  TopDownPtrState &getPtrTopDownState(const Value *Arg) {
    return PerPtrTopDown[Arg];
  }

  BottomUpPtrState &getPtrBottomUpState(const Value *Arg) {
    return PerPtrBottomUp[Arg];
  }

  bottom_up_ptr_iterator findPtrBottomUpState(const Value *Arg) {
    return PerPtrBottomUp.find(Arg);
  }

  void clearBottomUpPointers() {
    PerPtrBottomUp.clear();
  }

  void clearTopDownPointers() {
    PerPtrTopDown.clear();
  }

  void InitFromPred(const BBState &Other);
  void InitFromSucc(const BBState &Other);
  void MergePred(const BBState &Other);
  void MergeSucc(const BBState &Other);

  bool GetAllPathCountWithOverflow(unsigned &PathCount) const {
    // ...
    unsigned long long Product =
        (unsigned long long)TopDownPathCount * BottomUpPathCount;
    // ...
    return (Product >> 32) ||
           ((PathCount = Product) == OverflowOccurredValue);
  }

  edge_iterator pred_end() const { return Preds.end(); }
  edge_iterator succ_end() const { return Succs.end(); }

  bool isExit() const { return Succs.empty(); }
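// InitFromPred/InitFromSucc seed a block's state from a single neighbor;
// MergePred/MergeSucc fold in further neighbors, saturating the path count to
// OverflowOccurredValue and clearing the per-pointer map if it overflows.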
void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}

void BBState::MergePred(const BBState &Other) {
  if (TopDownPathCount == OverflowOccurredValue)
    return;

  TopDownPathCount += Other.TopDownPathCount;
  // ...
  if (TopDownPathCount == OverflowOccurredValue) {
    clearTopDownPointers();
    return;
  }
  // ...
  if (TopDownPathCount < Other.TopDownPathCount) {
    TopDownPathCount = OverflowOccurredValue;
    clearTopDownPointers();
    return;
  }
  // ...
  for (auto MI = Other.top_down_ptr_begin(), ME = Other.top_down_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrTopDown.insert(*MI);
    // ...
  }
  // ...
  for (auto MI = top_down_ptr_begin(), ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      // ...
}

void BBState::MergeSucc(const BBState &Other) {
  if (BottomUpPathCount == OverflowOccurredValue)
    return;

  BottomUpPathCount += Other.BottomUpPathCount;
  // ...
  if (BottomUpPathCount == OverflowOccurredValue) {
    clearBottomUpPointers();
    return;
  }
  // ...
  if (BottomUpPathCount < Other.BottomUpPathCount) {
    BottomUpPathCount = OverflowOccurredValue;
    clearBottomUpPointers();
    return;
  }
  // ...
  for (auto MI = Other.bottom_up_ptr_begin(), ME = Other.bottom_up_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrBottomUp.insert(*MI);
    // ...
  }
  // ...
  for (auto MI = bottom_up_ptr_begin(), ME = bottom_up_ptr_end(); MI != ME;
       ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      // ...
}
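// Debug printer used when a BBState is streamed to a raw_ostream
// (e.g. dbgs() << BBStates[BB] in the Visit* functions below).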
  OS << "  TopDown State:\n";
  if (!BBInfo.hasTopDownPtrs()) {
    // ...
  }
  for (auto I = BBInfo.top_down_ptr_begin(), E = BBInfo.top_down_ptr_end();
       I != E; ++I) {
    const PtrState &P = I->second;
    OS << "    Ptr: " << *I->first
       << "\n    KnownSafe:        " << (P.IsKnownSafe() ? "true" : "false")
       << "\n    ImpreciseRelease: "
       << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
       << "    HasCFGHazards:    "
       << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
       << "    KnownPositive:    "
       << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
       // ...
       << P.GetSeq() << "\n";
  }

  OS << "  BottomUp State:\n";
  if (!BBInfo.hasBottomUpPtrs()) {
    // ...
  }
  for (auto I = BBInfo.bottom_up_ptr_begin(), E = BBInfo.bottom_up_ptr_end();
       I != E; ++I) {
    const PtrState &P = I->second;
    OS << "    Ptr: " << *I->first
       << "\n    KnownSafe:        " << (P.IsKnownSafe() ? "true" : "false")
       << "\n    ImpreciseRelease: "
       << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
       << "    HasCFGHazards:    "
       << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
       << "    KnownPositive:    "
       << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
       // ...
       << P.GetSeq() << "\n";
  }
  bool Changed = false;
  bool CFGChanged = false;
  // ...
  bool DisableRetainReleasePairing = false;
  // ...
  unsigned UsedInThisFunction;
  // ...
  void OptimizeIndividualCalls(Function &F);
  // ...
                                        const Value *&AutoreleaseRVArg);
  // ...
                          BBState &MyStates) const;
  // ...
  bool VisitInstructionTopDown(
      // ...
          &ReleaseInsertPtToRCIdentityRoots);
  // ...
          &ReleaseInsertPtToRCIdentityRoots);
  // ...
                                 bool &AnyPairsCompletelyEliminated);
  // ...
  template <typename PredicateT>
  static void cloneOpBundlesIf(CallBase *CI,
                               // ...
  // ...
    if (!BlockEHColors.empty()) {
      // ...
      if (auto *EHPad = dyn_cast<FuncletPadInst>(EHPadBB->getFirstNonPHI())) {
        // ...
  // ...
  void GatherStatistics(Function &F, bool AfterOptimization = false);
  // ...
  bool hasCFGChanged() const { return CFGChanged; }
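// OptimizeRetainRVCall (excerpt): an objc_retainAutoreleasedReturnValue whose
// operand is not actually a return value is downgraded to a plain objc_retain.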
  if (II->getNormalDest() == RetainRVParent) {
    // ...
         "a bundled retainRV's argument should be a call");
  // ...
  LLVM_DEBUG(dbgs() << "Transforming objc_retainAutoreleasedReturnValue => "
                       "objc_retain since the operand is not a return value.\n"
                       // ...
  Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Retain);
  cast<CallInst>(RetainRV)->setCalledFunction(NewDecl);
bool ObjCARCOpt::OptimizeInlinedAutoreleaseRVCall(
    // ...
  if (Arg != AutoreleaseRVArg) {
    // ...
  LLVM_DEBUG(dbgs() << "Found inlined objc_autoreleaseReturnValue '"
                       // ...
  if (Class == ARCInstKind::RetainRV) {
    // ...
  }
  assert(Class == ARCInstKind::UnsafeClaimRV);
  Value *CallArg = cast<CallInst>(Inst)->getArgOperand(0);
  CallInst *Release = CallInst::Create(
      EP.get(ARCRuntimeEntryPointKind::Release), CallArg, "", Inst);
  // ...
         "Expected UnsafeClaimRV to be safe to tail call");
  // ...
  OptimizeIndividualCallImpl(F, Release, ARCInstKind::Release, Arg);
void ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F,
                                           // ...
  if (isa<ConstantData>(Ptr))
    // ...
  if (const PHINode *PN = dyn_cast<PHINode>(Ptr))
    // ...
    for (const User *U : Ptr->users()) {
      // ...
      if (isa<BitCastInst>(U))
        // ...
    }
  } while (!Users.empty());

  LLVM_DEBUG(
      dbgs() << "Transforming objc_autoreleaseReturnValue => "
                "objc_autorelease since its operand is not used as a return "
                // ...
  Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Autorelease);
  // ...
  Class = ARCInstKind::Autorelease;
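// OptimizeIndividualCalls (excerpt): walks every instruction once, holding a
// pending objc_autoreleaseReturnValue in DelayedAutoreleaseRV so it can first
// be paired with a following retainRV/unsafeClaimRV from the same inlined
// call sequence via OptimizeInlinedAutoreleaseRVCall.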
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeIndividualCalls ==\n");
  // ...
  UsedInThisFunction = 0;

  // ...
  const Value *DelayedAutoreleaseRVArg = nullptr;
  // ...
    DelayedAutoreleaseRVArg = nullptr;
  // ...
  auto optimizeDelayedAutoreleaseRV = [&]() {
    if (!DelayedAutoreleaseRV)
      return;
    OptimizeIndividualCallImpl(F, DelayedAutoreleaseRV,
                               ARCInstKind::AutoreleaseRV,
                               DelayedAutoreleaseRVArg);
    setDelayedAutoreleaseRV(nullptr);
  };
  auto shouldDelayAutoreleaseRV = [&](Instruction *NonARCInst) {
    // ...
    if (!DelayedAutoreleaseRV)
      // ...
    if (NonARCInst->isTerminator())
      // ...
    auto *CB = dyn_cast<CallBase>(NonARCInst);
    // ...
  };

  // ...
    if (auto *CI = dyn_cast<CallInst>(Inst))
      // ...
        optimizeDelayedAutoreleaseRV();
    // ...
    case ARCInstKind::CallOrUser:
    case ARCInstKind::User:
    case ARCInstKind::None:
      // ...
      if (!shouldDelayAutoreleaseRV(Inst))
        optimizeDelayedAutoreleaseRV();
      // ...
    case ARCInstKind::AutoreleaseRV:
      optimizeDelayedAutoreleaseRV();
      setDelayedAutoreleaseRV(Inst);
      // ...
    case ARCInstKind::RetainRV:
    case ARCInstKind::UnsafeClaimRV:
      if (DelayedAutoreleaseRV) {
        // ...
        if (OptimizeInlinedAutoreleaseRVCall(F, Inst, Arg, Class,
                                             DelayedAutoreleaseRV,
                                             DelayedAutoreleaseRVArg)) {
          setDelayedAutoreleaseRV(nullptr);
          // ...
        optimizeDelayedAutoreleaseRV();
      // ...

    OptimizeIndividualCallImpl(F, Inst, Class, Arg);
  // ...
  optimizeDelayedAutoreleaseRV();
}
// From isInertARCValue(Value *V, SmallPtrSet<Value *, 1> &VisitedPhis):
  V = V->stripPointerCasts();
  // ...
  if (auto *GV = dyn_cast<GlobalVariable>(V))
    if (GV->hasAttribute("objc_arc_inert"))
      // ...
  if (auto PN = dyn_cast<PHINode>(V)) {
    // ...
    if (!VisitedPhis.insert(PN).second)
      // ...
  }

// From ObjCARCOpt::OptimizeIndividualCallImpl:
  LLVM_DEBUG(dbgs() << "Visiting: Class: " << Class << "; " << *Inst << "\n");
  // ...
  UsedInThisFunction |= 1 << unsigned(Class);
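// The switch below peephole-optimizes a single call in
// OptimizeIndividualCallImpl: no-op casts, null weak-pointer handling,
// autorelease-to-release rewrites, tail/nounwind marker fixups, and cloning of
// calls into predecessors when the argument is null only on some incoming
// CFG edges.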
  case ARCInstKind::NoopCast:
    // ...

  case ARCInstKind::StoreWeak:
  case ARCInstKind::LoadWeak:
  case ARCInstKind::LoadWeakRetained:
  case ARCInstKind::InitWeak:
  case ARCInstKind::DestroyWeak: {
    CallInst *CI = cast<CallInst>(Inst);
    // ...
        dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
               // ...
               << *CI << "\nNew = " << *NewValue << "\n");
    // ...
  }
  case ARCInstKind::CopyWeak:
  case ARCInstKind::MoveWeak: {
    CallInst *CI = cast<CallInst>(Inst);
    // ...
        dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
               // ...
               << *CI << "\nNew = " << *NewValue << "\n");
    // ...
  }
  case ARCInstKind::RetainRV:
    if (OptimizeRetainRVCall(F, Inst))
      return;
    break;
  case ARCInstKind::AutoreleaseRV:
    OptimizeAutoreleaseRVCall(F, Inst, Class);
    break;
  // ...

  // autorelease(x) -> release(x) when x is otherwise unused:
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Release);
    // ...
    NewCall->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease),
                         // ...
    LLVM_DEBUG(dbgs() << "Replacing autorelease{,RV}(x) with objc_release(x) "
                         "since x is otherwise unused.\nOld: "
                      << *Call << "\nNew: " << *NewCall << "\n");
    // ...
    Class = ARCInstKind::Release;
  // ...

  if (IsAlwaysTail(Class) && !cast<CallInst>(Inst)->isNoTailCall()) {
    // ...
    LLVM_DEBUG(dbgs() << "Adding tail keyword to function since it can never "
                         "be passed stack args: "
                      << *Inst << "\n");
    cast<CallInst>(Inst)->setTailCall();
  }
  // ...
    LLVM_DEBUG(dbgs() << "Removing tail keyword from function: " << *Inst
                      << "\n");
    cast<CallInst>(Inst)->setTailCall(false);
  // ...
    LLVM_DEBUG(dbgs() << "Found no throw class. Setting nounwind on: " << *Inst
                      << "\n");
    cast<CallInst>(Inst)->setDoesNotThrow();
  // ...

  UsedInThisFunction |= 1 << unsigned(Class);
  // ...
    LLVM_DEBUG(dbgs() << "ARC calls with null are no-ops. Erasing: " << *Inst
                      << "\n");
  // ...
  UsedInThisFunction |= 1 << unsigned(Class);

  // ...
  if (Class == ARCInstKind::Release &&
      !Inst->getMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease)))
    // ...

  // Clone the call into each predecessor where its argument is known non-null.
  do {
    std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
    // ...
    bool HasNull = false;
    bool HasCriticalEdges = false;
    // ...
        HasCriticalEdges = true;
    // ...
    if (HasCriticalEdges)
      // ...
    case ARCInstKind::Retain:
    case ARCInstKind::RetainBlock:
      // ...
    case ARCInstKind::Release:
      // ...
    case ARCInstKind::Autorelease:
      // ...
    case ARCInstKind::UnsafeClaimRV:
    case ARCInstKind::RetainRV:
    case ARCInstKind::AutoreleaseRV:
      // ...

    CallInst *CInst = cast<CallInst>(Inst);
    // ...
      addOpBundleForFunclet(InsertPos->getParent(), OpBundles);
      // ...
      if (Op->getType() != ParamTy)
        // ...
                           "And inserting clone at "
                        << *InsertPos << "\n");
      Worklist.push_back(std::make_pair(Clone, Incoming));
    // ...
  } while (!Worklist.empty());
static void CheckForUseCFGHazard(const Sequence SuccSSeq,
                                 const bool SuccSRRIKnownSafe,
                                 TopDownPtrState &S,
                                 bool &SomeSuccHasSame,
                                 bool &AllSuccsHaveSame,
                                 bool &NotAllSeqEqualButKnownSafe,
                                 bool &ShouldContinue) {
  // ...
      ShouldContinue = true;
  // ...
      SomeSuccHasSame = true;
  // ...
      AllSuccsHaveSame = false;
  // ...
      NotAllSeqEqualButKnownSafe = true;
  // ...
}

static void CheckForCanReleaseCFGHazard(const Sequence SuccSSeq,
                                        const bool SuccSRRIKnownSafe,
                                        TopDownPtrState &S,
                                        bool &SomeSuccHasSame,
                                        bool &AllSuccsHaveSame,
                                        bool &NotAllSeqEqualButKnownSafe) {
  // ...
    SomeSuccHasSame = true;
  // ...
    AllSuccsHaveSame = false;
  // ...
    NotAllSeqEqualButKnownSafe = true;
  // ...
}
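// CheckForCFGHazards (excerpt): after the top-down scan of a block, each
// pointer's sequence is compared against all successors so that retain/release
// pairs that would be moved across a CFG hazard are flagged instead.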
void ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
                                    // ...
                                    BBState &MyStates) const {
  // ...
  for (auto I = MyStates.top_down_ptr_begin(), E = MyStates.top_down_ptr_end();
       I != E; ++I) {
    // ...
    const Sequence Seq = I->second.GetSeq();
    // ...
           "Unknown top down sequence state.");
    // ...
    bool SomeSuccHasSame = false;
    bool AllSuccsHaveSame = true;
    bool NotAllSeqEqualButKnownSafe = false;
    // ...
      BBStates.find(Succ);
      // ...
      if (SuccSSeq == S_None) {
        // ...
      }
      const bool SuccSRRIKnownSafe = SuccS.IsKnownSafe();
      // ...
        bool ShouldContinue = false;
        CheckForUseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S, SomeSuccHasSame,
                             AllSuccsHaveSame, NotAllSeqEqualButKnownSafe,
                             ShouldContinue);
      // ...
        CheckForCanReleaseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S,
                                    SomeSuccHasSame, AllSuccsHaveSame,
                                    NotAllSeqEqualButKnownSafe);
      // ...

    if (SomeSuccHasSame && !AllSuccsHaveSame) {
      // ...
    } else if (NotAllSeqEqualButKnownSafe) {
      // ...
    }
  }
bool ObjCARCOpt::VisitInstructionBottomUp(
    // ...
    BBState &MyStates) {
  bool NestingDetected = false;
  // ...
  case ARCInstKind::Release: {
    // ...
  }
  case ARCInstKind::RetainBlock:
    // ...
  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {
    // ...
    if (Class != ARCInstKind::RetainRV) {
      // ...
    }
  }
  case ARCInstKind::AutoreleasepoolPop:
    // ...
    MyStates.clearBottomUpPointers();
    return NestingDetected;
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // ...
    return NestingDetected;
  // ...

  for (auto MI = MyStates.bottom_up_ptr_begin(),
            ME = MyStates.bottom_up_ptr_end();
       // ...

  return NestingDetected;
}
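// VisitBottomUp (excerpt): merges the states of all successors, then scans the
// block's instructions in reverse, giving up on retain/release pairing if the
// number of tracked pointer states exceeds MaxPtrStates.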
bool ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
                               // ...
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each successor to compute the initial state for the
  // current block.
  BBState::edge_iterator SI(MyStates.succ_begin()),
                         SE(MyStates.succ_end());
  // ...
    MyStates.InitFromSucc(I->second);
    // ...
    for (; SI != SE; ++SI) {
      // ...
      I = BBStates.find(Succ);
      // ...
      MyStates.MergeSucc(I->second);
    }
  // ...
                    << BBStates[BB] << "\n"
                    << "Performing Dataflow:\n");
  // ...
    if (isa<InvokeInst>(Inst))
      // ...
    NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);

    if (MyStates.bottom_up_ptr_list_size() > MaxPtrStates) {
      DisableRetainReleasePairing = true;
      return false;
    }
  // ...

  for (BBState::edge_iterator PI(MyStates.pred_begin()),
                              PE(MyStates.pred_end()); PI != PE; ++PI) {
    // ...
    NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
  }

  LLVM_DEBUG(dbgs() << "\nFinal State:\n" << BBStates[BB] << "\n");

  return NestingDetected;
}
// From collectReleaseInsertPts(const BlotMapVector<Value *, RRInfo> &Retains, ...):
        &ReleaseInsertPtToRCIdentityRoots) {
  for (const auto &P : Retains) {
    // ...
    for (const Instruction *InsertPt : P.second.ReverseInsertPts)
      ReleaseInsertPtToRCIdentityRoots[InsertPt].insert(Root);
  }
}

// From getRCIdentityRootsFromReleaseInsertPt(const Instruction *InsertPt, ...):
        &ReleaseInsertPtToRCIdentityRoots) {
  auto I = ReleaseInsertPtToRCIdentityRoots.find(InsertPt);
  if (I == ReleaseInsertPtToRCIdentityRoots.end())
    // ...
bool ObjCARCOpt::VisitInstructionTopDown(
    // ...
    &ReleaseInsertPtToRCIdentityRoots) {
  bool NestingDetected = false;
  // ...
          Inst, ReleaseInsertPtToRCIdentityRoots))
    for (const auto *Root : *Roots) {
      // ...
    }
  // ...
  case ARCInstKind::RetainBlock:
    // ...
  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {
    // ...
  }
  case ARCInstKind::Release: {
    // ...
  }
  case ARCInstKind::AutoreleasepoolPop:
    // ...
    MyStates.clearTopDownPointers();
    // ...
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // ...

  for (auto MI = MyStates.top_down_ptr_begin(),
            ME = MyStates.top_down_ptr_end();
       // ...

  return NestingDetected;
}
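// VisitTopDown (excerpt): the forward-direction counterpart of VisitBottomUp;
// it merges predecessor states, scans the instructions, and finally runs
// CheckForCFGHazards on the resulting state.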
bool ObjCARCOpt::VisitTopDown(
    // ...
    &ReleaseInsertPtToRCIdentityRoots) {
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each predecessor to compute the initial state for
  // the current block.
  BBState::edge_iterator PI(MyStates.pred_begin()),
                         PE(MyStates.pred_end());
  // ...
    MyStates.InitFromPred(I->second);
    // ...
    for (; PI != PE; ++PI) {
      // ...
      I = BBStates.find(Pred);
      // ...
      MyStates.MergePred(I->second);
    }
  // ...
    for (auto I = MyStates.top_down_ptr_begin(),
              E = MyStates.top_down_ptr_end();
         // ...
      I->second.SetCFGHazardAfflicted(true);
  // ...
                    << BBStates[BB] << "\n"
                    << "Performing Dataflow:\n");
  // ...
    NestingDetected |= VisitInstructionTopDown(
        &Inst, Releases, MyStates, ReleaseInsertPtToRCIdentityRoots);

    if (MyStates.top_down_ptr_list_size() > MaxPtrStates) {
      DisableRetainReleasePairing = true;
      return false;
    }
  // ...

  LLVM_DEBUG(dbgs() << "\nState Before Checking for CFG Hazards:\n"
                    << BBStates[BB] << "\n\n");
  CheckForCFGHazards(BB, BBStates, MyStates);
  // ...
  return NestingDetected;
}
// From ComputePostOrders(Function &F, ..., unsigned NoObjCARCExceptionsMDKind,
//                        DenseMap<const BasicBlock *, BBState> &BBStates):
    unsigned NoObjCARCExceptionsMDKind,
    // ...
  BBState &MyStates = BBStates[EntryBB];
  MyStates.SetAsEntry();
  // ...
    while (SuccStack.back().second != SE) {
      // ...
      if (Visited.insert(SuccBB).second) {
        // ...
        BBStates[CurrBB].addSucc(SuccBB);
        BBState &SuccStates = BBStates[SuccBB];
        SuccStates.addPred(CurrBB);
        // ...
      }
      // ...
      if (!OnStack.count(SuccBB)) {
        BBStates[CurrBB].addSucc(SuccBB);
        BBStates[SuccBB].addPred(CurrBB);
      }
      // ...
    }
    // ...
    OnStack.erase(CurrBB);
    // ...
  } while (!SuccStack.empty());

  // ...
  for (BasicBlock &ExitBB : F) {
    BBState &MyStates = BBStates[&ExitBB];
    if (!MyStates.isExit())
      continue;

    MyStates.SetAsExit();
    // ...
    PredStack.push_back(std::make_pair(&ExitBB, MyStates.pred_begin()));
    // ...
    while (!PredStack.empty()) {
    reverse_dfs_next_succ:
      BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
      while (PredStack.back().second != PE) {
        // ...
        if (Visited.insert(BB).second) {
          // ...
          goto reverse_dfs_next_succ;
        }
        // ...
      }
      // ...
    }
  }
}
// From ObjCARCOpt::Visit:
  // ...
                    MDKindCache.get(ARCMDKindID::NoObjCARCExceptions),
                    // ...

  // Use reverse-postorder on the reverse CFG for bottom-up.
  bool BottomUpNestingDetected = false;
  // ...
    BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains);
    if (DisableRetainReleasePairing)
      return false;
  // ...
      ReleaseInsertPtToRCIdentityRoots;
  // ...

  // Use reverse-postorder for top-down.
  bool TopDownNestingDetected = false;
  // ...
    TopDownNestingDetected |=
        VisitTopDown(BB, BBStates, Releases, ReleaseInsertPtToRCIdentityRoots);
    if (DisableRetainReleasePairing)
      return false;
  // ...

  return TopDownNestingDetected && BottomUpNestingDetected;
}
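// MoveCalls (excerpt): materializes new objc_retain/objc_release calls at the
// computed insertion points (adding funclet operand bundles where required)
// and deletes the original paired calls from the Retains/Releases maps.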
  // Insert the new retain calls.
    Value *MyArg = ArgTy == ParamTy ? Arg :
                   // ...
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
    // ...
    addOpBundleForFunclet(InsertPt->getParent(), BundleList);
    // ...
    Call->setDoesNotThrow();
    Call->setTailCall();
    // ...
                         "At insertion point: "
                      << *InsertPt << "\n");

  // Insert the new release calls.
    Value *MyArg = ArgTy == ParamTy ? Arg :
                   // ...
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Release);
    // ...
    addOpBundleForFunclet(InsertPt->getParent(), BundleList);
    // ...
    Call->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease), M);
    Call->setDoesNotThrow();
    // ...
      Call->setTailCall();
    // ...
                         "At insertion point: "
                      << *InsertPt << "\n");

  // Delete the original retain and release calls.
    Retains.blot(OrigRetain);
    // ...
    LLVM_DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n");
    // ...
    Releases.erase(OrigRelease);
    // ...
    LLVM_DEBUG(dbgs() << "Deleting release: " << *OrigRelease << "\n");
bool ObjCARCOpt::PairUpRetainsAndReleases(
    // ...
    bool &AnyPairsCompletelyEliminated) {
  // ...
  bool KnownSafeTD = true, KnownSafeBU = true;
  bool CFGHazardAfflicted = false;
  // ...
  unsigned OldDelta = 0;
  unsigned NewDelta = 0;
  unsigned OldCount = 0;
  unsigned NewCount = 0;
  bool FirstRelease = true;
  // ...
      auto It = Retains.find(NewRetain);
      // ...
      const RRInfo &NewRetainRRI = It->second;
      // ...
        auto Jt = Releases.find(NewRetainRelease);
        if (Jt == Releases.end())
          return false;
        const RRInfo &NewRetainReleaseRRI = Jt->second;
        // ...
        if (!NewRetainReleaseRRI.Calls.count(NewRetain))
          return false;

        if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {
          // ...
          const BBState &NRRBBState = BBStates[NewRetainRelease->getParent()];
          // ...
          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
            return false;
          assert(PathCount != BBState::OverflowOccurredValue &&
                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta -= PathCount;
          // ...
            FirstRelease = false;
          // ...
            const BBState &RIPBBState = BBStates[RIP->getParent()];
            // ...
            if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
              return false;
            assert(PathCount != BBState::OverflowOccurredValue &&
                   "PathCount at this point can not be "
                   "OverflowOccurredValue.");
            NewDelta -= PathCount;
          // ...
          NewReleases.push_back(NewRetainRelease);
        }
    // ...
    if (NewReleases.empty())
      break;
    // ...
      auto It = Releases.find(NewRelease);
      // ...
      const RRInfo &NewReleaseRRI = It->second;
      // ...
        auto Jt = Retains.find(NewReleaseRetain);
        if (Jt == Retains.end())
          return false;
        const RRInfo &NewReleaseRetainRRI = Jt->second;
        // ...
        if (!NewReleaseRetainRRI.Calls.count(NewRelease))
          return false;

        if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {
          // ...
          const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
          // ...
          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
            return false;
          assert(PathCount != BBState::OverflowOccurredValue &&
                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta += PathCount;
          OldCount += PathCount;
          // ...
            const BBState &RIPBBState = BBStates[RIP->getParent()];
            // ...
            if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
              return false;
            assert(PathCount != BBState::OverflowOccurredValue &&
                   "PathCount at this point can not be "
                   "OverflowOccurredValue.");
            NewDelta += PathCount;
            NewCount += PathCount;
          // ...
          NewRetains.push_back(NewReleaseRetain);
        }
    // ...
    if (NewRetains.empty())
      break;
  // ...

  bool UnconditionallySafe = KnownSafeTD && KnownSafeBU;
  if (UnconditionallySafe) {
    // ...
  }
  // ...
  const bool WillPerformCodeMotion =
      // ...
  if (CFGHazardAfflicted && WillPerformCodeMotion)
    return false;
  // ...

  assert(OldCount != 0 && "Unreachable code?");
  NumRRs += OldCount - NewCount;
  // ...
  AnyPairsCompletelyEliminated = NewCount == 0;
  // ...
}
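// PerformCodePlacement (excerpt): for each identified retain, attempts to pair
// it with releases and, when PairUpRetainsAndReleases succeeds, rewrites the
// code via MoveCalls.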
bool ObjCARCOpt::PerformCodePlacement(
    // ...
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::PerformCodePlacement ==\n");

  bool AnyPairsCompletelyEliminated = false;
  // ...
    bool KnownSafe = isa<Constant>(Arg) || isa<AllocaInst>(Arg);
    // ...
            dyn_cast<GlobalVariable>(
                // ...
          if (GV->isConstant())
            // ...

    RRInfo RetainsToMove, ReleasesToMove;

    bool PerformMoveCalls = PairUpRetainsAndReleases(
        BBStates, Retains, Releases, M, Retain, DeadInsts,
        RetainsToMove, ReleasesToMove, Arg, KnownSafe,
        AnyPairsCompletelyEliminated);

    if (PerformMoveCalls) {
      // ...
      MoveCalls(Arg, RetainsToMove, ReleasesToMove,
                Retains, Releases, DeadInsts, M);
    }
  // ...

  // Delete the newly dead instructions, which are no longer needed as insert
  // points.
  while (!DeadInsts.empty())
    // ...

  return AnyPairsCompletelyEliminated;
}
void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
  // ...
    if (Class != ARCInstKind::LoadWeak &&
        Class != ARCInstKind::LoadWeakRetained)
      continue;

    // Delete objc_loadWeak calls with no users.
    if (Class == ARCInstKind::LoadWeak && Inst->use_empty()) {
      // ...
    }
    // ...
      switch (EarlierClass) {
      case ARCInstKind::LoadWeak:
      case ARCInstKind::LoadWeakRetained: {
        // ...
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        // ...
          if (Class == ARCInstKind::LoadWeakRetained) {
            // ...
            Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
            // ...
          }
          // ...
          Call->replaceAllUsesWith(EarlierCall);
          Call->eraseFromParent();
          // ...
      }
      case ARCInstKind::StoreWeak:
      case ARCInstKind::InitWeak: {
        // ...
        CallInst *EarlierCall = cast<CallInst>(EarlierInst);
        // ...
          if (Class == ARCInstKind::LoadWeakRetained) {
            // ...
            Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
            // ...
          }
          // ...
          Call->eraseFromParent();
          // ...
      }
      case ARCInstKind::MoveWeak:
      case ARCInstKind::CopyWeak:
        // ...
      case ARCInstKind::AutoreleasepoolPush:
      case ARCInstKind::None:
      case ARCInstKind::IntrinsicUser:
      case ARCInstKind::User:
        // ...
      }

  // Then, check whether each objc_destroyWeak with an alloca operand, plus all
  // the alloca's other users, can be deleted.
  // ...
    if (Class != ARCInstKind::DestroyWeak)
      continue;
    // ...
      const Instruction *UserInst = cast<Instruction>(U);
      // ...
      case ARCInstKind::InitWeak:
      case ARCInstKind::StoreWeak:
      case ARCInstKind::DestroyWeak:
        // ...
    // ...
      CallInst *UserInst = cast<CallInst>(U);
      // ...
      case ARCInstKind::InitWeak:
      case ARCInstKind::StoreWeak:
        // ...
      case ARCInstKind::DestroyWeak:
        // ...
    // ...
    Alloca->eraseFromParent();
  // ...
}
bool ObjCARCOpt::OptimizeSequences(Function &F) {
  // ...
  bool NestingDetected = Visit(F, BBStates, Retains, Releases);

  if (DisableRetainReleasePairing)
    return false;

  // Transform.
  bool AnyPairsCompletelyEliminated = PerformCodePlacement(BBStates, Retains,
                                                           // ...
  return AnyPairsCompletelyEliminated && NestingDetected;
}
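// OptimizeReturns and its helpers (excerpt): for functions returning a
// pointer, look for a retain+autorelease of the returned value with a safe
// path between the calls, in which case the pair can be eliminated.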
// From HasSafePathToPredecessorCall (check that the call is a regular call):
  return Class == ARCInstKind::CallOrUser || Class == ARCInstKind::Call
             // ...

// From FindPredecessorRetainWithSafePath:
  auto *Retain = dyn_cast_or_null<CallInst>(
      // ...

void ObjCARCOpt::OptimizeReturns(Function &F) {
  if (!F.getReturnType()->isPointerTy())
    return;
  // ...
        (!Call->isTailCall() &&
         // ...
void ObjCARCOpt::GatherStatistics(Function &F, bool AfterOptimization) {
  Statistic &NumRetains =
      AfterOptimization ? NumRetainsAfterOpt : NumRetainsBeforeOpt;
  Statistic &NumReleases =
      AfterOptimization ? NumReleasesAfterOpt : NumReleasesBeforeOpt;
  // ...
    case ARCInstKind::Retain:
      // ...
    case ARCInstKind::Release:
      // ...
}

// From ObjCARCOpt::run:
  // ...
  MDKindCache.init(F.getParent());

  // Initialize the ARC runtime entry point cache.
  EP.init(F.getParent());

  // ...
  if (F.hasPersonalityFn() &&
      // ...

  Changed = CFGChanged = false;
  // ...
  BundledInsts = &BRV;

  LLVM_DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName()
                    // ...
  // ...
  CFGChanged |= R.second;

  // Gather statistics before optimization.
  // ...
    GatherStatistics(F, false);
  // ...

  // This pass performs several distinct transformations. The later phases are
  // skipped when the relevant ARC call classes were not seen while optimizing
  // individual calls.
  OptimizeIndividualCalls(F);

  // Optimizations for weak pointers.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::LoadWeak)) |
                            (1 << unsigned(ARCInstKind::LoadWeakRetained)) |
                            (1 << unsigned(ARCInstKind::StoreWeak)) |
                            (1 << unsigned(ARCInstKind::InitWeak)) |
                            (1 << unsigned(ARCInstKind::CopyWeak)) |
                            (1 << unsigned(ARCInstKind::MoveWeak)) |
                            (1 << unsigned(ARCInstKind::DestroyWeak))))
    OptimizeWeakCalls(F);

  // Optimizations for retain+release pairs.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Retain)) |
                            (1 << unsigned(ARCInstKind::RetainRV)) |
                            (1 << unsigned(ARCInstKind::RetainBlock))))
    if (UsedInThisFunction & (1 << unsigned(ARCInstKind::Release)))
      // Run OptimizeSequences until it either stops making changes or no
      // retain+release pair nesting is detected.
      while (OptimizeSequences(F)) {}

  // Optimizations if objc_autorelease is used.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Autorelease)) |
                            (1 << unsigned(ARCInstKind::AutoreleaseRV))))
    OptimizeReturns(F);

  // Gather statistics after optimization.
  // ...
    GatherStatistics(F, true);
  // ...

// From ObjCARCOptPass::run(Function &F, FunctionAnalysisManager &AM):
  // ...
  bool CFGChanged = OCAO.hasCFGChanged();
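// A minimal usage sketch (assumed, not part of this file): the optimizer is
// exposed to the new pass manager as ObjCARCOptPass, so a pipeline would
// typically schedule it as a function pass, e.g.:
//
//   #include "llvm/Transforms/ObjCARC.h"   // assumed header location
//   llvm::FunctionPassManager FPM;
//   FPM.addPass(llvm::ObjCARCOptPass());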