#define DEBUG_TYPE "objc-arc-opts"

static cl::opt<unsigned> MaxPtrStates(
    "arc-opt-max-ptr-states", cl::Hidden,
    cl::desc("Maximum number of ptr states the optimizer keeps track of"),
    cl::init(4095));

  // In FindSingleUseIdentifiedObject: a GEP with all-zero indices does not
  // change the pointer, so keep looking through it.
  if (GEP->hasAllZeroIndices())
STATISTIC(NumNoops,       "Number of no-op objc calls eliminated");
STATISTIC(NumPartialNoops, "Number of partially no-op objc calls eliminated");
STATISTIC(NumAutoreleases, "Number of autoreleases converted to releases");
STATISTIC(NumRets,        "Number of return value forwarding "
                          "retain+autoreleases eliminated");
STATISTIC(NumRRs,         "Number of retain+release paths eliminated");
STATISTIC(NumPeeps,       "Number of calls peephole-optimized");
STATISTIC(NumRetainsBeforeOpt, "Number of retains before optimization");
STATISTIC(NumReleasesBeforeOpt, "Number of releases before optimization");
STATISTIC(NumRetainsAfterOpt, "Number of retains after optimization");
STATISTIC(NumReleasesAfterOpt, "Number of releases after optimization");
  // The number of unique control paths from the entry that can reach this
  // block.
  unsigned TopDownPathCount = 0;

  // The number of unique control paths from this block to an exit.
  unsigned BottomUpPathCount = 0;

  using top_down_ptr_iterator = decltype(PerPtrTopDown)::iterator;
  using const_top_down_ptr_iterator = decltype(PerPtrTopDown)::const_iterator;

  top_down_ptr_iterator top_down_ptr_begin() { return PerPtrTopDown.begin(); }
  top_down_ptr_iterator top_down_ptr_end() { return PerPtrTopDown.end(); }
  const_top_down_ptr_iterator top_down_ptr_begin() const {
    return PerPtrTopDown.begin();
  }
  const_top_down_ptr_iterator top_down_ptr_end() const {
    return PerPtrTopDown.end();
  }
  bool hasTopDownPtrs() const {
    return !PerPtrTopDown.empty();
  }
  unsigned top_down_ptr_list_size() const {
    return std::distance(top_down_ptr_begin(), top_down_ptr_end());
  }
  using bottom_up_ptr_iterator = decltype(PerPtrBottomUp)::iterator;
  using const_bottom_up_ptr_iterator = decltype(PerPtrBottomUp)::const_iterator;

  bottom_up_ptr_iterator bottom_up_ptr_begin() {
    return PerPtrBottomUp.begin();
  }
  bottom_up_ptr_iterator bottom_up_ptr_end() { return PerPtrBottomUp.end(); }
  const_bottom_up_ptr_iterator bottom_up_ptr_begin() const {
    return PerPtrBottomUp.begin();
  }
  const_bottom_up_ptr_iterator bottom_up_ptr_end() const {
    return PerPtrBottomUp.end();
  }
  bool hasBottomUpPtrs() const {
    return !PerPtrBottomUp.empty();
  }
  unsigned bottom_up_ptr_list_size() const {
    return std::distance(bottom_up_ptr_begin(), bottom_up_ptr_end());
  }

  void SetAsEntry() { TopDownPathCount = 1; }
  void SetAsExit() { BottomUpPathCount = 1; }
  TopDownPtrState &getPtrTopDownState(const Value *Arg) {
    return PerPtrTopDown[Arg];
  }

  BottomUpPtrState &getPtrBottomUpState(const Value *Arg) {
    return PerPtrBottomUp[Arg];
  }

  bottom_up_ptr_iterator findPtrBottomUpState(const Value *Arg) {
    return PerPtrBottomUp.find(Arg);
  }

  void clearBottomUpPointers() { PerPtrBottomUp.clear(); }

  void clearTopDownPointers() { PerPtrTopDown.clear(); }

  void InitFromPred(const BBState &Other);
  void InitFromSucc(const BBState &Other);
  void MergePred(const BBState &Other);
  void MergeSucc(const BBState &Other);
  // Returns true if the path count overflows; otherwise sets PathCount to the
  // number of entry-to-exit paths through this block.
  bool GetAllPathCountWithOverflow(unsigned &PathCount) const {
    // ...
    unsigned long long Product =
        (unsigned long long)TopDownPathCount * BottomUpPathCount;
    // Overflow occurred if any of the upper bits of Product are set or if the
    // product equals the overflow sentinel value.
    return (Product >> 32) ||
           ((PathCount = Product) == OverflowOccurredValue);
  }
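
  // Note on the overflow check above: the two 32-bit path counts are
  // multiplied in a 64-bit temporary, so a product that needs more than 32
  // bits (for example 0x10000 top-down paths times 0x10000 bottom-up paths)
  // makes "Product >> 32" non-zero and is reported as an overflow instead of
  // being silently truncated.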
  edge_iterator pred_begin() const { return Preds.begin(); }
  edge_iterator pred_end() const { return Preds.end(); }
  edge_iterator succ_begin() const { return Succs.begin(); }
  edge_iterator succ_end() const { return Succs.end(); }

  void addSucc(BasicBlock *Succ) { Succs.push_back(Succ); }
  void addPred(BasicBlock *Pred) { Preds.push_back(Pred); }

  bool isExit() const { return Succs.empty(); }
void BBState::InitFromPred(const BBState &Other) {
  PerPtrTopDown = Other.PerPtrTopDown;
  TopDownPathCount = Other.TopDownPathCount;
}

void BBState::InitFromSucc(const BBState &Other) {
  PerPtrBottomUp = Other.PerPtrBottomUp;
  BottomUpPathCount = Other.BottomUpPathCount;
}
void BBState::MergePred(const BBState &Other) {
  if (TopDownPathCount == OverflowOccurredValue)
    return;

  TopDownPathCount += Other.TopDownPathCount;

  // If the path count now equals the sentinel value, treat it as an overflow
  // and fall back to conservative behavior.
  if (TopDownPathCount == OverflowOccurredValue) {
    clearTopDownPointers();
    return;
  }

  // Check for real overflow of the addition.
  if (TopDownPathCount < Other.TopDownPathCount) {
    TopDownPathCount = OverflowOccurredValue;
    clearTopDownPointers();
    return;
  }

  // For each entry in the other set, if our set has an entry with the same
  // key, merge the entries. Otherwise, copy the entry and merge it with an
  // empty entry.
  for (auto MI = Other.top_down_ptr_begin(), ME = Other.top_down_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrTopDown.insert(*MI);
    Pair.first->second.Merge(Pair.second ? TopDownPtrState() : MI->second,
                             /*TopDown=*/true);
  }

  // For each entry in our set that the other set doesn't have, merge it with
  // an empty entry.
  for (auto MI = top_down_ptr_begin(), ME = top_down_ptr_end(); MI != ME; ++MI)
    if (Other.PerPtrTopDown.find(MI->first) == Other.PerPtrTopDown.end())
      MI->second.Merge(TopDownPtrState(), /*TopDown=*/true);
}
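
// Merge note: entries that exist on only one side are merged against a
// default-constructed pointer state, so information that is not common to
// both paths is conservatively dropped, and the path count saturates at
// OverflowOccurredValue (clearing the per-pointer states) rather than
// wrapping.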
void BBState::MergeSucc(const BBState &Other) {
  if (BottomUpPathCount == OverflowOccurredValue)
    return;

  BottomUpPathCount += Other.BottomUpPathCount;

  // If the path count now equals the sentinel value, treat it as an overflow
  // and fall back to conservative behavior.
  if (BottomUpPathCount == OverflowOccurredValue) {
    clearBottomUpPointers();
    return;
  }

  // Check for real overflow of the addition.
  if (BottomUpPathCount < Other.BottomUpPathCount) {
    BottomUpPathCount = OverflowOccurredValue;
    clearBottomUpPointers();
    return;
  }

  // For each entry in the other set, if our set has an entry with the same
  // key, merge the entries. Otherwise, copy the entry and merge it with an
  // empty entry.
  for (auto MI = Other.bottom_up_ptr_begin(), ME = Other.bottom_up_ptr_end();
       MI != ME; ++MI) {
    auto Pair = PerPtrBottomUp.insert(*MI);
    Pair.first->second.Merge(Pair.second ? BottomUpPtrState() : MI->second,
                             /*TopDown=*/false);
  }

  // For each entry in our set that the other set doesn't have, merge it with
  // an empty entry.
  for (auto MI = bottom_up_ptr_begin(), ME = bottom_up_ptr_end(); MI != ME;
       ++MI)
    if (Other.PerPtrBottomUp.find(MI->first) == Other.PerPtrBottomUp.end())
      MI->second.Merge(BottomUpPtrState(), /*TopDown=*/false);
}
raw_ostream &operator<<(raw_ostream &OS, BBState &BBInfo) {
  // Dump the pointers we are tracking.
  OS << "    TopDown State:\n";
  if (!BBInfo.hasTopDownPtrs()) {
    LLVM_DEBUG(dbgs() << "        NONE!\n");
  } else {
    for (auto I = BBInfo.top_down_ptr_begin(), E = BBInfo.top_down_ptr_end();
         I != E; ++I) {
      const PtrState &P = I->second;
      OS << "    Ptr: " << *I->first
         << "\n    KnownSafe:        " << (P.IsKnownSafe() ? "true" : "false")
         << "\n    ImpreciseRelease: "
         << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
         << "    HasCFGHazards:    "
         << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
         << "    KnownPositive:    "
         << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
         << "    Seq:              "
         << P.GetSeq() << "\n";
    }
  }

  OS << "    BottomUp State:\n";
  if (!BBInfo.hasBottomUpPtrs()) {
    LLVM_DEBUG(dbgs() << "        NONE!\n");
  } else {
    for (auto I = BBInfo.bottom_up_ptr_begin(), E = BBInfo.bottom_up_ptr_end();
         I != E; ++I) {
      const PtrState &P = I->second;
      OS << "    Ptr: " << *I->first
         << "\n    KnownSafe:        " << (P.IsKnownSafe() ? "true" : "false")
         << "\n    ImpreciseRelease: "
         << (P.IsTrackingImpreciseReleases() ? "true" : "false") << "\n"
         << "    HasCFGHazards:    "
         << (P.IsCFGHazardAfflicted() ? "true" : "false") << "\n"
         << "    KnownPositive:    "
         << (P.HasKnownPositiveRefCount() ? "true" : "false") << "\n"
         << "    Seq:              "
         << P.GetSeq() << "\n";
    }
  }

  return OS;
}
/// The main ARC optimization pass.
class ObjCARCOpt {
  // ...
  bool CFGChanged = false;

  bool DisableRetainReleasePairing = false;

  // Flags which determine whether each of the interesting runtime functions
  // is in fact used in this function.
  unsigned UsedInThisFunction;

  void OptimizeIndividualCalls(Function &F);

  bool OptimizeInlinedAutoreleaseRVCall(Function &F, Instruction *Inst,
                                        const Value *&Arg, ARCInstKind Class,
                                        Instruction *AutoreleaseRV,
                                        const Value *&AutoreleaseRVArg);

  void CheckForCFGHazards(const BasicBlock *BB,
                          DenseMap<const BasicBlock *, BBState> &BBStates,
                          BBState &MyStates) const;

  bool VisitInstructionTopDown(
      Instruction *Inst, DenseMap<Value *, RRInfo> &Releases,
      BBState &MyStates,
      const DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
          &ReleaseInsertPtToRCIdentityRoots);

  bool VisitTopDown(
      BasicBlock *BB, DenseMap<const BasicBlock *, BBState> &BBStates,
      DenseMap<Value *, RRInfo> &Releases,
      const DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
          &ReleaseInsertPtToRCIdentityRoots);

  bool PairUpRetainsAndReleases(
      DenseMap<const BasicBlock *, BBState> &BBStates,
      BlotMapVector<Value *, RRInfo> &Retains,
      DenseMap<Value *, RRInfo> &Releases, Module *M, Instruction *Retain,
      SmallVectorImpl<Instruction *> &DeadInsts, RRInfo &RetainsToMove,
      RRInfo &ReleasesToMove, Value *Arg, bool KnownSafe,
      bool &AnyPairsCompletelyEliminated);

  void OptimizeAutoreleasePools(Function &F);

  template <typename PredicateT>
  static void cloneOpBundlesIf(CallBase *CI,
                               SmallVectorImpl<OperandBundleDef> &OpBundles,
                               PredicateT Predicate);

  void addOpBundleForFunclet(BasicBlock *BB,
                             SmallVectorImpl<OperandBundleDef> &OpBundles) {
    if (!BlockEHColors.empty()) {
      const ColorVector &CV = BlockEHColors.find(BB)->second;
      // Add a "funclet" operand bundle for the enclosing funclet pad, if any.
      for (BasicBlock *EHPadBB : CV)
        // ...
    }
  }

  void GatherStatistics(Function &F, bool AfterOptimization = false);

public:
  void init(Function &F);
  bool run(Function &F, AAResults &AA);
  bool hasCFGChanged() const { return CFGChanged; }
};
bool ObjCARCOpt::OptimizeRetainRVCall(Function &F, Instruction *RetainRV) {
  // ...
    if (II->getNormalDest() == RetainRVParent) {
      // ...
    }
  // ...
  assert(/* ... */ "a bundled retainRV's argument should be a call");
  // ...

  // Turn it into a plain objc_retain.
  LLVM_DEBUG(dbgs() << "Transforming objc_retainAutoreleasedReturnValue => "
                       "objc_retain since the operand is not a return value.\n"
                    /* ... */);
  // ...
  Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Retain);
  // ...
}
bool ObjCARCOpt::OptimizeInlinedAutoreleaseRVCall(
    Function &F, Instruction *Inst, const Value *&Arg, ARCInstKind Class,
    Instruction *AutoreleaseRV, const Value *&AutoreleaseRVArg) {
  // ...
  if (Arg != AutoreleaseRVArg) {
    // ...
  }
  // ...
  LLVM_DEBUG(dbgs() << "Found inlined objc_autoreleaseReturnValue '"
                    /* ... */);
  // ...
  if (Class == ARCInstKind::RetainRV) {
    // The retainRV and the autoreleaseRV cancel each other out.
    // ...
  }
  // ...
  assert(Class == ARCInstKind::UnsafeClaimRV);
  // ...
  assert(/* ... */ "Expected UnsafeClaimRV to be safe to tail call");
  // ...
  OptimizeIndividualCallImpl(F, Release, ARCInstKind::Release, Arg);
  // ...
}
void ObjCARCOpt::OptimizeAutoreleaseRVCall(Function &F,
                                           Instruction *AutoreleaseRV,
                                           ARCInstKind &Class) {
  // Check for a return of the pointer value.
  // ...
  SmallVector<const Value *, 2> Users;
  // ...
  do {
    // ...
    for (const User *U : Ptr->users()) {
      // ...
    }
  } while (!Users.empty());

  LLVM_DEBUG(
      dbgs() << "Transforming objc_autoreleaseReturnValue => "
                "objc_autorelease since its operand is not used as a return "
                "value.\n"
             /* ... */);

  Function *NewDecl = EP.get(ARCRuntimeEntryPointKind::Autorelease);
  // ...
  Class = ARCInstKind::Autorelease;
  // ...
}
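
// Rationale: objc_autoreleaseReturnValue only pays off when its result is
// actually returned, since it hands the object back through the return-value
// fast path. The users() walk above checks for that, and when the result is
// not returned the call is downgraded to plain objc_autorelease, with Class
// updated so the caller sees the new kind.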
void ObjCARCOpt::OptimizeIndividualCalls(Function &F) {
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeIndividualCalls ==\n");

  // Reset all the flags in preparation for recomputing them.
  UsedInThisFunction = 0;

  // Store any delayed AutoreleaseRV so that it can be paired with a
  // retainRV/unsafeClaimRV seen later in the same block.
  Instruction *DelayedAutoreleaseRV = nullptr;
  const Value *DelayedAutoreleaseRVArg = nullptr;
  auto setDelayedAutoreleaseRV = [&](Instruction *AutoreleaseRV) {
    DelayedAutoreleaseRV = AutoreleaseRV;
    DelayedAutoreleaseRVArg = nullptr;
  };
  auto optimizeDelayedAutoreleaseRV = [&]() {
    if (!DelayedAutoreleaseRV)
      return;
    OptimizeIndividualCallImpl(F, DelayedAutoreleaseRV,
                               ARCInstKind::AutoreleaseRV,
                               DelayedAutoreleaseRVArg);
    setDelayedAutoreleaseRV(nullptr);
  };
  auto shouldDelayAutoreleaseRV = [&](Instruction *NonARCInst) {
    if (!DelayedAutoreleaseRV)
      // ... (nothing is being delayed)
    if (NonARCInst->isTerminator())
      // ... (the block ends, so the RV pair cannot be completed here)
    // ...
  };

  // Visit all objc_* calls in F.
  for (inst_iterator I = inst_begin(&F), E = inst_end(&F); I != E;) {
    Instruction *Inst = &*I++;
    // ...
    ARCInstKind Class = GetBasicARCInstKind(Inst);

    const Value *Arg = nullptr;
    switch (Class) {
    default:
      optimizeDelayedAutoreleaseRV();
      break;
    case ARCInstKind::CallOrUser:
    case ARCInstKind::User:
    case ARCInstKind::None:
      // A non-ARC instruction: either keep delaying past it or optimize the
      // pending AutoreleaseRV now.
      if (!shouldDelayAutoreleaseRV(Inst))
        optimizeDelayedAutoreleaseRV();
      continue;
    case ARCInstKind::AutoreleaseRV:
      optimizeDelayedAutoreleaseRV();
      setDelayedAutoreleaseRV(Inst);
      continue;
    case ARCInstKind::RetainRV:
    case ARCInstKind::UnsafeClaimRV:
      if (DelayedAutoreleaseRV) {
        // We have a potential RV pair. Check if they cancel out.
        if (OptimizeInlinedAutoreleaseRVCall(F, Inst, Arg, Class,
                                             DelayedAutoreleaseRV,
                                             DelayedAutoreleaseRVArg)) {
          setDelayedAutoreleaseRV(nullptr);
          continue;
        }
        optimizeDelayedAutoreleaseRV();
      }
      break;
    }

    OptimizeIndividualCallImpl(F, Inst, Class, Arg);
  }

  // Catch the final delayed AutoreleaseRV.
  optimizeDelayedAutoreleaseRV();
}
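
// Pairing note: an objc_autoreleaseReturnValue is not optimized on sight; it
// is parked in DelayedAutoreleaseRV until the next retainRV/unsafeClaimRV (a
// potential inlined RV pair) or an instruction that blocks the pairing is
// seen. The trailing optimizeDelayedAutoreleaseRV() call flushes whatever is
// still pending at the end of the walk.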
/// This function returns true if the value is inert from an ARC perspective.
static bool isInertARCValue(Value *V, SmallPtrSet<Value *, 1> &VisitedPhis) {
  V = V->stripPointerCasts();
  // ...
  // A global annotated as inert is never meaningfully retained or released.
  if (GV->hasAttribute("objc_arc_inert"))
    return true;
  // ...
  // A phi we are already visiting forms a cycle; treat it as inert here.
  if (!VisitedPhis.insert(PN).second)
    return true;
  // ...
}
void ObjCARCOpt::OptimizeIndividualCallImpl(Function &F, Instruction *Inst,
                                            ARCInstKind Class,
                                            const Value *Arg) {
  LLVM_DEBUG(dbgs() << "Visiting: Class: " << Class << "; " << *Inst << "\n");

  // We can delete this call if it takes an inert value.
  SmallPtrSet<Value *, 1> VisitedPhis;
  // ...
  UsedInThisFunction |= 1 << unsigned(Class);
  // ...

  switch (Class) {
  default:
    break;
  // ...
  case ARCInstKind::NoopCast:
    // ...
    break;

  // If the pointer-to-weak-pointer is null, it's undefined behavior.
  case ARCInstKind::StoreWeak:
  case ARCInstKind::LoadWeak:
  case ARCInstKind::LoadWeakRetained:
  case ARCInstKind::InitWeak:
  case ARCInstKind::DestroyWeak: {
    // ...
      LLVM_DEBUG(
          dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
                    "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");
    // ...
  }
  case ARCInstKind::CopyWeak:
  case ARCInstKind::MoveWeak: {
    // ...
      LLVM_DEBUG(
          dbgs() << "A null pointer-to-weak-pointer is undefined behavior."
                    "\nOld = " << *CI << "\nNew = " << *NewValue << "\n");
    // ...
  }
  case ARCInstKind::RetainRV:
    if (OptimizeRetainRVCall(F, Inst))
      return;
    break;
  case ARCInstKind::AutoreleaseRV:
    OptimizeAutoreleaseRVCall(F, Inst, Class);
    break;
  }

  // If an autorelease's argument isn't otherwise used, replace it with a
  // release.
  // ...
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Release);
    // ...
    NewCall->setMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease),
                         /* ... */);
    // ...
    LLVM_DEBUG(dbgs() << "Replacing autorelease{,RV}(x) with objc_release(x) "
                         "since x is otherwise unused.\nOld: "
                      << *Call << "\nNew: " << *NewCall << "\n");
    // ...
    Class = ARCInstKind::Release;
  // ...

  // For functions that can never be passed stack arguments, add the tail
  // keyword.
  // ...
    LLVM_DEBUG(
        dbgs() << "Adding tail keyword to function since it can never be "
                  "passed stack args: " /* ... */);
  // ...

  // Ensure that functions that can never have a "tail" keyword due to ARC
  // semantics do not carry one.
  // ...
    LLVM_DEBUG(dbgs() << "Removing tail keyword from function: " << *Inst
                      /* ... */);
  // ...

  // Set nounwind as needed.
  // ...
    LLVM_DEBUG(dbgs() << "Found no throw class. Setting nounwind on: " << *Inst
                      /* ... */);
  // ...
  UsedInThisFunction |= 1 << unsigned(Class);

  // ARC calls with null are no-ops. Delete them.
  // ...
    LLVM_DEBUG(dbgs() << "ARC calls with null are no-ops. Erasing: " << *Inst
                      /* ... */);
  // ...
  UsedInThisFunction |= 1 << unsigned(Class);

  // If Arg is a PHI with some null incoming values, the call may be pushed up
  // to just the paths with non-null incoming values -- but not for a release
  // that lacks the clang.imprecise_release tag.
  if (Class == ARCInstKind::Release &&
      !Inst->getMetadata(MDKindCache.get(ARCMDKindID::ImpreciseRelease)))
    return;

  SmallVector<std::pair<Instruction *, const Value *>, 4> Worklist;
  Worklist.push_back(std::make_pair(Inst, Arg));
  do {
    std::pair<Instruction *, const Value *> Pair = Worklist.pop_back_val();
    // ...

    // Determine if the PHI has any null operands, or any incoming critical
    // edges.
    bool HasNull = false;
    bool HasCriticalEdges = false;
    // ...
        HasCriticalEdges = true;
    // ...
    if (HasCriticalEdges)
      // ...

    switch (Class) {
    case ARCInstKind::Retain:
    case ARCInstKind::RetainBlock:
      // These can always be moved up.
      break;
    case ARCInstKind::Release:
      // These can't be moved across things that care about the retain count.
      // ...
    case ARCInstKind::Autorelease:
      // These can't be moved across autorelease pool scope boundaries.
      // ...
    case ARCInstKind::UnsafeClaimRV:
    case ARCInstKind::RetainRV:
    case ARCInstKind::AutoreleaseRV:
      // Don't move these; the RV optimization depends on their placement.
      // ...
    }

    // Clone the call into each predecessor that supplies a non-null incoming
    // value.
    // ...
      SmallVector<OperandBundleDef, 1> OpBundles;
      cloneOpBundlesIf(CInst, OpBundles, [](const OperandBundleUse &B) {
        /* ... */
      });
      addOpBundleForFunclet(InsertPos->getParent(), OpBundles);
      // ...
      if (Op->getType() != ParamTy)
        Op = new BitCastInst(Op, ParamTy, "", InsertPos);
      // ...
      Clone->insertBefore(*InsertPos->getParent(), InsertPos);

      LLVM_DEBUG(dbgs() << /* ... */ "And inserting clone at "
                        << *InsertPos << "\n");
      Worklist.push_back(std::make_pair(Clone, Incoming));
    // ...
  } while (!Worklist.empty());
}
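
// The worklist above handles ARC calls whose argument is a phi with a null
// incoming value on some edges: as long as no incoming critical edges are
// involved, the call is cloned into the predecessors that supply a non-null
// value, so the copies that receive null become no-ops and can be erased.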
/// If we have a top down pointer in the S_Use state, make sure that there are
/// no CFG hazards by checking the states of various bottom up pointers.
static void CheckForUseCFGHazard(const Sequence SuccSSeq,
                                 const bool SuccSRRIKnownSafe,
                                 TopDownPtrState &S, bool &SomeSuccHasSame,
                                 bool &AllSuccsHaveSame,
                                 bool &NotAllSeqEqualButKnownSafe,
                                 bool &ShouldContinue) {
  // ...
      ShouldContinue = true;
  // ...
      SomeSuccHasSame = true;
  // ...
      AllSuccsHaveSame = false;
  // ...
      NotAllSeqEqualButKnownSafe = true;
  // ...
}

/// If we have a top down pointer in the S_CanRelease state, make sure that
/// there are no CFG hazards by checking the states of various bottom up
/// pointers.
static void CheckForCanReleaseCFGHazard(const Sequence SuccSSeq,
                                        const bool SuccSRRIKnownSafe,
                                        TopDownPtrState &S,
                                        bool &SomeSuccHasSame,
                                        bool &AllSuccsHaveSame,
                                        bool &NotAllSeqEqualButKnownSafe) {
  // ...
    SomeSuccHasSame = true;
  // ...
    AllSuccsHaveSame = false;
  // ...
    NotAllSeqEqualButKnownSafe = true;
  // ...
}
void
ObjCARCOpt::CheckForCFGHazards(const BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BBState &MyStates) const {
  // Compare each top-down pointer state with the bottom-up states seen in the
  // successors of BB.
  for (auto I = MyStates.top_down_ptr_begin(), E = MyStates.top_down_ptr_end();
       I != E; ++I) {
    TopDownPtrState &S = I->second;
    const Sequence Seq = I->second.GetSeq();
    // ...
    assert(/* ... */ "Unknown top down sequence state.");

    const Value *Arg = I->first;
    bool SomeSuccHasSame = false;
    bool AllSuccsHaveSame = true;
    bool NotAllSeqEqualButKnownSafe = false;

    for (const BasicBlock *Succ : successors(BB)) {
      // ...
      const DenseMap<const BasicBlock *, BBState>::iterator BBI =
          BBStates.find(Succ);
      // ...
      const BottomUpPtrState &SuccS = BBI->second.getPtrBottomUpState(Arg);
      // ...
      if (SuccSSeq == S_None) {
        // ...
      }
      // ...
      const bool SuccSRRIKnownSafe = SuccS.IsKnownSafe();
      // ...
        bool ShouldContinue = false;
        CheckForUseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S, SomeSuccHasSame,
                             AllSuccsHaveSame, NotAllSeqEqualButKnownSafe,
                             ShouldContinue);
      // ...
        CheckForCanReleaseCFGHazard(SuccSSeq, SuccSRRIKnownSafe, S,
                                    SomeSuccHasSame, AllSuccsHaveSame,
                                    NotAllSeqEqualButKnownSafe);
      // ...
    }

    // If the state at the other end of any of the successor edges matches the
    // current state, require all edges to match.
    if (SomeSuccHasSame && !AllSuccsHaveSame) {
      // ...
    } else if (NotAllSeqEqualButKnownSafe) {
      // ...
    }
  }
}
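
// A "CFG hazard" means the top-down state of a pointer disagrees with the
// bottom-up state recorded in some successor, so moving a retain/release
// across this edge could change behavior on one of the paths. Depending on
// the helpers' verdict the sequence is either reset, or (when every successor
// is at least known safe) only marked CFGHazardAfflicted, in which case pairs
// may still be removed in place but no code motion is performed.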
bool ObjCARCOpt::VisitInstructionBottomUp(
    Instruction *Inst, BasicBlock *BB, BlotMapVector<Value *, RRInfo> &Retains,
    BBState &MyStates) {
  bool NestingDetected = false;
  // ...
  const Value *Arg = nullptr;

  switch (Class) {
  case ARCInstKind::Release: {
    // ...
    BottomUpPtrState &S = MyStates.getPtrBottomUpState(Arg);
    // ...
  }
  case ARCInstKind::RetainBlock:
    // ...
  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {
    // ...
    BottomUpPtrState &S = MyStates.getPtrBottomUpState(Arg);
    // ...
    if (Class != ARCInstKind::RetainRV) {
      // ...
    }
    // ...
  }
  case ARCInstKind::AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearBottomUpPointers();
    return NestingDetected;
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // These are irrelevant.
    return NestingDetected;
  // ...
  }

  // Consider any other possible effects of this instruction on each pointer
  // being tracked.
  for (auto MI = MyStates.bottom_up_ptr_begin(),
            ME = MyStates.bottom_up_ptr_end();
       MI != ME; ++MI) {
    // ...
    BottomUpPtrState &S = MI->second;
    // ...
  }

  return NestingDetected;
}
bool ObjCARCOpt::VisitBottomUp(BasicBlock *BB,
                               DenseMap<const BasicBlock *, BBState> &BBStates,
                               BlotMapVector<Value *, RRInfo> &Retains) {
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each successor to compute the initial state for the
  // current block.
  BBState::edge_iterator SI(MyStates.succ_begin()),
                         SE(MyStates.succ_end());
  if (SI != SE) {
    const BasicBlock *Succ = *SI;
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Succ);
    // ...
    MyStates.InitFromSucc(I->second);
    ++SI;
    for (; SI != SE; ++SI) {
      Succ = *SI;
      I = BBStates.find(Succ);
      // ...
      MyStates.MergeSucc(I->second);
    }
  }

  LLVM_DEBUG(dbgs() /* ... */ << BBStates[BB] << "\n"
                    << "Performing Dataflow:\n");

  // Visit all the instructions, bottom-up.
  // ...
    NestingDetected |= VisitInstructionBottomUp(Inst, BB, Retains, MyStates);

    // Bail out if the number of pointers being tracked becomes too large so
    // that this pass can complete in a reasonable amount of time.
    if (MyStates.bottom_up_ptr_list_size() > MaxPtrStates) {
      DisableRetainReleasePairing = true;
      return false;
    }
  // ...

  // If there's a predecessor with an invoke, visit the invoke as if it were
  // part of this block, since we can't insert code after an invoke in its own
  // block.
  for (BBState::edge_iterator PI(MyStates.pred_begin()),
                              PE(MyStates.pred_end()); PI != PE; ++PI) {
    // ...
    NestingDetected |= VisitInstructionBottomUp(II, BB, Retains, MyStates);
  }

  LLVM_DEBUG(dbgs() << "\nFinal State:\n" << BBStates[BB] << "\n");

  return NestingDetected;
}
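
// The bottom-up visit walks instructions in reverse, seeding each block's
// state from its successors so that a release is seen before the retain it
// may pair with; VisitTopDown below performs the mirror-image walk. If the
// number of tracked pointer states in any block ever exceeds MaxPtrStates,
// DisableRetainReleasePairing aborts pairing for the whole function.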
static void collectReleaseInsertPts(
    const BlotMapVector<Value *, RRInfo> &Retains,
    DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
        &ReleaseInsertPtToRCIdentityRoots) {
  for (const auto &P : Retains) {
    // Retains maps an objc_retain call to the RRInfo of its RC identity root.
    const Value *Root = GetRCIdentityRoot(P.first);
    // Collect the insertion points of the releases paired with this retain.
    for (const Instruction *InsertPt : P.second.ReverseInsertPts)
      ReleaseInsertPtToRCIdentityRoots[InsertPt].insert(Root);
  }
}

static const SmallPtrSet<const Value *, 2> *
getRCIdentityRootsFromReleaseInsertPt(
    const Instruction *InsertPt,
    const DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
        &ReleaseInsertPtToRCIdentityRoots) {
  auto I = ReleaseInsertPtToRCIdentityRoots.find(InsertPt);
  if (I == ReleaseInsertPtToRCIdentityRoots.end())
    return nullptr;
  return &I->second;
}
bool ObjCARCOpt::VisitInstructionTopDown(
    Instruction *Inst, DenseMap<Value *, RRInfo> &Releases, BBState &MyStates,
    const DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
        &ReleaseInsertPtToRCIdentityRoots) {
  bool NestingDetected = false;
  ARCInstKind Class = GetARCInstKind(Inst);
  const Value *Arg = nullptr;

  // If this instruction is an insertion point for a moved release, update the
  // top-down state of the corresponding RC identity roots.
  if (const SmallPtrSet<const Value *, 2> *Roots =
          getRCIdentityRootsFromReleaseInsertPt(
              Inst, ReleaseInsertPtToRCIdentityRoots))
    for (const auto *Root : *Roots) {
      TopDownPtrState &S = MyStates.getPtrTopDownState(Root);
      // ...
    }

  switch (Class) {
  case ARCInstKind::RetainBlock:
    // ...
  case ARCInstKind::Retain:
  case ARCInstKind::RetainRV: {
    // ...
    TopDownPtrState &S = MyStates.getPtrTopDownState(Arg);
    // ...
  }
  case ARCInstKind::Release: {
    // ...
    TopDownPtrState &S = MyStates.getPtrTopDownState(Arg);
    // ...
  }
  case ARCInstKind::AutoreleasepoolPop:
    // Conservatively, clear MyStates for all known pointers.
    MyStates.clearTopDownPointers();
    // ...
  case ARCInstKind::AutoreleasepoolPush:
  case ARCInstKind::None:
    // ...
  }

  // Consider any other possible effects of this instruction on each pointer
  // being tracked.
  for (auto MI = MyStates.top_down_ptr_begin(),
            ME = MyStates.top_down_ptr_end();
       MI != ME; ++MI) {
    // ...
    TopDownPtrState &S = MI->second;
    // ...
  }

  return NestingDetected;
}
bool ObjCARCOpt::VisitTopDown(
    BasicBlock *BB, DenseMap<const BasicBlock *, BBState> &BBStates,
    DenseMap<Value *, RRInfo> &Releases,
    const DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
        &ReleaseInsertPtToRCIdentityRoots) {
  bool NestingDetected = false;
  BBState &MyStates = BBStates[BB];

  // Merge the states from each predecessor to compute the initial state for
  // the current block.
  BBState::edge_iterator PI(MyStates.pred_begin()),
                         PE(MyStates.pred_end());
  if (PI != PE) {
    const BasicBlock *Pred = *PI;
    DenseMap<const BasicBlock *, BBState>::iterator I = BBStates.find(Pred);
    // ...
    MyStates.InitFromPred(I->second);
    ++PI;
    for (; PI != PE; ++PI) {
      Pred = *PI;
      I = BBStates.find(Pred);
      // ...
      MyStates.MergePred(I->second);
    }
  }

  // Check that BB and MyStates have the same number of predecessors. This
  // prevents retain calls that live outside a loop from being moved into the
  // loop.
  if (!BB->hasNPredecessors(MyStates.pred_end() - MyStates.pred_begin()))
    for (auto I = MyStates.top_down_ptr_begin(),
              E = MyStates.top_down_ptr_end();
         I != E; ++I)
      I->second.SetCFGHazardAfflicted(true);

  LLVM_DEBUG(dbgs() /* ... */ << BBStates[BB] << "\n"
                    << "Performing Dataflow:\n");

  // Visit all the instructions, top-down.
  for (Instruction &Inst : *BB) {
    // ...
    NestingDetected |= VisitInstructionTopDown(
        &Inst, Releases, MyStates, ReleaseInsertPtToRCIdentityRoots);

    // Bail out if the number of pointers being tracked becomes too large so
    // that this pass can complete in a reasonable amount of time.
    if (MyStates.top_down_ptr_list_size() > MaxPtrStates) {
      DisableRetainReleasePairing = true;
      return false;
    }
  }

  LLVM_DEBUG(dbgs() << "\nState Before Checking for CFG Hazards:\n"
                    << BBStates[BB] << "\n\n");
  CheckForCFGHazards(BB, BBStates, MyStates);
  // ...
  return NestingDetected;
}
static void ComputePostOrders(Function &F,
                              SmallVectorImpl<BasicBlock *> &PostOrder,
                              SmallVectorImpl<BasicBlock *> &ReverseCFGPostOrder,
                              unsigned NoObjCARCExceptionsMDKind,
                              DenseMap<const BasicBlock *, BBState> &BBStates) {
  // Forward DFS from the entry block, computing the post order and the
  // pred/succ edge lists recorded in BBStates.
  // ...
  {
    BBState &MyStates = BBStates[EntryBB];
    MyStates.SetAsEntry();
    // ...
  }
  do {
    // ...
    while (SuccStack.back().second != SE) {
      // ...
      if (Visited.insert(SuccBB).second) {
        // Record the CFG edge the first time this successor is reached.
        BBStates[CurrBB].addSucc(SuccBB);
        BBState &SuccStates = BBStates[SuccBB];
        SuccStates.addPred(CurrBB);
        // ...
      }
      // ...
      if (!OnStack.count(SuccBB)) {
        BBStates[CurrBB].addSucc(SuccBB);
        BBStates[SuccBB].addPred(CurrBB);
      }
      // ...
    }
    // ...
    OnStack.erase(CurrBB);
    // ...
  } while (!SuccStack.empty());

  // Reverse-CFG DFS from each exit block (a block with no successors),
  // computing the reverse-CFG post order.
  // ...
    BBState &MyStates = BBStates[&ExitBB];
    if (!MyStates.isExit())
      continue;
    // ...
    MyStates.SetAsExit();

    PredStack.push_back(std::make_pair(&ExitBB, MyStates.pred_begin()));
    // ...
    while (!PredStack.empty()) {
    reverse_dfs_next_succ:
      BBState::edge_iterator PE = BBStates[PredStack.back().first].pred_end();
      while (PredStack.back().second != PE) {
        // ...
        if (Visited.insert(BB).second) {
          // ...
          goto reverse_dfs_next_succ;
        }
      }
      // ...
    }
  // ...
}
// Visit the function both bottom-up and top-down.
bool ObjCARCOpt::Visit(Function &F,
                       DenseMap<const BasicBlock *, BBState> &BBStates,
                       BlotMapVector<Value *, RRInfo> &Retains,
                       DenseMap<Value *, RRInfo> &Releases) {
  // Compute both orderings up front so the two dataflow directions share the
  // same CFG bookkeeping.
  SmallVector<BasicBlock *, 16> PostOrder;
  SmallVector<BasicBlock *, 16> ReverseCFGPostOrder;
  ComputePostOrders(F, PostOrder, ReverseCFGPostOrder,
                    MDKindCache.get(ARCMDKindID::NoObjCARCExceptions),
                    BBStates);

  // Use reverse-postorder on the reverse CFG for bottom-up.
  bool BottomUpNestingDetected = false;
  for (BasicBlock *BB : llvm::reverse(ReverseCFGPostOrder)) {
    BottomUpNestingDetected |= VisitBottomUp(BB, BBStates, Retains);
    if (DisableRetainReleasePairing)
      return false;
  }

  DenseMap<const Instruction *, SmallPtrSet<const Value *, 2>>
      ReleaseInsertPtToRCIdentityRoots;
  collectReleaseInsertPts(Retains, ReleaseInsertPtToRCIdentityRoots);

  // Use reverse-postorder for top-down.
  bool TopDownNestingDetected = false;
  for (BasicBlock *BB : llvm::reverse(PostOrder)) {
    TopDownNestingDetected |=
        VisitTopDown(BB, BBStates, Releases, ReleaseInsertPtToRCIdentityRoots);
    if (DisableRetainReleasePairing)
      return false;
  }

  return TopDownNestingDetected && BottomUpNestingDetected;
}
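
// Visit() only reports nesting when both directions observed it. The caller,
// OptimizeSequences, combines that with "some pair was completely eliminated"
// to decide whether re-running the analysis might expose further pairs, which
// is what drives the while (OptimizeSequences(F)) loop in run().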
void ObjCARCOpt::MoveCalls(Value *Arg, RRInfo &RetainsToMove,
                           RRInfo &ReleasesToMove,
                           BlotMapVector<Value *, RRInfo> &Retains,
                           DenseMap<Value *, RRInfo> &Releases,
                           SmallVectorImpl<Instruction *> &DeadInsts,
                           Module *M) {
  // Insert the new retain and release calls.
  for (Instruction *InsertPt : ReleasesToMove.ReverseInsertPts) {
    // ...
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
    SmallVector<OperandBundleDef, 1> BundleList;
    addOpBundleForFunclet(InsertPt->getParent(), BundleList);
    // ...
    LLVM_DEBUG(dbgs() << /* ... */ "At insertion point: "
                      << *InsertPt << "\n");
  }
  for (Instruction *InsertPt : RetainsToMove.ReverseInsertPts) {
    // ...
    Function *Decl = EP.get(ARCRuntimeEntryPointKind::Release);
    SmallVector<OperandBundleDef, 1> BundleList;
    addOpBundleForFunclet(InsertPt->getParent(), BundleList);
    // ...
    LLVM_DEBUG(dbgs() << /* ... */ "At insertion point: "
                      << *InsertPt << "\n");
  }

  // Delete the original retain and release calls.
  for (Instruction *OrigRetain : RetainsToMove.Calls) {
    Retains.blot(OrigRetain);
    DeadInsts.push_back(OrigRetain);
    LLVM_DEBUG(dbgs() << "Deleting retain: " << *OrigRetain << "\n");
  }
  for (Instruction *OrigRelease : ReleasesToMove.Calls) {
    Releases.erase(OrigRelease);
    DeadInsts.push_back(OrigRelease);
    LLVM_DEBUG(dbgs() << "Deleting release: " << *OrigRelease << "\n");
  }
}
bool ObjCARCOpt::PairUpRetainsAndReleases(
    DenseMap<const BasicBlock *, BBState> &BBStates,
    BlotMapVector<Value *, RRInfo> &Retains,
    DenseMap<Value *, RRInfo> &Releases, Module *M,
    Instruction *Retain,
    SmallVectorImpl<Instruction *> &DeadInsts, RRInfo &RetainsToMove,
    RRInfo &ReleasesToMove, Value *Arg, bool KnownSafe,
    bool &AnyPairsCompletelyEliminated) {
  bool KnownSafeTD = true, KnownSafeBU = true;
  bool CFGHazardAfflicted = false;

  // Connect the dots between the top-down-collected retains and the
  // bottom-up-collected releases to form sets of related calls.
  unsigned OldDelta = 0;
  unsigned NewDelta = 0;
  unsigned OldCount = 0;
  unsigned NewCount = 0;
  bool FirstRelease = true;
  for (SmallVector<Instruction *, 4> NewRetains{Retain};;) {
    SmallVector<Instruction *, 4> NewReleases;
    for (Instruction *NewRetain : NewRetains) {
      auto It = Retains.find(NewRetain);
      // ...
      const RRInfo &NewRetainRRI = It->second;
      // ...
      for (Instruction *NewRetainRelease : NewRetainRRI.Calls) {
        auto Jt = Releases.find(NewRetainRelease);
        if (Jt == Releases.end())
          return false;
        const RRInfo &NewRetainReleaseRRI = Jt->second;

        // If the release does not have a reference to the retain as well,
        // something happened which is unaccounted for. Do not do anything.
        // ...
        if (!NewRetainReleaseRRI.Calls.count(NewRetain))
          return false;

        if (ReleasesToMove.Calls.insert(NewRetainRelease).second) {
          // If we overflow when we compute the path count, don't remove/move
          // anything.
          const BBState &NRRBBState = BBStates[NewRetainRelease->getParent()];
          unsigned PathCount = BBState::OverflowOccurredValue;
          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
            return false;
          assert(PathCount != BBState::OverflowOccurredValue &&
                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta -= PathCount;

          // ...
          FirstRelease = false;
          // ...

          // Collect the optimal insertion points.
          // ...
              const BBState &RIPBBState = BBStates[RIP->getParent()];
              unsigned PathCount = BBState::OverflowOccurredValue;
              if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
                return false;
              assert(PathCount != BBState::OverflowOccurredValue &&
                     "PathCount at this point can not be "
                     "OverflowOccurredValue.");
              NewDelta -= PathCount;
          // ...
          NewReleases.push_back(NewRetainRelease);
        }
      }
    }
    NewRetains.clear();
    if (NewReleases.empty())
      break;

    // Back the other way: union the retains reachable from each new release.
    for (Instruction *NewRelease : NewReleases) {
      auto It = Releases.find(NewRelease);
      // ...
      const RRInfo &NewReleaseRRI = It->second;
      // ...
      for (Instruction *NewReleaseRetain : NewReleaseRRI.Calls) {
        auto Jt = Retains.find(NewReleaseRetain);
        if (Jt == Retains.end())
          return false;
        const RRInfo &NewReleaseRetainRRI = Jt->second;

        // If the retain does not have a reference to the release as well,
        // something happened which is unaccounted for. Do not do anything.
        // ...
        if (!NewReleaseRetainRRI.Calls.count(NewRelease))
          return false;

        if (RetainsToMove.Calls.insert(NewReleaseRetain).second) {
          // If we overflow when we compute the path count, don't remove/move
          // anything.
          const BBState &NRRBBState = BBStates[NewReleaseRetain->getParent()];
          unsigned PathCount = BBState::OverflowOccurredValue;
          if (NRRBBState.GetAllPathCountWithOverflow(PathCount))
            return false;
          assert(PathCount != BBState::OverflowOccurredValue &&
                 "PathCount at this point can not be "
                 "OverflowOccurredValue.");
          OldDelta += PathCount;
          OldCount += PathCount;

          // Collect the optimal insertion points.
          // ...
              const BBState &RIPBBState = BBStates[RIP->getParent()];
              unsigned PathCount = BBState::OverflowOccurredValue;
              if (RIPBBState.GetAllPathCountWithOverflow(PathCount))
                return false;
              assert(PathCount != BBState::OverflowOccurredValue &&
                     "PathCount at this point can not be "
                     "OverflowOccurredValue.");
              NewDelta += PathCount;
              NewCount += PathCount;
          // ...
          NewRetains.push_back(NewReleaseRetain);
        }
      }
    }
    if (NewRetains.empty())
      break;
  }

  // ...
  bool UnconditionallySafe = KnownSafeTD && KnownSafeBU;
  if (UnconditionallySafe) {
    // ...
  }
  // ...

  const bool WillPerformCodeMotion =
      !ReleasesToMove.ReverseInsertPts.empty() ||
      !RetainsToMove.ReverseInsertPts.empty();
  if (CFGHazardAfflicted && WillPerformCodeMotion)
    return false;

  // ...

  assert(OldCount != 0 && "Unreachable code?");
  NumRRs += OldCount - NewCount;
  // Set to true if we completely removed any RR pairs.
  AnyPairsCompletelyEliminated = NewCount == 0;

  // We can move calls!
  return true;
}
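
// Balancing note: in PairUpRetainsAndReleases, OldDelta and NewDelta
// accumulate signed path counts for the retains (+) and releases (-) being
// collected. The original calls must balance to zero (every path that
// executes a retain also executes its release), and the proposed insertion
// points must balance as well, or the candidate set is rejected.
// OldCount - NewCount is then the number of retain/release pairs actually
// eliminated, which feeds the NumRRs statistic.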
bool ObjCARCOpt::PerformCodePlacement(
    DenseMap<const BasicBlock *, BBState> &BBStates,
    BlotMapVector<Value *, RRInfo> &Retains,
    DenseMap<Value *, RRInfo> &Releases, Module *M) {
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::PerformCodePlacement ==\n");

  bool AnyPairsCompletelyEliminated = false;
  SmallVector<Instruction *, 8> DeadInsts;

  // Visit each retain and try to pair it with releases on the same argument.
  // ...
    // A constant pointer can't be pointing to an object on the heap.
    if (const GlobalVariable *GV =
            /* ... */)
      if (GV->isConstant())
        // ...

    // Connect the dots between the top-down and bottom-up information.
    RRInfo RetainsToMove, ReleasesToMove;

    bool PerformMoveCalls = PairUpRetainsAndReleases(
        BBStates, Retains, Releases, M, Retain, DeadInsts,
        RetainsToMove, ReleasesToMove, Arg, KnownSafe,
        AnyPairsCompletelyEliminated);

    if (PerformMoveCalls) {
      // Ok, everything checks out and we're all set. Let's move some code!
      MoveCalls(Arg, RetainsToMove, ReleasesToMove,
                Retains, Releases, DeadInsts, M);
    }
  // ...

  // Now that we're done moving everything, we can delete the newly dead
  // instructions, as we no longer need them as insert points.
  while (!DeadInsts.empty())
    EraseInstruction(DeadInsts.pop_back_val());

  return AnyPairsCompletelyEliminated;
}
void ObjCARCOpt::OptimizeWeakCalls(Function &F) {
  // First, forward values from earlier loads/stores of a weak location to
  // later weak loads of the same location.
  // ...
    if (Class != ARCInstKind::LoadWeak &&
        Class != ARCInstKind::LoadWeakRetained)
      continue;

    // Delete objc_loadWeak calls with no users.
    if (Class == ARCInstKind::LoadWeak && Inst->use_empty()) {
      // ...
    }

    // Walk backwards looking for an earlier access to the same weak location.
    // ...
      switch (EarlierClass) {
      case ARCInstKind::LoadWeak:
      case ARCInstKind::LoadWeakRetained: {
        // If this is loading from the same pointer, replace this load's value
        // with that one.
        // ...
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        // ...
          if (Class == ARCInstKind::LoadWeakRetained) {
            // If this is a loadWeakRetained, insert a retain of the value.
            Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
            // ...
          }
        // ...
        }
        // ...
      }
      case ARCInstKind::StoreWeak:
      case ARCInstKind::InitWeak: {
        // If this is storing to the same pointer, replace this load's value
        // with the stored value.
        // ...
        switch (PA.getAA()->alias(Arg, EarlierArg)) {
        // ...
          if (Class == ARCInstKind::LoadWeakRetained) {
            Function *Decl = EP.get(ARCRuntimeEntryPointKind::Retain);
            // ...
          }
        // ...
        }
        // ...
      }
      case ARCInstKind::MoveWeak:
      case ARCInstKind::CopyWeak:
        // ...
      case ARCInstKind::AutoreleasepoolPush:
      case ARCInstKind::None:
      case ARCInstKind::IntrinsicUser:
      case ARCInstKind::User:
        // Weak pointers are only modified through the weak entry points (and
        // arbitrary calls, which could call the weak entry points).
        // ...
      // ...
      }
  // ...

  // Then, for each destroyWeak with an alloca operand, check to see if the
  // alloca and all its users can be zapped.
  // ...
    if (Class != ARCInstKind::DestroyWeak)
      continue;
    // ...
    for (User *U : Alloca->users()) {
      // ...
      case ARCInstKind::InitWeak:
      case ARCInstKind::StoreWeak:
      case ARCInstKind::DestroyWeak:
        continue;
      // ...
    }
    // ...
      case ARCInstKind::InitWeak:
      case ARCInstKind::StoreWeak:
        // These functions return their second argument.
        // ...
      case ARCInstKind::DestroyWeak:
        // No return value.
        // ...
    // ...
    Alloca->eraseFromParent();
  // ...
}
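
// OptimizeWeakCalls performs two independent clean-ups: forwarding redundant
// objc_loadWeak / objc_loadWeakRetained results from an earlier load or store
// of the same weak location (only when alias analysis reports a must alias,
// inserting an objc_retain when the original call was the retained variant),
// and deleting weak allocas outright when their only users are the
// objc_*Weak entry points.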
/// Identify program paths which execute sequences of retains and releases
/// which can be eliminated.
bool ObjCARCOpt::OptimizeSequences(Function &F) {
  // Releases and Retains store the results of the main flow analysis.
  DenseMap<Value *, RRInfo> Releases;
  BlotMapVector<Value *, RRInfo> Retains;

  // This is used during the traversal of the function to track the states for
  // each identified object at each block.
  DenseMap<const BasicBlock *, BBState> BBStates;

  // Analyze the CFG of the function, and all instructions.
  bool NestingDetected = Visit(F, BBStates, Retains, Releases);

  if (DisableRetainReleasePairing)
    return false;

  // Transform.
  bool AnyPairsCompletelyEliminated = PerformCodePlacement(BBStates, Retains,
                                                           Releases,
                                                           F.getParent());

  return AnyPairsCompletelyEliminated && NestingDetected;
}
static CallInst *FindPredecessorAutoreleaseWithSafePath(
    const Value *Arg, BasicBlock *BB, ReturnInst *Ret, ProvenanceAnalysis &PA) {
  // ...
}
void ObjCARCOpt::OptimizeReturns(Function &F) {
  if (!F.getReturnType()->isPointerTy())
    return;
  // ...
  for (BasicBlock &BB : F) {
    // ...
    // Look for an autorelease of the returned value with a safe path back to
    // the return.
    CallInst *Autorelease =
        FindPredecessorAutoreleaseWithSafePath(Arg, &BB, Ret, PA);
    // ...
        (!Call->isTailCall() &&
    // ...
  }
}
void
ObjCARCOpt::GatherStatistics(Function &F, bool AfterOptimization) {
  llvm::Statistic &NumRetains =
      AfterOptimization ? NumRetainsAfterOpt : NumRetainsBeforeOpt;
  llvm::Statistic &NumReleases =
      AfterOptimization ? NumReleasesAfterOpt : NumReleasesBeforeOpt;

  // Count the retains and releases currently in the function.
  // ...
    case ARCInstKind::Retain:
      ++NumRetains;
      break;
    case ARCInstKind::Release:
      ++NumReleases;
      break;
  // ...
}
void ObjCARCOpt::init(Function &F) {
  // ...
  // Initialize the metadata kind cache.
  MDKindCache.init(F.getParent());

  // Initialize our runtime entry point cache.
  EP.init(F.getParent());

  // Compute which blocks are in which funclet, if the function uses a
  // scope-based EH personality.
  if (F.hasPersonalityFn() &&
      isScopedEHPersonality(classifyEHPersonality(F.getPersonalityFn())))
    BlockEHColors = colorEHFunclets(F);
}
bool ObjCARCOpt::run(Function &F, AAResults &AA) {
  if (!EnableARCOpts)
    return false;
  // ...
  BundledRetainClaimRVs BRV(EP, false, false);
  BundledInsts = &BRV;

  LLVM_DEBUG(dbgs() << "<<< ObjCARCOpt: Visiting Function: " << F.getName()
                    /* ... */);

  // Bundled retainRV/claimRV calls are inserted after the invokes they are
  // attached to; doing so may change the CFG.
  // ...
  CFGChanged |= R.second;

  // Gather statistics about the function before optimization.
  if (AreStatisticsEnabled()) {
    GatherStatistics(F, false);
  }
  // ...

  // This pass performs several distinct transformations. As a compile-time
  // aid, skip the ones whose runtime functions are not used in this function.
  // ...
  OptimizeIndividualCalls(F);
  // Optimizations for weak pointers.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::LoadWeak)) |
                            (1 << unsigned(ARCInstKind::LoadWeakRetained)) |
                            (1 << unsigned(ARCInstKind::StoreWeak)) |
                            (1 << unsigned(ARCInstKind::InitWeak)) |
                            (1 << unsigned(ARCInstKind::CopyWeak)) |
                            (1 << unsigned(ARCInstKind::MoveWeak)) |
                            (1 << unsigned(ARCInstKind::DestroyWeak))))
    OptimizeWeakCalls(F);

  // Optimizations for retain+release pairs.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Retain)) |
                            (1 << unsigned(ARCInstKind::RetainRV)) |
                            (1 << unsigned(ARCInstKind::RetainBlock))))
    if (UsedInThisFunction & (1 << unsigned(ARCInstKind::Release)))
      // Run OptimizeSequences until it either stops making changes or no
      // nested retain+release pairs remain to be exposed.
      while (OptimizeSequences(F)) {}

  // Optimizations if objc_autorelease is used.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::Autorelease)) |
                            (1 << unsigned(ARCInstKind::AutoreleaseRV))))
    OptimizeReturns(F);

  // Optimizations for autorelease pools.
  if (UsedInThisFunction & ((1 << unsigned(ARCInstKind::AutoreleasepoolPush)) |
                            (1 << unsigned(ARCInstKind::AutoreleasepoolPop))))
    OptimizeAutoreleasePools(F);

  // Gather statistics after optimization.
  if (AreStatisticsEnabled()) {
    GatherStatistics(F, true);
  }
  // ...
}
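
// UsedInThisFunction is a simple bit set: OptimizeIndividualCallImpl does
// "UsedInThisFunction |= 1 << unsigned(Class)" for every ARC call it sees,
// and the bit tests in run() above consult it so that, for example, the
// weak-pointer sub-pass is skipped entirely in functions that never touch
// objc_*Weak. This relies on ARCInstKind having fewer than 32 enumerators.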
/// Interprocedurally determine if calls made by the given call site can
/// possibly produce autoreleases.
static bool MayAutorelease(const CallBase &CB, unsigned Depth = 0) {
  // ...
    // Without an exact definition of the callee, conservatively assume it may
    // autorelease.
    if (!Callee->hasExactDefinition())
      return true;
  // ...
}
/// Try to eliminate autorelease pool push/pop pairs whose scope contains no
/// autoreleases.
void ObjCARCOpt::OptimizeAutoreleasePools(Function &F) {
  LLVM_DEBUG(dbgs() << "\n== ObjCARCOpt::OptimizeAutoreleasePools ==\n");

  OptimizationRemarkEmitter ORE(&F);

  // Process each basic block independently, keeping a stack of the currently
  // open autorelease pool scopes as (push call, saw-an-autorelease) pairs.
  for (BasicBlock &BB : F) {
    // ...
    switch (Class) {
    case ARCInstKind::AutoreleasepoolPush: {
      // Start tracking a new, so far empty, pool scope.
      // ...
      LLVM_DEBUG(dbgs() << "Found autorelease pool push: " << *Push << "\n");
      break;
    }
    case ARCInstKind::AutoreleasepoolPop: {
      // ...
      if (PoolStack.empty())
        break;

      auto &TopPool = PoolStack.back();
      CallInst *PendingPush = TopPool.first;
      bool HasAutoreleaseInScope = TopPool.second;

      // The pop must match the pending push, and the scope must be empty, for
      // the pair to be removable.
      if (Pop->getArgOperand(0)->stripPointerCasts() != PendingPush)
        break;
      // ...
      if (HasAutoreleaseInScope)
        break;
      // ...
      ORE.emit([&]() {
        return OptimizationRemark(DEBUG_TYPE, "AutoreleasePoolElimination",
                                  /* ... */)
               << "eliminated empty autorelease pool pair";
      });
      // ...
      Pop->eraseFromParent();
      // ...
      break;
    }
    case ARCInstKind::CallOrUser:
    case ARCInstKind::Call:
      // ...
    case ARCInstKind::Autorelease:
    case ARCInstKind::AutoreleaseRV:
    case ARCInstKind::FusedRetainAutorelease:
    case ARCInstKind::FusedRetainAutoreleaseRV:
    case ARCInstKind::LoadWeak: {
      // These may produce autoreleases, so the innermost pool scope is no
      // longer empty.
      if (!PoolStack.empty()) {
        PoolStack.back().second = true;
        LLVM_DEBUG(
            dbgs()
            << "Found autorelease or potential autorelease in pool scope: "
            /* ... */);
      }
      break;
    }
    // These instruction kinds cannot produce autoreleases.
    case ARCInstKind::Retain:
    case ARCInstKind::RetainRV:
    case ARCInstKind::UnsafeClaimRV:
    case ARCInstKind::RetainBlock:
    case ARCInstKind::Release:
    case ARCInstKind::NoopCast:
    case ARCInstKind::LoadWeakRetained:
    case ARCInstKind::StoreWeak:
    case ARCInstKind::InitWeak:
    case ARCInstKind::MoveWeak:
    case ARCInstKind::CopyWeak:
    case ARCInstKind::DestroyWeak:
    case ARCInstKind::StoreStrong:
    case ARCInstKind::IntrinsicUser:
    case ARCInstKind::User:
    case ARCInstKind::None:
      break;
    }
  }
}
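
// The transformation above targets code roughly of the form
//
//   %pool = call ptr @objc_autoreleasePoolPush()
//   ... no autoreleases and no calls that may autorelease ...
//   call void @objc_autoreleasePoolPop(ptr %pool)
//
// When the scope is provably empty, both calls of the pair are removed;
// nested pools are handled by the per-block stack of pending pushes.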
PreservedAnalyses ObjCARCOptPass::run(Function &F,
                                      FunctionAnalysisManager &AM) {
  ObjCARCOpt OCAO;
  OCAO.init(F);

  bool Changed = OCAO.run(F, AM.getResult<AAManager>(F));
  bool CFGChanged = OCAO.hasCFGChanged();
  if (Changed) {
    PreservedAnalyses PA;
    if (!CFGChanged)
      PA.preserveSet<CFGAnalyses>();
    return PA;
  }
  return PreservedAnalyses::all();
}