#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsNVPTX.h"

#define DEBUG_TYPE "attributor"
    cl::desc("Manifest Attributor internal string attributes."),

    cl::desc("Maximum number of potential values to be "
             "tracked for each position."),

    "attributor-max-potential-values-iterations", cl::Hidden,
    cl::desc(
        "Maximum number of iterations we keep dismantling potential values."),

STATISTIC(NumAAs, "Number of abstract attributes created");
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {                                                                            \
    STATS_DECL(NAME, TYPE, MSG)                                                \
    STATS_TRACK(NAME, TYPE)                                                    \
  }
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
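// Illustration (not part of the original source): how the macros above
// compose. A use such as STATS_DECLTRACK_FN_ATTR(nounwind) expands, roughly,
// to
//   {
//     STATISTIC(NumIRFunction_nounwind,
//               "Number of functions marked 'nounwind'");
//     ++(NumIRFunction_nounwind);
//   }
// i.e. the counter is declared once per use site and bumped every time the
// surrounding trackStatistics() override runs.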
#define PIPE_OPERATOR(CLASS)                                                   \
  raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) {                  \
    return OS << static_cast<const AbstractAttribute &>(AA);                   \
  }
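// Illustration (assumed usage, matching how this file employs the macro):
//   PIPE_OPERATOR(AANoUnwind)
// defines a stream operator so that LLVM_DEBUG(dbgs() << SomeAANoUnwindAA)
// routes through the common AbstractAttribute printer.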
  if (DL.getTypeSizeInBits(Ty) != DL.getTypeAllocSizeInBits(Ty))

  if (VectorType *SeqTy = dyn_cast<VectorType>(Ty))

  if (ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))

  if (!isa<StructType>(Ty))

    StartPos += DL.getTypeAllocSizeInBits(ElTy);
                                      bool AllowVolatile) {
  if (!AllowVolatile && I->isVolatile())
    return nullptr;

  if (auto *LI = dyn_cast<LoadInst>(I)) {
    return LI->getPointerOperand();
  }

  if (auto *SI = dyn_cast<StoreInst>(I)) {
    return SI->getPointerOperand();
  }

  if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(I)) {
    return CXI->getPointerOperand();
  }

  if (auto *RMWI = dyn_cast<AtomicRMWInst>(I)) {
    return RMWI->getPointerOperand();
  }
  assert(Offset >= 0 && "Negative offset not supported yet!");
                    << "-bytes as " << *ResTy << "\n");

  Type *Ty = PtrElemTy;

  std::string GEPName = Ptr->getName().str();
    GEPName += "." + std::to_string(Index.getZExtValue());

  if (IntOffset != 0) {
                                              Ptr->getName() + ".cast");
    const DataLayout &DL, APInt &Offset, bool GetMinOffset,
    bool AllowNonInbounds, bool UseAssumed = false) {

  auto AttributorAnalysis = [&](Value &V, APInt &ROffset) -> bool {
                                         UseAssumed ? DepClassTy::OPTIONAL
                                                    : DepClassTy::NONE);
    if (Range.isFullSet())
      return false;

    if (GetMinOffset)
      ROffset = Range.getSignedMin();
    else
      ROffset = Range.getSignedMax();
    return true;
  };

                        const Value *Ptr, int64_t &BytesOffset,

  APInt OffsetAPInt(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
                            true, AllowNonInbounds);
template <typename AAType, typename StateType = typename AAType::StateType>
static void clampReturnedValueStates(
    Attributor &A, const AAType &QueryingAA, StateType &S,
    const IRPosition::CallBaseContext *CBContext = nullptr) {
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
                    << QueryingAA << " into " << S << "\n");

  assert((QueryingAA.getIRPosition().getPositionKind() ==
              IRPosition::IRP_RETURNED ||
          QueryingAA.getIRPosition().getPositionKind() ==
              IRPosition::IRP_CALL_SITE_RETURNED) &&
         "Can only clamp returned value states for a function returned or call "
         "site returned position!");

  std::optional<StateType> T;

  auto CheckReturnValue = [&](Value &RV) -> bool {
    const IRPosition &RVPos = IRPosition::value(RV, CBContext);
    const AAType &AA =
        A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
    LLVM_DEBUG(dbgs() << "[Attributor] RV: " << RV << " AA: " << AA.getAsStr()
                      << " @ " << RVPos << "\n");
    const StateType &AAS = AA.getState();
    if (!T)
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
                      << "\n");
    return T->isValidState();
  };

  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA))
    S.indicatePessimisticFixpoint();
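// Illustration (not from the original source): clampReturnedValueStates
// computes the meet of the AAType states over every value the function may
// return. For a function like
//   int *f(bool c, int *a, int *b) { return c ? a : b; }   // C, for intuition
// the nonnull-ness of f's return position is the conjunction ("clamp") of the
// states derived for a and b; if any returned value loses validity, the query
// position falls to a pessimistic fixpoint.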
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool PropagateCallBaseContext = false>
struct AAReturnedFromReturnedValues : public BaseType {

  ChangeStatus updateImpl(Attributor &A) override {
    StateType S(StateType::getBestState(this->getState()));
    clampReturnedValueStates<AAType, StateType>(
        A, *this, S,
        PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
    return clampStateAndIndicateChange<StateType>(this->getState(), S);
  }
};
template <typename AAType, typename StateType = typename AAType::StateType>
static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA,
                                        StateType &S) {
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
                    << QueryingAA << " into " << S << "\n");

  assert(QueryingAA.getIRPosition().getPositionKind() ==
             IRPosition::IRP_ARGUMENT &&
         "Can only clamp call site argument states for an argument position!");

  std::optional<StateType> T;

  unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();

    const AAType &AA =
        A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
    LLVM_DEBUG(dbgs() << "[Attributor] ACS: " << *ACS.getInstruction()
                      << " AA: " << AA.getAsStr() << " @" << ACSArgPos << "\n");
    const StateType &AAS = AA.getState();
    if (!T)
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
                      << "\n");
    return T->isValidState();
  };

  bool UsedAssumedInformation = false;
  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true,
                              UsedAssumedInformation))
    S.indicatePessimisticFixpoint();
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType>
bool getArgumentStateFromCallBaseContext(Attributor &A,
         "Expected an 'argument' position !");

  assert(ArgNo >= 0 && "Invalid Arg No!");

  const auto &AA = A.getAAFor<AAType>(
      DepClassTy::REQUIRED);
  const StateType &CBArgumentState =
      static_cast<const StateType &>(AA.getState());

  LLVM_DEBUG(dbgs() << "[Attributor] Bridging call site context to argument"
                    << "Position:" << Pos << "CB Arg state:" << CBArgumentState

  State ^= CBArgumentState;
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          bool BridgeCallBaseContext = false>
struct AAArgumentFromCallSiteArguments : public BaseType {

  ChangeStatus updateImpl(Attributor &A) override {
    StateType S = StateType::getBestState(this->getState());

    if (BridgeCallBaseContext) {
      bool Success =
          getArgumentStateFromCallBaseContext<AAType, BaseType, StateType>(
              A, *this, this->getIRPosition(), S);
      if (Success)
        return clampStateAndIndicateChange<StateType>(this->getState(), S);
    }
    clampCallSiteArgumentStates<AAType, StateType>(A, *this, S);

    return clampStateAndIndicateChange<StateType>(this->getState(), S);
  }
};
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool IntroduceCallBaseContext = false>
struct AACallSiteReturnedFromReturned : public BaseType {

  ChangeStatus updateImpl(Attributor &A) override {
    assert(this->getIRPosition().getPositionKind() ==
               IRPosition::IRP_CALL_SITE_RETURNED &&
           "Can only wrap function returned positions for call site returned "
           "positions!");
    auto &S = this->getState();

    const Function *AssociatedFunction =
        this->getIRPosition().getAssociatedFunction();
    if (!AssociatedFunction)
      return S.indicatePessimisticFixpoint();

    CallBase &CBContext = cast<CallBase>(this->getAnchorValue());
    if (IntroduceCallBaseContext)
      LLVM_DEBUG(dbgs() << "[Attributor] Introducing call base context:"
                        << CBContext << "\n");

    IRPosition FnPos = IRPosition::returned(
        *AssociatedFunction, IntroduceCallBaseContext ? &CBContext : nullptr);
    const AAType &AA = A.getAAFor<AAType>(*this, FnPos, DepClassTy::REQUIRED);
  }
};
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInContext(AAType &AA, Attributor &A,
  auto EIt = Explorer.begin(CtxI), EEnd = Explorer.end(CtxI);
  for (unsigned u = 0; u < Uses.size(); ++u) {
    if (const Instruction *UserI = dyn_cast<Instruction>(U->getUser())) {
      if (Found && AA.followUseInMBEC(A, U, UserI, State))
        for (const Use &Us : UserI->uses())

template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInMBEC(AAType &AA, Attributor &A, StateType &S,
  for (const Use &U : AA.getIRPosition().getAssociatedValue().uses())
      A.getInfoCache().getMustBeExecutedContextExplorer();

  followUsesInContext<AAType>(AA, A, Explorer, &CtxI, Uses, S);

  if (S.isAtFixpoint())
    return;

    if (const BranchInst *Br = dyn_cast<BranchInst>(I))
      if (Br->isConditional())

    StateType ParentState;

    ParentState.indicateOptimisticFixpoint();

    for (const BasicBlock *BB : Br->successors()) {
      StateType ChildState;

      size_t BeforeSize = Uses.size();
      followUsesInContext(AA, A, Explorer, &BB->front(), Uses, ChildState);

      for (auto It = Uses.begin() + BeforeSize; It != Uses.end();)

      ParentState &= ChildState;
    }
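// Illustration (not from the original source): followUsesInMBEC widens the
// must-be-executed context across a conditional branch by exploring each
// successor separately and taking the conjunction. For
//   br i1 %c, label %t, label %f
// a fact derived for uses in both %t and %f (each successor's ChildState)
// survives into ParentState, while uses discovered in only one child are
// erased again before the next branch is processed.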
namespace PointerInfo {

      R.indicatePessimisticFixpoint();

    if (!Range.mayOverlap(ItRange))
      continue;
    bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
    for (auto Index : It.getSecond()) {
      if (!CB(Access, IsExact))
        return false;

    for (unsigned Index : LocalList->getSecond()) {
      if (Range.offsetOrSizeAreUnknown())

  RemoteI = RemoteI ? RemoteI : &I;

  bool AccExists = false;
  for (auto Index : LocalList) {
    if (A.getLocalInst() == &I) {

      dbgs() << "[AAPointerInfo] Inserting access in new offset bins\n";

    for (auto Key : ToAdd) {
             "New Access should have been at AccIndex");
      LocalList.push_back(AccIndex);

  auto Before = Current;
  if (Current == Before)
    return ChangeStatus::UNCHANGED;

  auto &ExistingRanges = Before.getRanges();
  auto &NewRanges = Current.getRanges();

      dbgs() << "[AAPointerInfo] Removing access from old offset bins\n";
             "Expected bin to actually contain the Access.");
  using const_iterator = VecTy::const_iterator;

  const_iterator begin() const { return Offsets.begin(); }
  const_iterator end() const { return Offsets.end(); }

    return Offsets == RHS.Offsets;

  bool isUnassigned() const { return Offsets.size() == 0; }

  bool isUnknown() const {

  void addToAll(int64_t Inc) {
    for (auto &Offset : Offsets) {
      Offset += Inc;
    }
  }

  void merge(const OffsetInfo &R) { Offsets.append(R.Offsets); }
struct AAPointerInfoImpl
    : public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {

  const std::string getAsStr() const override {
    return std::string("PointerInfo ") +
           (isValidState() ? (std::string("#") +
                              std::to_string(OffsetBins.size()) + " bins")

    return AAPointerInfo::manifest(A);
  bool forallInterferingAccesses(
    return State::forallInterferingAccesses(Range, CB);

  bool forallInterferingAccesses(
      function_ref<bool(const Access &, bool)> UserCB, bool &HasBeenWrittenTo,
    HasBeenWrittenTo = false;

    const auto &NoSyncAA = A.getAAFor<AANoSync>(
    bool AllInSameNoSyncFn = NoSyncAA.isAssumedNoSync();
    bool InstIsExecutedByInitialThreadOnly =
        ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(I);
    bool InstIsExecutedInAlignedRegion =
        ExecDomainAA && ExecDomainAA->isExecutedInAlignedRegion(A, I);
    if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
      A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
    bool IsThreadLocalObj =

    auto CanIgnoreThreadingForInst = [&](const Instruction &I) -> bool {
      if (IsThreadLocalObj || AllInSameNoSyncFn)
        return true;
      const auto *FnExecDomainAA =
          I.getFunction() == &Scope
      if (!FnExecDomainAA)
        return false;
      if (InstIsExecutedInAlignedRegion ||
          FnExecDomainAA->isExecutedInAlignedRegion(A, I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
        return true;
      }
      if (InstIsExecutedByInitialThreadOnly &&
          FnExecDomainAA->isExecutedByInitialThreadOnly(I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
        return true;
      }
      return false;
    };

    auto CanIgnoreThreading = [&](const Access &Acc) -> bool {
      return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
             (Acc.getRemoteInst() != Acc.getLocalInst() &&
              CanIgnoreThreadingForInst(*Acc.getLocalInst()));
    };
    const bool FindInterferingWrites = I.mayReadFromMemory();
    const bool FindInterferingReads = I.mayWriteToMemory();
    const bool UseDominanceReasoning =
        FindInterferingWrites && NoRecurseAA.isKnownNoRecurse();

      if (!(T.isAMDGPU() || T.isNVPTX()))
      case AA::GPUAddressSpace::Shared:
      case AA::GPUAddressSpace::Constant:
      case AA::GPUAddressSpace::Local:

    std::function<bool(const Function &)> IsLiveInCalleeCB;

    if (auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
      if (NoRecurseAA.isAssumedNoRecurse()) {
        IsLiveInCalleeCB = [AIFn](const Function &Fn) { return AIFn != &Fn; };
      }
    } else if (auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
      if (HasKernelLifetime(GV, *GV->getParent()))
        IsLiveInCalleeCB = [](const Function &Fn) {

    auto AccessCB = [&](const Access &Acc, bool Exact) {
      if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &I) {
        if (Acc.isWrite() || (isa<LoadInst>(I) && Acc.isWriteOrAssumption()))
          ExclusionSet.insert(Acc.getRemoteInst());
      }

      if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
          (!FindInterferingReads || !Acc.isRead()))
        return true;

      bool Dominates = FindInterferingWrites && DT && Exact &&
                       Acc.isMustAccess() &&
                       (Acc.getRemoteInst()->getFunction() == &Scope) &&
      if (Dominates)
        DominatingWrites.insert(&Acc);

      AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &Scope;

      InterferingAccesses.push_back({&Acc, Exact});
      return true;
    };
    if (!State::forallInterferingAccesses(I, AccessCB, Range))
      return false;

    HasBeenWrittenTo = !DominatingWrites.empty();

    for (const Access *Acc : DominatingWrites) {
      if (!LeastDominatingWriteInst) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      } else if (DT->dominates(LeastDominatingWriteInst,
                               Acc->getRemoteInst())) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      }
    }

    auto CanSkipAccess = [&](const Access &Acc, bool Exact) {
      if (!CanIgnoreThreading(Acc))
        return false;

      bool ReadChecked = !FindInterferingReads;
      bool WriteChecked = !FindInterferingWrites;

                                  &ExclusionSet, IsLiveInCalleeCB))

      if (!WriteChecked) {
                                    &ExclusionSet, IsLiveInCalleeCB))
          WriteChecked = true;
      }

      if (!WriteChecked && HasBeenWrittenTo &&
          Acc.getRemoteInst()->getFunction() != &Scope) {

        if (!FnReachabilityAA.instructionCanReach(
                A, *LeastDominatingWriteInst,
                *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
          WriteChecked = true;
      }

      if (ReadChecked && WriteChecked)
        return true;

      if (!DT || !UseDominanceReasoning)
        return false;
      if (!DominatingWrites.count(&Acc))
        return false;
      return LeastDominatingWriteInst != Acc.getRemoteInst();
    };

    for (auto &It : InterferingAccesses) {
      if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
          !CanSkipAccess(*It.first, It.second)) {
        if (!UserCB(*It.first, It.second))
          return false;
      }
    }
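// Illustration (not from the original source): the filter above lets a query
// such as "may this load be replaced by a stored value?" ignore accesses that
// (a) happen in provably single-threaded or aligned-execution regions, or
// (b) are writes strictly dominated by another must-write to the same exact
// range. For
//   store i32 0, ptr %p        ; dominated, can be skipped
//   store i32 1, ptr %p        ; least dominating write
//   %v = load i32, ptr %p
// only the second store needs to be presented to the user callback.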
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);
    bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (IsByval && !RAcc.isRead())
          continue;
        bool UsedAssumedInformation = false;
        auto Content = A.translateArgumentToCallSiteContent(
            RAcc.getContent(), CB, *this, UsedAssumedInformation);
        AK = AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
        AK = AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));

        Changed |= addAccess(A, RAcc.getRanges(), CB, Content, AK,
                             RAcc.getType(), RAcc.getRemoteInst());
      }
    }
                                    const OffsetInfo &Offsets, CallBase &CB) {
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        for (auto Offset : Offsets) {
          if (!NewRanges.isUnknown()) {
            NewRanges.addToAllOffsets(Offset);
          }
          addAccess(A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
                    RAcc.getType(), RAcc.getRemoteInst());
        }
      }
    }

  void trackPointerInfoStatistics(const IRPosition &IRP) const {}
    for (auto &It : OffsetBins) {
      O << "[" << It.first.Offset << "-" << It.first.Offset + It.first.Size
        << "] : " << It.getSecond().size() << "\n";
      for (auto AccIndex : It.getSecond()) {
        auto &Acc = AccessList[AccIndex];
        O << " - " << Acc.getKind() << " - " << *Acc.getLocalInst() << "\n";
        if (Acc.getLocalInst() != Acc.getRemoteInst())
          O << " --> " << *Acc.getRemoteInst()
        if (!Acc.isWrittenValueYetUndetermined()) {
          if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
            O << " - c: func " << Acc.getWrittenValue()->getName()
          else if (Acc.getWrittenValue())
            O << " - c: " << *Acc.getWrittenValue() << "\n";
          else
            O << " - c: <unknown>\n";
        }
      }
    }
struct AAPointerInfoFloating : public AAPointerInfoImpl {
  AAPointerInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

    using namespace AA::PointerInfo;
    TypeSize AccessSize = DL.getTypeStoreSize(&Ty);

    if (!VT || VT->getElementCount().isScalable() ||
        (*Content)->getType() != VT ||
        DL.getTypeStoreSize(VT->getElementType()).isScalable()) {

      Type *ElementType = VT->getElementType();
      int64_t ElementSize = DL.getTypeStoreSize(ElementType).getFixedValue();
      auto *ConstContent = cast<Constant>(*Content);

      for (int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
        Changed = Changed | addAccess(A, {ElementOffsets, ElementSize}, I,
                                      ElementContent, Kind, ElementType);

        for (auto &ElementOffset : ElementOffsets)
          ElementOffset += ElementSize;
      }

                                  OffsetInfo &UsrOI, const OffsetInfo &PtrOI,

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &A,
                                                   const OffsetInfo &PtrOI,
  unsigned BitWidth = DL.getIndexTypeSizeInBits(GEP->getType());

  assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
         "Don't look for constant values if the offset has already been "
         "determined to be unknown.");

  if (!GEP->collectOffset(DL, BitWidth, VariableOffsets, ConstantOffset)) {

                    << (VariableOffsets.empty() ? "" : "not") << " constant "

  Union.addToAll(ConstantOffset.getSExtValue());

  for (const auto &VI : VariableOffsets) {
    if (!PotentialConstantsAA.isValidState()) {

    if (PotentialConstantsAA.undefIsContained())
      continue;

    if (AssumedSet.empty())
      return false;

    for (const auto &ConstOffset : AssumedSet) {
      auto CopyPerOffset = Union;
      CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
                             VI.second.getZExtValue());
      Product.merge(CopyPerOffset);
    }
  }

  UsrOI = std::move(Union);
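// Illustration (not from the original source): the cross-product above in
// action. For a GEP such as
//   %g = getelementptr i32, ptr %p, i64 %i
// with PtrOI = {0, 16}, a constant part of 0, and %i having potential
// constants {0, 1} (element scale 4 bytes), the resulting UsrOI is
//   {0, 16} x {0, 4} -> {0, 16, 4, 20}
// i.e. one copy of the incoming offsets per assumed constant, merged together.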
    using namespace AA::PointerInfo;
    Value &AssociatedValue = getAssociatedValue();
    OffsetInfoMap[&AssociatedValue].insert(0);

    auto HandlePassthroughUser = [&](Value *Usr, Value *CurPtr, bool &Follow) {
      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];
      assert(!PtrOI.isUnassigned() &&
             "Cannot pass through if the input Ptr was not visited!");

    const auto *F = getAnchorScope();
        F ? A.getInfoCache().getTargetLibraryInfoForFunction(*F) : nullptr;

    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Analyze " << *CurPtr << " in "
                        << *Usr
             "The current pointer offset should have been seeded!");

        return HandlePassthroughUser(Usr, CurPtr, Follow);
        if (CE->isCompare())
        if (!isa<GEPOperator>(CE)) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled constant user " << *CE

      if (auto *GEP = dyn_cast<GEPOperator>(Usr)) {
        auto &UsrOI = OffsetInfoMap[Usr];
        auto &PtrOI = OffsetInfoMap[CurPtr];
        if (UsrOI.isUnknown())
        if (PtrOI.isUnknown()) {
        Follow = collectConstantsForGEP(A, DL, UsrOI, PtrOI, GEP);
      }
      if (isa<PtrToIntInst>(Usr))
      if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
        return HandlePassthroughUser(Usr, CurPtr, Follow);
      if (isa<PHINode>(Usr)) {
        bool IsFirstPHIUser = !OffsetInfoMap.count(Usr);
        auto &UsrOI = OffsetInfoMap[Usr];
        auto &PtrOI = OffsetInfoMap[CurPtr];

        if (PtrOI.isUnknown()) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand offset unknown "
                            << *CurPtr << " in " << *Usr << "\n");
          Follow = !UsrOI.isUnknown();
        }

        if (UsrOI == PtrOI) {
          assert(!PtrOI.isUnassigned() &&
                 "Cannot assign if the current Ptr was not visited!");
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant (so far)");
        }

        auto It = OffsetInfoMap.find(CurPtrBase);
        if (It == OffsetInfoMap.end()) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand is too complex "
                            << *CurPtr << " in " << *Usr << "\n");
        }

          auto *BB = I->getParent();
          return BB == C->getHeader();

        if (mayBeInCycleHeader(CI, cast<Instruction>(Usr))) {
          auto BaseOI = It->getSecond();
          BaseOI.addToAll(Offset.getZExtValue());
          if (IsFirstPHIUser || BaseOI == UsrOI) {
            LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant " << *CurPtr
                              << " in " << *Usr << "\n");
            return HandlePassthroughUser(Usr, CurPtr, Follow);
          }

          dbgs() << "[AAPointerInfo] PHI operand pointer offset mismatch "
                 << *CurPtr << " in " << *Usr << "\n");
      if (auto *LoadI = dyn_cast<LoadInst>(Usr)) {
        if (!handleAccess(A, *LoadI, nullptr, AK,
                          OffsetInfoMap[CurPtr].Offsets, Changed,

          if (auto *II = dyn_cast<IntrinsicInst>(&I))
            return II->isAssumeLikeIntrinsic();
        } while (FromI && FromI != ToI);

        if (IntrI.getIntrinsicID() != Intrinsic::assume)
          return false;
        if (IntrI.getParent() == BB) {
          if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
          if ((*PredIt) != BB)
          if (SuccBB == IntrBB)
            continue;
          if (isa<UnreachableInst>(SuccBB->getTerminator()))
            continue;
          if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
          if (IsImpactedInRange(&IntrBB->front(), &IntrI))

        std::pair<Value *, IntrinsicInst *> Assumption;
        for (const Use &LoadU : LoadI->uses()) {
          if (auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
            if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
              continue;
            for (const Use &CmpU : CmpI->uses()) {
              if (auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
                if (!IsValidAssume(*IntrI))
                  continue;
                int Idx = CmpI->getOperandUse(0) == LoadU;
                Assumption = {CmpI->getOperand(Idx), IntrI};
                break;
              }
            }
          }
          if (Assumption.first)
            break;
        }

        if (!Assumption.first || !Assumption.second)
          return true;

                          << *Assumption.second << ": " << *LoadI
                          << " == " << *Assumption.first << "\n");

        return handleAccess(
            A, *Assumption.second, Assumption.first, AccessKind::AK_ASSUMPTION,
            OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
      }
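// Illustration (assumed IR shape, not from this file): the pattern matched
// above is
//   %v = load i32, ptr %p
//   %c = icmp eq i32 %v, 42
//   call void @llvm.assume(i1 %c)
// which lets the load be recorded as an AK_ASSUMPTION access of the value 42,
// provided the assume is reachable without an intervening clobber.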
        for (auto *OtherOp : OtherOps) {
          if (OtherOp == CurPtr) {
                << "[AAPointerInfo] Escaping use in store like instruction "
                << I
          }
        }

        bool UsedAssumedInformation = false;
        std::optional<Value *> Content = nullptr;
        return handleAccess(A, I, Content, AK, OffsetInfoMap[CurPtr].Offsets,

      if (auto *StoreI = dyn_cast<StoreInst>(Usr))
        return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
                               *StoreI->getValueOperand()->getType(),
                               {StoreI->getValueOperand()}, AccessKind::AK_W);
      if (auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
        return HandleStoreLike(*RMWI, nullptr, *RMWI->getValOperand()->getType(),
                               {RMWI->getValOperand()}, AccessKind::AK_RW);
      if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
        return HandleStoreLike(
            *CXI, nullptr, *CXI->getNewValOperand()->getType(),
            {CXI->getCompareOperand(), CXI->getNewValOperand()},

      if (auto *CB = dyn_cast<CallBase>(Usr)) {
          Changed = translateAndAddState(A, CSArgPI, OffsetInfoMap[CurPtr], *CB) |
                    Changed;
          return isValidState();
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Call user not handled " << *CB

      LLVM_DEBUG(dbgs() << "[AAPointerInfo] User not handled " << *Usr << "\n");

    auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
      assert(OffsetInfoMap.count(OldU) && "Old use should be known already!");
      if (OffsetInfoMap.count(NewU)) {
        if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
          dbgs() << "[AAPointerInfo] Equivalent use callback failed: "
                 << OffsetInfoMap[NewU] << " vs " << OffsetInfoMap[OldU]
        }
        return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
      }
      OffsetInfoMap[NewU] = OffsetInfoMap[OldU];
      return true;
    };
    if (!A.checkForAllUses(UsePred, *this, AssociatedValue,
                           true, EquivalentUseCB)) {
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Check for all uses failed, abort!\n");
      return indicatePessimisticFixpoint();
    }

      dbgs() << "Accesses by bin after update:\n";
struct AAPointerInfoReturned final : AAPointerInfoImpl {
  AAPointerInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

    return indicatePessimisticFixpoint();

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoArgument final : AAPointerInfoFloating {
  AAPointerInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void initialize(Attributor &A) override {
    AAPointerInfoFloating::initialize(A);
    if (getAnchorScope()->isDeclaration())
      indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
  AAPointerInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

    using namespace AA::PointerInfo;
    if (auto *MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
      LengthVal = Length->getSExtValue();
      unsigned ArgNo = getIRPosition().getCallSiteArgNo();
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled memory intrinsic "
        return indicatePessimisticFixpoint();
      auto Kind =
          ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
      Changed =
          Changed | addAccess(A, {0, LengthVal}, *MI, nullptr, Kind, nullptr);

      dbgs() << "Accesses by bin after update:\n";
    if (ArgAA.getState().isValidState())
      return translateAndAddStateFromCallee(A, ArgAA,
                                            *cast<CallBase>(getCtxI()));
    if (!Arg->getParent()->isDeclaration())
      return indicatePessimisticFixpoint();

    const auto &NoCaptureAA =
        A.getAAFor<AANoCapture>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (!NoCaptureAA.isAssumedNoCapture())
      return indicatePessimisticFixpoint();

    bool IsKnown = false;
      return ChangeStatus::UNCHANGED;
    auto Kind =
        ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
  AAPointerInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};
  const std::string getAsStr() const override {
    return getAssumed() ? "nounwind" : "may-unwind";
  }

    auto Opcodes = {
        (unsigned)Instruction::Invoke,      (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call,        (unsigned)Instruction::CleanupRet,
        (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};

      if (const auto *CB = dyn_cast<CallBase>(&I)) {

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes,
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
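// Illustration (not from the original source): only the six opcodes listed
// above can transfer control out of a function by unwinding, so nounwind
// deduction scans just those. A site like
//   invoke void @g() to label %ok unwind label %lpad
// keeps the caller "may-unwind" unless @g itself is assumed nounwind.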
struct AANoUnwindFunction final : public AANoUnwindImpl {
  AANoUnwindFunction(const IRPosition &IRP, Attributor &A)
      : AANoUnwindImpl(IRP, A) {}

struct AANoUnwindCallSite final : AANoUnwindImpl {
  AANoUnwindCallSite(const IRPosition &IRP, Attributor &A)
      : AANoUnwindImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AANoUnwindImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration())
      indicatePessimisticFixpoint();
  }

    auto &FnAA = A.getAAFor<AANoUnwind>(*this, FnPos, DepClassTy::REQUIRED);
  bool IsFixed = false;
  bool IsValidState = true;

    IsValidState = true;
    ReturnedValues.clear();

    if (!F || F->isDeclaration()) {
      indicatePessimisticFixpoint();
      return;
    }
    assert(!F->getReturnType()->isVoidTy() &&
           "Did not expect a void return type!");

    auto &OpcodeInstMap = A.getInfoCache().getOpcodeInstMapForFunction(*F);

      if (Arg.hasReturnedAttr()) {
        auto &ReturnInstSet = ReturnedValues[&Arg];
        if (auto *Insts = OpcodeInstMap.lookup(Instruction::Ret))
            ReturnInstSet.insert(cast<ReturnInst>(RI));

        indicateOptimisticFixpoint();
        return;
      }

    if (!A.isFunctionIPOAmendable(*F))
      indicatePessimisticFixpoint();

  const AbstractState &getState() const override { return *this; }

  size_t getNumReturnValues() const override {
    return isValidState() ? ReturnedValues.size() : -1;
  }

  std::optional<Value *> getAssumedUniqueReturnValue(Attributor &A) const;

  bool checkForAllReturnedValuesAndReturnInsts(

  const std::string getAsStr() const override;

  bool isAtFixpoint() const override { return IsFixed; }

  bool isValidState() const override { return IsValidState; }

    return ChangeStatus::UNCHANGED;

    IsValidState = false;
    return ChangeStatus::CHANGED;

                  "Number of functions with known return values");

  std::optional<Value *> UniqueRV = getAssumedUniqueReturnValue(A);

  if (!UniqueRV || !*UniqueRV)

                  "Number of functions with unique return");

  if (auto *UniqueRVArg = dyn_cast<Argument>(*UniqueRV)) {
    if (UniqueRVArg->getType()->canLosslesslyBitCastTo(
            getAssociatedFunction()->getReturnType())) {
const std::string AAReturnedValuesImpl::getAsStr() const {
  return (isAtFixpoint() ? "returns(#" : "may-return(#") +
         (isValidState() ? std::to_string(getNumReturnValues()) : "?") + ")";
}

std::optional<Value *>
AAReturnedValuesImpl::getAssumedUniqueReturnValue(Attributor &A) const {
  std::optional<Value *> UniqueRV;
  Type *Ty = getAssociatedFunction()->getReturnType();

  auto Pred = [&](Value &RV) -> bool {
    return UniqueRV != std::optional<Value *>(nullptr);
  };

  if (!A.checkForAllReturnedValues(Pred, *this))
bool AAReturnedValuesImpl::checkForAllReturnedValuesAndReturnInsts(
  if (!isValidState())
    return false;

  for (const auto &It : ReturnedValues) {
    Value *RV = It.first;
    if (!Pred(*RV, It.second))
      return false;
  }

  return true;
}

  bool UsedAssumedInformation = false;
                                      UsedAssumedInformation))

    for (auto &VAC : Values) {
             "Assumed returned value should be valid in function scope!");
      if (ReturnedValues[VAC.getValue()].insert(&Ret))

  if (!A.checkForAllInstructions(ReturnInstCB, *this, {Instruction::Ret},
                                 UsedAssumedInformation))
    return indicatePessimisticFixpoint();

struct AAReturnedValuesFunction final : public AAReturnedValuesImpl {
  AAReturnedValuesFunction(const IRPosition &IRP, Attributor &A)
      : AAReturnedValuesImpl(IRP, A) {}

struct AAReturnedValuesCallSite final : AAReturnedValuesImpl {
  AAReturnedValuesCallSite(const IRPosition &IRP, Attributor &A)
      : AAReturnedValuesImpl(IRP, A) {}

                     "supported for call sites yet!");
    return indicatePessimisticFixpoint();

  void trackStatistics() const override {}
  case Intrinsic::nvvm_barrier0:
  case Intrinsic::nvvm_barrier0_and:
  case Intrinsic::nvvm_barrier0_or:
  case Intrinsic::nvvm_barrier0_popc:
    return true;
  case Intrinsic::amdgcn_s_barrier:
    if (ExecutedAligned)
      return true;

  if (auto *FI = dyn_cast<FenceInst>(I))

  if (auto *AI = dyn_cast<AtomicCmpXchgInst>(I)) {

  switch (I->getOpcode()) {
  case Instruction::AtomicRMW:
    Ordering = cast<AtomicRMWInst>(I)->getOrdering();
    break;
  case Instruction::Store:
    Ordering = cast<StoreInst>(I)->getOrdering();
    break;
  case Instruction::Load:
    Ordering = cast<LoadInst>(I)->getOrdering();
    break;
                     "New atomic operations need to be known in the attributor.");

  if (auto *MI = dyn_cast<MemIntrinsic>(I))
    return !MI->isVolatile();
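// Illustration (not from the original source): the ordering switch above is
// what makes
//   store atomic i32 0, ptr %p seq_cst, align 4    ; ordered -> may-sync
//   store atomic i32 0, ptr %p monotonic, align 4  ; relaxed  -> nosync-safe
// differ for nosync purposes: only orderings stronger than monotonic are
// treated as synchronizing.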
  const std::string getAsStr() const override {
    return getAssumed() ? "nosync" : "may-sync";
  }

    if (I.mayReadOrWriteMemory())
    return !cast<CallBase>(I).isConvergent();

    bool UsedAssumedInformation = false;
    if (!A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *this,
                                            UsedAssumedInformation) ||
        !A.checkForAllCallLikeInstructions(CheckForNoSync, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();
struct AANoSyncFunction final : public AANoSyncImpl {
  AANoSyncFunction(const IRPosition &IRP, Attributor &A)
      : AANoSyncImpl(IRP, A) {}

struct AANoSyncCallSite final : AANoSyncImpl {
  AANoSyncCallSite(const IRPosition &IRP, Attributor &A)
      : AANoSyncImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AANoSyncImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration())
      indicatePessimisticFixpoint();
  }

    auto &FnAA = A.getAAFor<AANoSync>(*this, FnPos, DepClassTy::REQUIRED);
struct AANoFreeImpl : public AANoFree {

    const auto &CB = cast<CallBase>(I);
    const auto &NoFreeAA = A.getAAFor<AANoFree>(

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForNoFree, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  const std::string getAsStr() const override {
    return getAssumed() ? "nofree" : "may-free";
  }

struct AANoFreeFunction final : public AANoFreeImpl {
  AANoFreeFunction(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

struct AANoFreeCallSite final : AANoFreeImpl {
  AANoFreeCallSite(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AANoFreeImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration())
      indicatePessimisticFixpoint();
  }

    auto &FnAA = A.getAAFor<AANoFree>(*this, FnPos, DepClassTy::REQUIRED);

struct AANoFreeFloating : AANoFreeImpl {
  AANoFreeFloating(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

    const auto &NoFreeAA = A.getAAFor<AANoFree>(
    if (NoFreeAA.isAssumedNoFree())
      return ChangeStatus::UNCHANGED;

    Value &AssociatedValue = getIRPosition().getAssociatedValue();
    auto Pred = [&](const Use &U, bool &Follow) -> bool {
      if (auto *CB = dyn_cast<CallBase>(UserI)) {
        const auto &NoFreeArg = A.getAAFor<AANoFree>(
            DepClassTy::REQUIRED);
      }

      if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
          isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
        Follow = true;
        return true;
      }
      if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
          isa<ReturnInst>(UserI))
        return true;

    if (!A.checkForAllUses(Pred, *this, AssociatedValue))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANoFreeArgument final : AANoFreeFloating {
  AANoFreeArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

struct AANoFreeCallSiteArgument final : AANoFreeFloating {
  AANoFreeCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

      return indicatePessimisticFixpoint();
    auto &ArgAA = A.getAAFor<AANoFree>(*this, ArgPos, DepClassTy::REQUIRED);

struct AANoFreeReturned final : AANoFreeFloating {
  AANoFreeReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {

  void trackStatistics() const override {}

struct AANoFreeCallSiteReturned final : AANoFreeFloating {
  AANoFreeCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

    return ChangeStatus::UNCHANGED;
static int64_t getKnownNonNullAndDerefBytesForUse(
    Attributor &A, const AbstractAttribute &QueryingAA, Value &AssociatedValue,
    const Use *U, const Instruction *I, bool &IsNonNull, bool &TrackUse) {

  const Value *UseV = U->get();

  if (isa<CastInst>(I)) {

  if (isa<GetElementPtrInst>(I)) {

  if (const auto *CB = dyn_cast<CallBase>(I)) {
          U, {Attribute::NonNull, Attribute::Dereferenceable})) {

      IsNonNull |= DerefAA.isKnownNonNull();
      return DerefAA.getKnownDereferenceableBytes();

  if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() || I->isVolatile())
    return 0;

  if (Base && Base == &AssociatedValue) {
    int64_t DerefBytes = Loc->Size.getValue() + Offset;
    return std::max(int64_t(0), DerefBytes);
  }

    int64_t DerefBytes = Loc->Size.getValue();
    return std::max(int64_t(0), DerefBytes);
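// Illustration (not from the original source): a dereferencing use implies
// known-dereferenceable bytes at the base. Given
//   %q = getelementptr i8, ptr %p, i64 8
//   %v = load i64, ptr %q        ; precise 8-byte access at offset 8
// the base %p is known dereferenceable for 8 + 8 = 16 bytes (Loc->Size plus
// the accumulated offset), and the access also implies nonnull when null is
// not a valid pointer in that address space.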
            getAssociatedValue().getType()->getPointerAddressSpace())) {}

    if (!NullIsDefined &&
        hasAttr({Attribute::NonNull, Attribute::Dereferenceable},
      indicateOptimisticFixpoint();

    if (isa<ConstantPointerNull>(V)) {
      indicatePessimisticFixpoint();
      return;
    }

    AANonNull::initialize(A);

    bool CanBeNull, CanBeFreed;
      indicateOptimisticFixpoint();

    if (isa<GlobalValue>(V)) {
      indicatePessimisticFixpoint();
      return;
    }

      followUsesInMBEC(*this, A, getState(), *CtxI);

    bool IsNonNull = false;
    bool TrackUse = false;
    getKnownNonNullAndDerefBytesForUse(A, *this, getAssociatedValue(), U, I,
                                       IsNonNull, TrackUse);
    State.setKnown(IsNonNull);

  const std::string getAsStr() const override {
    return getAssumed() ? "nonnull" : "may-null";
  }

  const bool NullIsDefined;
struct AANonNullFloating : public AANonNullImpl {
  AANonNullFloating(const IRPosition &IRP, Attributor &A)
      : AANonNullImpl(IRP, A) {}

    bool UsedAssumedInformation = false;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
      Values.push_back({getAssociatedValue(), getCtxI()});

    Stripped = Values.size() != 1 ||
               Values.front().getValue() != &getAssociatedValue();

    if (const Function *Fn = getAnchorScope()) {
                                 DepClassTy::REQUIRED);
      if (!Stripped && this == &AA) {
        T.indicatePessimisticFixpoint();
      return T.isValidState();

    for (const auto &VAC : Values)
      if (!VisitValueCB(*VAC.getValue(), VAC.getCtxI()))
        return indicatePessimisticFixpoint();
struct AANonNullReturned final
    : AAReturnedFromReturnedValues<AANonNull, AANonNull> {

  const std::string getAsStr() const override {
    return getAssumed() ? "nonnull" : "may-null";
  }

struct AANonNullArgument final
    : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
  AANonNullArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP, A) {}

struct AANonNullCallSiteArgument final : AANonNullFloating {
  AANonNullCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANonNullFloating(IRP, A) {}

struct AANonNullCallSiteReturned final
    : AACallSiteReturnedFromReturned<AANonNull, AANonNullImpl> {
  AANonNullCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACallSiteReturnedFromReturned<AANonNull, AANonNullImpl>(IRP, A) {}
  const std::string getAsStr() const override {
    return getAssumed() ? "norecurse" : "may-recurse";
  }

struct AANoRecurseFunction final : AANoRecurseImpl {
  AANoRecurseFunction(const IRPosition &IRP, Attributor &A)
      : AANoRecurseImpl(IRP, A) {}

    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(CallSitePred, *this, true,
                               UsedAssumedInformation)) {
      if (!UsedAssumedInformation)
        indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;
    }

                            DepClassTy::REQUIRED);
    if (EdgeReachability.canReach(A, *getAnchorScope()))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

struct AANoRecurseCallSite final : AANoRecurseImpl {
  AANoRecurseCallSite(const IRPosition &IRP, Attributor &A)
      : AANoRecurseImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AANoRecurseImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration())
      indicatePessimisticFixpoint();
  }

    auto &FnAA = A.getAAFor<AANoRecurse>(*this, FnPos, DepClassTy::REQUIRED);
    const size_t UBPrevSize = KnownUBInsts.size();
    const size_t NoUBPrevSize = AssumedNoUBInsts.size();

      if (I.isVolatile() && I.mayWriteToMemory())
        return true;

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

             "Expected pointer operand of memory accessing instruction");

      std::optional<Value *> SimplifiedPtrOp =
          stopOnUndefOrAssumed(A, PtrOp, &I);
      if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
        return true;
      const Value *PtrOpVal = *SimplifiedPtrOp;

      if (!isa<ConstantPointerNull>(PtrOpVal)) {
        AssumedNoUBInsts.insert(&I);
        return true;
      }

        AssumedNoUBInsts.insert(&I);
        KnownUBInsts.insert(&I);

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

      auto *BrInst = cast<BranchInst>(&I);

      if (BrInst->isUnconditional())
        return true;

      std::optional<Value *> SimplifiedCond =
          stopOnUndefOrAssumed(A, BrInst->getCondition(), BrInst);
      if (!SimplifiedCond || !*SimplifiedCond)
        return true;
      AssumedNoUBInsts.insert(&I);

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

      for (unsigned idx = 0; idx < CB.arg_size(); idx++) {
        if (idx >= Callee->arg_size())
          break;
            A.getAAFor<AANoUndef>(*this, CalleeArgumentIRP, DepClassTy::NONE);
        if (!NoUndefAA.isKnownNoUndef())
          continue;
        bool UsedAssumedInformation = false;
        std::optional<Value *> SimplifiedVal =
        if (UsedAssumedInformation)
          continue;
        if (SimplifiedVal && !*SimplifiedVal)
          return true;
        if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
          KnownUBInsts.insert(&I);
          continue;
        }
            !isa<ConstantPointerNull>(**SimplifiedVal))
          continue;
            A.getAAFor<AANonNull>(*this, CalleeArgumentIRP, DepClassTy::NONE);
        if (NonNullAA.isKnownNonNull())
          KnownUBInsts.insert(&I);
      }

      auto &RI = cast<ReturnInst>(I);

      std::optional<Value *> SimplifiedRetValue =
          stopOnUndefOrAssumed(A, RI.getReturnValue(), &I);
      if (!SimplifiedRetValue || !*SimplifiedRetValue)
        return true;

      if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
        if (NonNullAA.isKnownNonNull())
          KnownUBInsts.insert(&I);
      }

    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(InspectMemAccessInstForUB, *this,
                              {Instruction::Load, Instruction::Store,
                               Instruction::AtomicCmpXchg,
                               Instruction::AtomicRMW},
                              UsedAssumedInformation,
    A.checkForAllInstructions(InspectBrInstForUB, *this, {Instruction::Br},
                              UsedAssumedInformation,
    A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *this,
                                      UsedAssumedInformation);

    if (!getAnchorScope()->getReturnType()->isVoidTy()) {
      if (!A.isAssumedDead(ReturnIRP, this, nullptr, UsedAssumedInformation)) {
        auto &RetPosNoUndefAA =
            A.getAAFor<AANoUndef>(*this, ReturnIRP, DepClassTy::NONE);
        if (RetPosNoUndefAA.isKnownNoUndef())
          A.checkForAllInstructions(InspectReturnInstForUB, *this,
                                    {Instruction::Ret}, UsedAssumedInformation,
      }
    }

    if (NoUBPrevSize != AssumedNoUBInsts.size() ||
        UBPrevSize != KnownUBInsts.size())
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
    return KnownUBInsts.count(I);

  bool isAssumedToCauseUB(Instruction *I) const override {
    switch (I->getOpcode()) {
    case Instruction::Load:
    case Instruction::Store:
    case Instruction::AtomicCmpXchg:
    case Instruction::AtomicRMW:
      return !AssumedNoUBInsts.count(I);
    case Instruction::Br: {
      auto *BrInst = cast<BranchInst>(I);
      if (BrInst->isUnconditional())
        return false;
      return !AssumedNoUBInsts.count(I);
    }

    if (KnownUBInsts.empty())
      A.changeToUnreachableAfterManifest(I);

  const std::string getAsStr() const override {
    return getAssumed() ? "undefined-behavior" : "no-ub";
  }
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimplifiedV =
    if (!UsedAssumedInformation) {

        KnownUBInsts.insert(I);
        return std::nullopt;

    if (isa<UndefValue>(V)) {
      KnownUBInsts.insert(I);
      return std::nullopt;
    }
struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
  AAUndefinedBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehaviorImpl(IRP, A) {}

  void trackStatistics() const override {
    STATS_DECL(UndefinedBehaviorInstruction, Instruction,
               "Number of instructions known to have UB");
    BUILD_STAT_NAME(UndefinedBehaviorInstruction, Instruction) +=
        KnownUBInsts.size();
  }
  if (SCCI.hasCycle())
    return true;

  for (auto *L : LI->getLoopsInPreorder()) {

    AAWillReturn::initialize(A);
    if (isImpliedByMustprogressAndReadonly(A, true)) {
      indicateOptimisticFixpoint();
      return;
    }

  bool isImpliedByMustprogressAndReadonly(Attributor &A, bool KnownOnly) {
    if ((!getAnchorScope() || !getAnchorScope()->mustProgress()) &&
        (!getAssociatedFunction() || !getAssociatedFunction()->mustProgress()))
      return false;

    return IsKnown || !KnownOnly;
  }

    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

      const auto &WillReturnAA =
          A.getAAFor<AAWillReturn>(*this, IPos, DepClassTy::REQUIRED);
      if (WillReturnAA.isKnownWillReturn())
        return true;
      if (!WillReturnAA.isAssumedWillReturn())
        return false;
      const auto &NoRecurseAA =
          A.getAAFor<AANoRecurse>(*this, IPos, DepClassTy::REQUIRED);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForWillReturn, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  const std::string getAsStr() const override {
    return getAssumed() ? "willreturn" : "may-noreturn";
  }
struct AAWillReturnFunction final : AAWillReturnImpl {
  AAWillReturnFunction(const IRPosition &IRP, Attributor &A)
      : AAWillReturnImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AAWillReturnImpl::initialize(A);

    Function *F = getAnchorScope();
    if (!F || F->isDeclaration() || mayContainUnboundedCycle(*F, A))
      indicatePessimisticFixpoint();
  }

struct AAWillReturnCallSite final : AAWillReturnImpl {
  AAWillReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AAWillReturnImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AAWillReturnImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || !A.isFunctionIPOAmendable(*F))
      indicatePessimisticFixpoint();
  }

    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

    auto &FnAA = A.getAAFor<AAWillReturn>(*this, FnPos, DepClassTy::REQUIRED);
  const ToTy *To = nullptr;

  ReachabilityQueryInfo(const Instruction &From, const ToTy &To,
                        const AA::InstExclusionSetTy *ES)
      : From(&From), To(&To), ExclusionSet(ES) {

    if (ExclusionSet && !ExclusionSet->empty()) {
      ExclusionSet =
          A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ExclusionSet);
    } else {
      ExclusionSet = nullptr;
    }
  }

  ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
      : From(RQI.From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {
           "Didn't expect to copy an explored RQI!");
  }

    return &TombstoneKey;

  unsigned H = PairDMI ::getHashValue({RQI->From, RQI->To});

    if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
      return false;
    return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);

#define DefineKeys(ToTy)                                                       \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey =                  \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getEmptyKey(),                \
              DenseMapInfo<const ToTy *>::getEmptyKey());                      \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey =              \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getTombstoneKey(),            \
              DenseMapInfo<const ToTy *>::getTombstoneKey());
template <typename BaseTy, typename ToTy>
struct CachedReachabilityAA : public BaseTy {

  bool isQueryAA() const override { return true; }

    for (RQITy *RQI : QueryVector) {
      if (RQI->Result == RQITy::Reachable::No && isReachableImpl(A, *RQI))
        Changed = ChangeStatus::CHANGED;
    }

  virtual bool isReachableImpl(Attributor &A, RQITy &RQI) = 0;

    if (Result == RQITy::Reachable::No) {
      A.registerForUpdate(*this);
    }
    assert(RQI.Result == RQITy::Reachable::No && "Already reachable?");

  const std::string getAsStr() const override {
    return "#queries(" + std::to_string(QueryVector.size()) + ")";
  }

  RQITy *checkQueryCache(Attributor &A, RQITy &StackRQI,
                         typename RQITy::Reachable &Result) {
    if (!this->getState().isValidState()) {
      Result = RQITy::Reachable::Yes;
      return nullptr;
    }

    auto It = QueryCache.find(&StackRQI);
    if (It != QueryCache.end()) {

    RQITy *RQIPtr = new (A.Allocator) RQITy(StackRQI);
    QueryVector.push_back(RQIPtr);
    QueryCache.insert(RQIPtr);
    return RQIPtr;
  }

  bool InUpdate = false;
struct AAIntraFnReachabilityFunction final
    : public CachedReachabilityAA<AAIntraFnReachability, Instruction> {

  bool isAssumedReachable(
    auto *NonConstThis = const_cast<AAIntraFnReachabilityFunction *>(this);

    RQITy StackRQI(A, From, To, ExclusionSet);
    typename RQITy::Reachable Result;
    if (RQITy *RQIPtr = NonConstThis->checkQueryCache(A, StackRQI, Result)) {
      return NonConstThis->isReachableImpl(A, *RQIPtr);
    }
    return Result == RQITy::Reachable::Yes;
  }

  bool isReachableImpl(Attributor &A, RQITy &RQI) override {
      while (IP && IP != &To) {
        if (ExclusionSet && IP != Origin && ExclusionSet->count(IP))
           "Not an intra-procedural query!");

    if (FromBB == ToBB &&
        WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::Yes, RQI);

    if (RQI.ExclusionSet)
      for (auto *I : *RQI.ExclusionSet)
        ExclusionBlocks.insert(I->getParent());

    if (ExclusionBlocks.count(FromBB) &&
      return rememberResult(A, RQITy::Reachable::No, RQI);

        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    while (!Worklist.empty()) {
      if (!Visited.insert(BB).second)
        continue;
        if (LivenessAA.isEdgeDead(BB, SuccBB))
          continue;
        if (SuccBB == ToBB &&
            WillReachInBlock(SuccBB->front(), *RQI.To, RQI.ExclusionSet))
          return rememberResult(A, RQITy::Reachable::Yes, RQI);
        if (ExclusionBlocks.count(SuccBB))
          continue;

    return rememberResult(A, RQITy::Reachable::No, RQI);
  }

  void trackStatistics() const override {}
    assert(getAssociatedType()->isPointerTy() &&
           "Noalias is a pointer attribute");

  const std::string getAsStr() const override {
    return getAssumed() ? "noalias" : "may-alias";
  }

struct AANoAliasFloating final : AANoAliasImpl {
  AANoAliasFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}
  void initialize(Attributor &A) override {
    AANoAliasImpl::initialize(A);
    Value *Val = &getAssociatedValue();
      CastInst *CI = dyn_cast<CastInst>(Val);
      if (!Base->hasOneUse())
        indicatePessimisticFixpoint();

    if (isa<AllocaInst>(Val))
      indicateOptimisticFixpoint();
    else if (isa<ConstantPointerNull>(Val) &&
      indicateOptimisticFixpoint();
    else if (Val != &getAssociatedValue()) {
      const auto &ValNoAliasAA = A.getAAFor<AANoAlias>(
      if (ValNoAliasAA.isKnownNoAlias())
        indicateOptimisticFixpoint();
    }
  }

    return indicatePessimisticFixpoint();

  void trackStatistics() const override {
struct AANoAliasArgument final
    : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
  using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;

  void initialize(Attributor &A) override {
    Base::initialize(A);
    if (hasAttr({Attribute::ByVal}))
      indicateOptimisticFixpoint();
  }

    const auto &NoSyncAA =
                      DepClassTy::OPTIONAL);
    if (NoSyncAA.isAssumedNoSync())
      return Base::updateImpl(A);

      return Base::updateImpl(A);

    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(
            true, UsedAssumedInformation))
      return Base::updateImpl(A);

    return indicatePessimisticFixpoint();
struct AANoAliasCallSiteArgument final : AANoAliasImpl {
  AANoAliasCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    const auto &CB = cast<CallBase>(getAnchorValue());
    if (CB.paramHasAttr(getCallSiteArgNo(), Attribute::NoAlias))
      indicateOptimisticFixpoint();
    Value &Val = getAssociatedValue();
    if (isa<ConstantPointerNull>(Val) &&
      indicateOptimisticFixpoint();
  }

  bool mayAliasWithArgument(Attributor &A, AAResults *&AAR,
                            const AAMemoryBehavior &MemBehaviorAA,
                            const CallBase &CB, unsigned OtherArgNo) {
    if (this->getCalleeArgNo() == (int)OtherArgNo)
      return false;

    if (CBArgMemBehaviorAA.isAssumedReadNone()) {
      A.recordDependence(CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    if (CBArgMemBehaviorAA.isAssumedReadOnly() && IsReadOnly) {
      A.recordDependence(MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      A.recordDependence(CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    if (!AAR)
      AAR = A.getInfoCache().getAAResultsForFunction(*getAnchorScope());

    bool IsAliasing = !AAR || !AAR->isNoAlias(&getAssociatedValue(), ArgOp);
    LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Check alias between "
                         "callsite arguments: "
                      << getAssociatedValue() << " " << *ArgOp << " => "
                      << (IsAliasing ? "" : "no-") << "alias \n");

    if (!AssociatedValueIsNoAliasAtDef) {
      LLVM_DEBUG(dbgs() << "[AANoAlias] " << getAssociatedValue()
                        << " is not no-alias at the definition\n");

      A.recordDependence(NoAliasAA, *this, DepClassTy::OPTIONAL);

    auto &NoCaptureAA = A.getAAFor<AANoCapture>(*this, VIRP, DepClassTy::NONE);

    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      if (auto *CB = dyn_cast<CallBase>(UserI)) {
                      DepClassTy::OPTIONAL);
        if (NoCaptureAA.isAssumedNoCapture())
          return true;
              A, *UserI, *getCtxI(), *this, nullptr,
              [ScopeFn](const Function &Fn) { return &Fn != ScopeFn; }))
      }

      case UseCaptureKind::NO_CAPTURE:
        return true;
      case UseCaptureKind::MAY_CAPTURE:
      case UseCaptureKind::PASSTHROUGH:

    if (!NoCaptureAA.isAssumedNoCaptureMaybeReturned()) {
      if (!A.checkForAllUses(UsePred, *this, getAssociatedValue())) {
        LLVM_DEBUG(
            dbgs() << "[AANoAliasCSArg] " << getAssociatedValue()
                   << " cannot be noalias as it is potentially captured\n");
        return false;
      }
    }
    A.recordDependence(NoCaptureAA, *this, DepClassTy::OPTIONAL);

    const auto &CB = cast<CallBase>(getAnchorValue());
    for (unsigned OtherArgNo = 0; OtherArgNo < CB.arg_size(); OtherArgNo++)
      if (mayAliasWithArgument(A, AAR, MemBehaviorAA, CB, OtherArgNo))
        return false;

    auto &MemBehaviorAA =
      A.recordDependence(MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;

    const auto &NoAliasAA = A.getAAFor<AANoAlias>(*this, VIRP, DepClassTy::NONE);

    if (isKnownNoAliasDueToNoAliasPreservation(A, AAR, MemBehaviorAA,
      LLVM_DEBUG(
          dbgs() << "[AANoAlias] No-Alias deduced via no-alias preservation\n");
      return ChangeStatus::UNCHANGED;
    }

    return indicatePessimisticFixpoint();
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AANoAliasImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration())
      indicatePessimisticFixpoint();
  }

    auto CheckReturnValue = [&](Value &RV) -> bool {
      if (Constant *C = dyn_cast<Constant>(&RV))
        if (C->isNullValue() || isa<UndefValue>(C))
          return true;

      if (!isa<CallBase>(&RV))
        return false;

      const auto &NoAliasAA =
          A.getAAFor<AANoAlias>(*this, RVPos, DepClassTy::REQUIRED);

      const auto &NoCaptureAA =
          A.getAAFor<AANoCapture>(*this, RVPos, DepClassTy::REQUIRED);
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
struct AANoAliasCallSiteReturned final : AANoAliasImpl {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AANoAliasImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration())
      indicatePessimisticFixpoint();
  }

    auto &FnAA = A.getAAFor<AANoAlias>(*this, FnPos, DepClassTy::REQUIRED);
struct AAIsDeadValueImpl : public AAIsDead {

  void initialize(Attributor &A) override {
    if (auto *Scope = getAnchorScope())
      if (!A.isRunOn(*Scope))
        indicatePessimisticFixpoint();
  }

  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  const std::string getAsStr() const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }

    if (!isa<Constant>(V)) {
      if (auto *I = dyn_cast<Instruction>(&V))
        if (!A.isRunOn(*I->getFunction()))
          return false;
      bool UsedAssumedInformation = false;
      std::optional<Constant *> C =
          A.getAssumedConstant(V, *this, UsedAssumedInformation);
    }

    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    return A.checkForAllUses(UsePred, *this, V, false,
                             DepClassTy::REQUIRED,

    auto *CB = dyn_cast<CallBase>(I);
    if (!CB || isa<IntrinsicInst>(CB))
      return true;

    const auto &NoUnwindAA =
        A.getAndUpdateAAFor<AANoUnwind>(*this, CallIRP, DepClassTy::NONE);
    if (!NoUnwindAA.isAssumedNoUnwind())
      return false;
    if (!NoUnwindAA.isKnownNoUnwind())
      A.recordDependence(NoUnwindAA, *this, DepClassTy::OPTIONAL);
/// Liveness information for a floating value.
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);

    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (!isAssumedSideEffectFree(A, I)) {
      if (!isa_and_nonnull<StoreInst>(I))
        indicatePessimisticFixpoint();
      else
        removeAssumedBits(HAS_NO_EFFECT);
    }
  }

  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // Lang ref now states volatile store is not UB, let's skip them.
    if (SI.isVolatile())
      return false;

    // If we are collecting assumes to be deleted we are in the manifest stage.
    // It's problematic to collect the potential copies again now so we use the
    // cached ones.
    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      if (!AA::getPotentialCopiesOfStoredValue(A, SI, PotentialCopies, *this,
                                               UsedAssumedInformation)) {
        LLVM_DEBUG(
            dbgs()
            << "[AAIsDead] Could not determine potential copies of store!\n");
        return false;
      }
    }
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");

    InformationCache &InfoCache = A.getInfoCache();
    return llvm::all_of(PotentialCopies, [&](Value *V) {
      if (A.isAssumedDead(IRPosition::value(*V), this, nullptr,
                          UsedAssumedInformation))
        return true;
      if (auto *LI = dyn_cast<LoadInst>(V)) {
        if (llvm::all_of(LI->uses(), [&](const Use &U) {
              auto &UserI = cast<Instruction>(*U.getUser());
              if (InfoCache.isOnlyUsedByAssume(UserI)) {
                if (AssumeOnlyInst)
                  AssumeOnlyInst->insert(&UserI);
                return true;
              }
              return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
            })) {
          return true;
        }
      }
      LLVM_DEBUG(dbgs() << "[AAIsDead] Potential copy " << *V
                        << " is assumed live!\n");
      return false;
    });
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (isa_and_nonnull<StoreInst>(I))
      if (isValidState())
        return "assumed-dead-store";
    return AAIsDeadValueImpl::getAsStr();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (auto *SI = dyn_cast_or_null<StoreInst>(I)) {
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    } else {
      if (!isAssumedSideEffectFree(A, I))
        return indicatePessimisticFixpoint();
      if (!areAllUsesAssumedDead(A, getAssociatedValue()))
        return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }

  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      // If we get here we basically know the users are all dead. We check if
      // isAssumedSideEffectFree returns true here again because it might not
      // be the case and only the users are dead but the instruction (=call)
      // is still needed.
      if (auto *SI = dyn_cast<StoreInst>(I)) {
        SmallSetVector<Instruction *, 8> AssumeOnlyInst;
        bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
        (void)IsDead;
        assert(IsDead && "Store was assumed to be dead!");
        A.deleteAfterManifest(*I);
        for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
          Instruction *AOI = AssumeOnlyInst[i];
          for (auto *Usr : AOI->users())
            AssumeOnlyInst.insert(cast<Instruction>(Usr));
          A.deleteAfterManifest(*AOI);
        }
        return ChangeStatus::CHANGED;
      }
      if (isAssumedSideEffectFree(A, I) && !isa<InvokeInst>(I)) {
        A.deleteAfterManifest(*I);
        return ChangeStatus::CHANGED;
      }
    }
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(IsDead)
  }

private:
  // The potential copies of a dead store, used for deletion during manifest.
  SmallSetVector<Value *, 4> PotentialCopies;
};
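// Example (illustrative): the store below is a dead store even though its
// loaded copy feeds llvm.assume; the assume-only instructions are collected
// in AssumeOnlyInst and deleted together with the store during manifest:
//
//   store i32 0, ptr %p
//   %v = load i32, ptr %p
//   %c = icmp eq i32 %v, 0
//   call void @llvm.assume(i1 %c)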
/// Liveness information for an (function) argument.
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (!A.isFunctionIPOAmendable(*getAnchorScope()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, /* ReplacementTypes */ {}))
      if (A.registerFunctionSignatureRewrite(
              Arg, /* ReplacementTypes */ {},
              Attributor::ArgumentReplacementInfo::CalleeRepairCBTy{},
              Attributor::ArgumentReplacementInfo::ACSRepairCBTy{}))
        return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(IsDead) }
};
/// Liveness information for a call site argument.
struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // Redirect the request to the associated callee argument.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto &ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    return clampStateAndIndicateChange(getState(), ArgAA.getState());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    CallBase &CB = cast<CallBase>(getAnchorValue());
    Use &U = CB.getArgOperandUse(getCallSiteArgNo());
    assert(!isa<UndefValue>(U.get()) &&
           "Expected undef values to be filtered out!");
    UndefValue &UV = *UndefValue::get(U->getType());
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(IsDead) }
};
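// Manifesting a dead call-site argument only rewrites the one operand, e.g.
// `call void @g(i32 %costly)` becomes `call void @g(i32 undef)`; whether
// `%costly` itself then dies is left to the floating-value liveness AAs.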
/// Liveness information for a call site return value.
struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    // We track this separately as a secondary state.
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      STATS_DECLTRACK_CSRET_ATTR(IsDead)
    else
      STATS_DECLTRACK_CSRET_ATTR(UnusedResult)
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};
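// The extra IsAssumedSideEffectFree bit separates "the call result has no
// live users" from "the call itself may go away": a call with side effects
// can reach the assumed-dead-users state without ever becoming removable
// itself.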
/// Liveness information for function return values.
struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        return false;
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // TODO: Rewrite the signature to return void?
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    auto RetInstPred = [&](Instruction &I) {
      ReturnInst &RI = cast<ReturnInst>(I);
      if (!isa<UndefValue>(RI.getReturnValue()))
        AnyChange |= A.changeUseAfterManifest(RI.getOperandUse(0), UV);
      return true;
    };
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(IsDead) }
};
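// Example (illustrative): if no call site of
//
//   define internal i32 @f() { ...; ret i32 %r }
//
// uses the returned value, every `ret i32 %r` is rewritten to `ret i32 undef`
// so that the computation of %r can subsequently be deleted as dead code.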
/// Liveness information for a whole function.
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function *F = getAnchorScope();
    if (!F || F->isDeclaration() || !A.isRunOn(*F)) {
      indicatePessimisticFixpoint();
      return;
    }
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  }

  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      return false;
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; },
                                  *this, true, UsedAssumedInformation);
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function &F = *getAnchorScope();

    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind. This is not possible if the personality of the
    // function allows to catch asynchronous exceptions.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      auto *CB = dyn_cast<CallBase>(DeadEndI);
      if (!CB)
        continue;
      const auto &NoReturnAA = A.getAndUpdateAAFor<AANoReturn>(
          *this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL);
      bool MayReturn = !NoReturnAA.isAssumedNoReturn();
      if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
        continue;

      if (auto *II = dyn_cast<InvokeInst>(DeadEndI))
        A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      else
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      HasChanged = ChangeStatus::CHANGED;
    }

    STATS_DECL(AAIsDead, BasicBlock, "Number of dead basic blocks deleted.");
    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        ++BUILD_STAT_NAME(AAIsDead, BasicBlock);
        HasChanged = ChangeStatus::CHANGED;
      }

    return HasChanged;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  bool isEdgeDead(const BasicBlock *From,
                  const BasicBlock *To) const override {
    assert(From->getParent() == getAnchorScope() &&
           To->getParent() == getAnchorScope() &&
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

  /// Returns true if the function is assumed dead.
  bool isAssumedDead() const override { return false; }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return false; }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == getAnchorScope() &&
           "BB must be in the same anchor scope function.");
    if (!getAssumed())
      return false;
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    if (!getAssumed())
      return false;

    // If it is not in AssumedLiveBlocks then it is for sure dead. Otherwise,
    // it can still be dead after a noreturn call in a live block.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;

    // If it is not after a liveness barrier it is live.
    const Instruction *PrevI = I->getPrevNode();
    while (PrevI) {
      if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
        return true;
      PrevI = PrevI->getPrevNode();
    }
    return false;
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }

  /// Assume \p BB is (partially) live now and indicate to the Attributor \p A
  /// that internal functions called from \p BB should now be looked at.
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      return false;

    // We assume that all of BB is (probably) live now and if there are calls
    // to internal functions we will assume that those are now live as well.
    // This is a performance optimization for blocks with calls to a lot of
    // internal functions. It can however cause dead functions to be treated
    // as live.
    for (const Instruction &I : BB)
      if (const auto *CB = dyn_cast<CallBase>(&I))
        if (const Function *F = CB->getCalledFunction())
          if (F->hasLocalLinkage())
            A.markLiveInternalFunction(*F);
    return true;
  }

  /// Determine if \p F might catch asynchronous exceptions.
  static bool mayCatchAsynchronousExceptions(const Function &F) {
    return F.hasPersonalityFn() && !canSimplifyInvokeNoUnwind(&F);
  }

  /// Collection of instructions that need to be explored again, e.g., we
  /// did assume they do not transfer control to (one of their) successors.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  /// Collection of instructions that are known to not transfer control.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  /// Collection of all assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  /// Collection of all assumed live BasicBlocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};

static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  const IRPosition &IPos = IRPosition::callsite_function(CB);

  const auto &NoReturnAA =
      A.getAndUpdateAAFor<AANoReturn>(AA, IPos, DepClassTy::OPTIONAL);
  if (NoReturnAA.isAssumedNoReturn())
    return !NoReturnAA.isKnownNoReturn();
  if (CB.isTerminator())
    AliveSuccessors.push_back(&CB.getSuccessor(0)->front());
  else
    AliveSuccessors.push_back(CB.getNextNode());
  return false;
}

static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      identifyAliveSuccessors(A, cast<CallBase>(II), AA, AliveSuccessors);

  // First, determine if we can change an invoke to a call assuming the
  // callee is nounwind. This is not possible if the personality of the
  // function allows to catch asynchronous exceptions.
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
  } else {
    const IRPosition &IPos = IRPosition::callsite_function(II);
    const auto &AANoUnw =
        A.getAndUpdateAAFor<AANoUnwind>(AA, IPos, DepClassTy::OPTIONAL);
    if (AANoUnw.isAssumedNoUnwind()) {
      UsedAssumedInformation |= !AANoUnw.isKnownNoUnwind();
    } else {
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
    }
  }
  return UsedAssumedInformation;
}
static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  if (BI.getNumSuccessors() == 1) {
    AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
  } else {
    std::optional<Constant *> C =
        A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
    if (!C || isa_and_nonnull<UndefValue>(*C)) {
      // No value yet, assume both edges are dead.
    } else if (isa_and_nonnull<ConstantInt>(*C)) {
      const BasicBlock *SuccBB =
          BI.getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
      AliveSuccessors.push_back(&SuccBB->front());
    } else {
      AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
      AliveSuccessors.push_back(&BI.getSuccessor(1)->front());
      UsedAssumedInformation = false;
    }
  }
  return UsedAssumedInformation;
}
static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  std::optional<Constant *> C =
      A.getAssumedConstant(*SI.getCondition(), AA, UsedAssumedInformation);
  if (!C || isa_and_nonnull<UndefValue>(*C)) {
    // No value yet, assume all edges are dead.
  } else if (isa_and_nonnull<ConstantInt>(*C)) {
    for (const auto &CaseIt : SI.cases()) {
      if (CaseIt.getCaseValue() == *C) {
        AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
        return UsedAssumedInformation;
      }
    }
    AliveSuccessors.push_back(&SI.getDefaultDest()->front());
    return UsedAssumedInformation;
  } else {
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
  }
  return UsedAssumedInformation;
}
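// Example (illustrative): for a branch on an assumed-constant condition,
//
//   br i1 %c, label %then, label %else   ; %c simplified to i1 true
//
// only `%then` is reported alive. Because the simplification used assumed
// information, the branch is re-queued in ToBeExploredFrom so the decision
// can be revisited if the assumption is later invalidated.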
ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) {
  ChangeStatus Change = ChangeStatus::UNCHANGED;

  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      return ChangeStatus::UNCHANGED;

    Function *F = getAnchorScope();
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
    Change = ChangeStatus::CHANGED;
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");

  // Copy and clear the list of instructions we need to explore from. It is
  // refilled with instructions the next update has to look at.
  SmallVector<const Instruction *, 8> Worklist(ToBeExploredFrom.begin(),
                                               ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;

  SmallVector<const Instruction *, 8> AliveSuccessors;
  while (!Worklist.empty()) {
    const Instruction *I = Worklist.pop_back_val();
    LLVM_DEBUG(dbgs() << "[AAIsDead] Exploration inst: " << *I << "\n");

    // Fast forward for uninteresting instructions. We could look for UB here
    // though.
    while (!I->isTerminator() && !isa<CallBase>(I))
      I = I->getNextNode();

    AliveSuccessors.clear();

    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    // TODO: look for (assumed) UB to backwards propagate "deadness".
    default:
      assert(I->isTerminator() &&
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      break;
    case Instruction::Call:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<CallInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Invoke:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<InvokeInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Br:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<BranchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Switch:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<SwitchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    }

    if (UsedAssumedInformation) {
      NewToBeExploredFrom.insert(I);
    } else if (AliveSuccessors.empty() ||
               (I->isTerminator() &&
                AliveSuccessors.size() < I->getNumSuccessors())) {
      if (KnownDeadEnds.insert(I))
        Change = ChangeStatus::CHANGED;
    }

    LLVM_DEBUG(dbgs() << "[AAIsDead] #AliveSuccessors: "
                      << AliveSuccessors.size() << " UsedAssumedInformation: "
                      << UsedAssumedInformation << "\n");

    for (const Instruction *AliveSuccessor : AliveSuccessors) {
      if (!I->isTerminator()) {
        assert(AliveSuccessors.size() == 1 &&
               "Non-terminator expected to have a single successor!");
        Worklist.push_back(AliveSuccessor);
      } else {
        // Record the assumed live edge before exploring the new block.
        auto Edge = std::make_pair(I->getParent(), AliveSuccessor->getParent());
        if (AssumedLiveEdges.insert(Edge).second)
          Change = ChangeStatus::CHANGED;
        if (assumeLive(A, *AliveSuccessor->getParent()))
          Worklist.push_back(AliveSuccessor);
      }
    }
  }

  // Check if the content of ToBeExploredFrom changed, ignore the order.
  if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
      llvm::any_of(NewToBeExploredFrom, [&](const Instruction *I) {
        return !ToBeExploredFrom.count(I);
      })) {
    Change = ChangeStatus::CHANGED;
    ToBeExploredFrom = std::move(NewToBeExploredFrom);
  }

  // If we know everything is live there is no need to query for liveness.
  // Instead, indicating a pessimistic fixpoint will cause the state to be
  // "invalid" and all queries to be answered conservatively without lookups.
  // To be in this state we have to fully explore the function.
  if (ToBeExploredFrom.empty() &&
      getAnchorScope()->size() == AssumedLiveBlocks.size() &&
      llvm::all_of(KnownDeadEnds, [](const Instruction *DeadEndI) {
        return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
      }))
    return indicatePessimisticFixpoint();
  return Change;
}
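// The update is a monotone forward reachability walk: blocks only ever
// transition from dead to live, and traversed edges are recorded in
// AssumedLiveEdges. Once every block is live and all known dead ends are
// genuine function exits, liveness queries can no longer prune anything, so
// the AA invalidates itself via indicatePessimisticFixpoint() to make all
// future queries answer conservatively in constant time.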
/// Liveness information for a call site.
struct AAIsDeadCallSite final : AAIsDeadFunction {
  AAIsDeadCallSite(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFunction(IRP, A) {}

  // Liveness is not tracked per call site; requests are redirected to the
  // callee instead.
  void initialize(Attributor &A) override {
    llvm_unreachable("Abstract attributes for liveness are not "
                     "supported for call sites yet!");
  }

  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {}
};
/// -------------------- Dereferenceable Argument Attribute --------------------

struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    SmallVector<Attribute, 4> Attrs;
    getAttrs({Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
             Attrs, /* IgnoreSubsumingPositions */ false, &A);
    for (const Attribute &Attr : Attrs)
      takeKnownDerefBytesMaximum(Attr.getValueAsInt());

    const IRPosition &IRP = this->getIRPosition();
    NonNullAA = &A.getAAFor<AANonNull>(*this, IRP, DepClassTy::NONE);

    bool CanBeNull, CanBeFreed;
    takeKnownDerefBytesMaximum(V.getPointerDereferenceableBytes(
        A.getDataLayout(), CanBeNull, CanBeFreed));

    bool IsFnInterface = IRP.isFnInterfaceKind();
    Function *FnScope = IRP.getAnchorScope();
    if (IsFnInterface && (!FnScope || !A.isFunctionIPOAmendable(*FnScope))) {
      indicatePessimisticFixpoint();
      return;
    }

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See AbstractAttribute::getState().
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }

  /// Helper function for collecting accessed bytes in must-be-executed-context.
  void addAccessedBytesForUse(Attributor &A, const Use *U,
                              const Instruction *I, DerefState &State) {
    const Value *UseV = U->get();
    if (!UseV->getType()->isPointerTy())
      return;

    std::optional<MemoryLocation> Loc = MemoryLocation::getOrNone(I);
    if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() || I->isVolatile())
      return;

    int64_t Offset;
    const Value *Base = GetPointerBaseWithConstantOffset(
        Loc->Ptr, Offset, A.getDataLayout(), /* AllowNonInbounds */ true);
    if (Base && Base == &getAssociatedValue())
      State.addAccessedBytes(Offset, Loc->Size.getValue());
  }

  /// See followUsesInMBEC.
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AADereferenceable::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
        A, *this, getAssociatedValue(), U, I, IsNonNull, TrackUse);
    LLVM_DEBUG(dbgs() << "[AADereferenceable] Deref bytes: " << DerefBytes
                      << " for instruction " << *I << "\n");

    addAccessedBytesForUse(A, U, I, State);
    State.takeKnownDerefBytesMaximum(DerefBytes);
    return TrackUse;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus Change = AADereferenceable::manifest(A);
    if (isAssumedNonNull() && hasAttr(Attribute::DereferenceableOrNull)) {
      removeAttrs({Attribute::DereferenceableOrNull});
      return ChangeStatus::CHANGED;
    }
    return Change;
  }

  void getDeducedAttributes(LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    // TODO: Add *_globally support.
    if (isAssumedNonNull())
      Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
          Ctx, getAssumedDereferenceableBytes()));
    else
      Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
          Ctx, getAssumedDereferenceableBytes()));
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    if (!getAssumedDereferenceableBytes())
      return "unknown-dereferenceable";
    return std::string("dereferenceable") +
           (isAssumedNonNull() ? "" : "_or_null") +
           (isAssumedGlobal() ? "_globally" : "") + "<" +
           std::to_string(getKnownDereferenceableBytes()) + "-" +
           std::to_string(getAssumedDereferenceableBytes()) + ">";
  }
};
/// Dereferenceable attribute for a floating value.
struct AADereferenceableFloating : AADereferenceableImpl {
  AADereferenceableFloating(const IRPosition &IRP, Attributor &A)
      : AADereferenceableImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool Stripped;
    bool UsedAssumedInformation = false;
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      AA::AnyScope, UsedAssumedInformation)) {
      Values.push_back({getAssociatedValue(), getCtxI()});
      Stripped = false;
    } else {
      Stripped = Values.size() != 1 ||
                 Values.front().getValue() != &getAssociatedValue();
    }

    const DataLayout &DL = A.getDataLayout();
    DerefState T;

    auto VisitValueCB = [&](const Value &V) -> bool {
      unsigned IdxWidth =
          DL.getIndexSizeInBits(V.getType()->getPointerAddressSpace());
      APInt Offset(IdxWidth, 0);
      const Value *Base = stripAndAccumulateOffsets(
          A, *this, &V, DL, Offset, /* GetMinOffset */ false,
          /* AllowNonInbounds */ true);

      int64_t DerefBytes = 0;