53#include "llvm/IR/IntrinsicsAMDGPU.h"
54#include "llvm/IR/IntrinsicsNVPTX.h"
73#define DEBUG_TYPE "attributor"
77 cl::desc(
"Manifest Attributor internal string attributes."),
90 cl::desc(
"Maximum number of potential values to be "
91 "tracked for each position."),
96 "attributor-max-potential-values-iterations",
cl::Hidden,
98 "Maximum number of iterations we keep dismantling potential values."),
101STATISTIC(NumAAs,
"Number of abstract attributes created");
116#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
117 ("Number of " #TYPE " marked '" #NAME "'")
118#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
119#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
120#define STATS_DECL(NAME, TYPE, MSG) \
121 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
122#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
123#define STATS_DECLTRACK(NAME, TYPE, MSG) \
125 STATS_DECL(NAME, TYPE, MSG) \
126 STATS_TRACK(NAME, TYPE) \
128#define STATS_DECLTRACK_ARG_ATTR(NAME) \
129 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
130#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
131 STATS_DECLTRACK(NAME, CSArguments, \
132 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
133#define STATS_DECLTRACK_FN_ATTR(NAME) \
134 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
135#define STATS_DECLTRACK_CS_ATTR(NAME) \
136 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
137#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
138 STATS_DECLTRACK(NAME, FunctionReturn, \
139 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
140#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
141 STATS_DECLTRACK(NAME, CSReturn, \
142 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
143#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, Floating, \
145 ("Number of floating values known to be '" #NAME "'"))
150#define PIPE_OPERATOR(CLASS) \
151 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
152 return OS << static_cast<const AbstractAttribute &>(AA); \
203 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
206 auto *BB =
I->getParent();
212 return !HeaderOnly || BB ==
C->getHeader();
223 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
228 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
232 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
235 if (!isa<StructType>(Ty))
248 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
258 bool AllowVolatile) {
259 if (!AllowVolatile &&
I->isVolatile())
262 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
263 return LI->getPointerOperand();
266 if (
auto *
SI = dyn_cast<StoreInst>(
I)) {
267 return SI->getPointerOperand();
270 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
271 return CXI->getPointerOperand();
274 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
275 return RMWI->getPointerOperand();
292 assert(
Offset >= 0 &&
"Negative offset not supported yet!");
294 <<
"-bytes as " << *ResTy <<
"\n");
297 Type *Ty = PtrElemTy;
302 std::string GEPName =
Ptr->getName().str();
305 GEPName +=
"." + std::to_string(
Index.getZExtValue());
312 if (IntOffset != 0) {
321 Ptr->getName() +
".cast");
330 bool GetMinOffset,
bool AllowNonInbounds,
331 bool UseAssumed =
false) {
333 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
338 UseAssumed ? DepClassTy::OPTIONAL
342 if (Range.isFullSet())
348 ROffset = Range.getSignedMin();
350 ROffset = Range.getSignedMax();
361 const Value *
Ptr, int64_t &BytesOffset,
363 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
366 true, AllowNonInbounds);
374template <
typename AAType,
typename StateType =
typename AAType::StateType>
376 Attributor &
A,
const AAType &QueryingAA, StateType &S,
378 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
379 << QueryingAA <<
" into " << S <<
"\n");
381 assert((QueryingAA.getIRPosition().getPositionKind() ==
383 QueryingAA.getIRPosition().getPositionKind() ==
385 "Can only clamp returned value states for a function returned or call "
386 "site returned position!");
390 std::optional<StateType>
T;
393 auto CheckReturnValue = [&](
Value &RV) ->
bool {
396 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
397 LLVM_DEBUG(
dbgs() <<
"[Attributor] RV: " << RV <<
" AA: " << AA.getAsStr()
398 <<
" @ " << RVPos <<
"\n");
399 const StateType &AAS = AA.getState();
401 T = StateType::getBestState(AAS);
403 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
405 return T->isValidState();
408 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA))
409 S.indicatePessimisticFixpoint();
416template <
typename AAType,
typename BaseType,
417 typename StateType =
typename BaseType::StateType,
418 bool PropagateCallBaseContext =
false>
419struct AAReturnedFromReturnedValues :
public BaseType {
425 StateType S(StateType::getBestState(this->getState()));
426 clampReturnedValueStates<AAType, StateType>(
428 PropagateCallBaseContext ? this->getCallBaseContext() :
nullptr);
431 return clampStateAndIndicateChange<StateType>(this->getState(), S);
437template <
typename AAType,
typename StateType =
typename AAType::StateType>
438static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
440 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
441 << QueryingAA <<
" into " << S <<
"\n");
443 assert(QueryingAA.getIRPosition().getPositionKind() ==
445 "Can only clamp call site argument states for an argument position!");
449 std::optional<StateType>
T;
452 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
462 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
463 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
464 <<
" AA: " << AA.getAsStr() <<
" @" << ACSArgPos <<
"\n");
465 const StateType &AAS = AA.getState();
467 T = StateType::getBestState(AAS);
469 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
471 return T->isValidState();
474 bool UsedAssumedInformation =
false;
475 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
476 UsedAssumedInformation))
477 S.indicatePessimisticFixpoint();
484template <
typename AAType,
typename BaseType,
485 typename StateType =
typename AAType::StateType>
486bool getArgumentStateFromCallBaseContext(
Attributor &
A,
490 "Expected an 'argument' position !");
496 assert(ArgNo >= 0 &&
"Invalid Arg No!");
498 const auto &AA =
A.getAAFor<AAType>(
500 DepClassTy::REQUIRED);
501 const StateType &CBArgumentState =
502 static_cast<const StateType &
>(AA.getState());
504 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
505 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
509 State ^= CBArgumentState;
514template <
typename AAType,
typename BaseType,
515 typename StateType =
typename AAType::StateType,
516 bool BridgeCallBaseContext =
false>
517struct AAArgumentFromCallSiteArguments :
public BaseType {
523 StateType S = StateType::getBestState(this->getState());
525 if (BridgeCallBaseContext) {
527 getArgumentStateFromCallBaseContext<AAType, BaseType, StateType>(
528 A, *
this, this->getIRPosition(), S);
530 return clampStateAndIndicateChange<StateType>(this->getState(), S);
532 clampCallSiteArgumentStates<AAType, StateType>(
A, *
this, S);
536 return clampStateAndIndicateChange<StateType>(this->getState(), S);
541template <
typename AAType,
typename BaseType,
542 typename StateType =
typename BaseType::StateType,
543 bool IntroduceCallBaseContext =
false>
544struct AACallSiteReturnedFromReturned :
public BaseType {
550 assert(this->getIRPosition().getPositionKind() ==
552 "Can only wrap function returned positions for call site returned "
554 auto &S = this->getState();
556 const Function *AssociatedFunction =
557 this->getIRPosition().getAssociatedFunction();
558 if (!AssociatedFunction)
559 return S.indicatePessimisticFixpoint();
561 CallBase &CBContext = cast<CallBase>(this->getAnchorValue());
562 if (IntroduceCallBaseContext)
563 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:"
564 << CBContext <<
"\n");
567 *AssociatedFunction, IntroduceCallBaseContext ? &CBContext :
nullptr);
568 const AAType &AA =
A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
574template <
class AAType,
typename StateType =
typename AAType::StateType>
575static void followUsesInContext(AAType &AA,
Attributor &
A,
580 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
581 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
583 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
585 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
586 for (
const Use &Us : UserI->
uses())
601template <
class AAType,
typename StateType =
typename AAType::StateType>
602static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
607 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
611 A.getInfoCache().getMustBeExecutedContextExplorer();
613 followUsesInContext<AAType>(AA,
A, Explorer, &CtxI,
Uses, S);
615 if (S.isAtFixpoint())
620 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
621 if (Br->isConditional())
660 StateType ParentState;
664 ParentState.indicateOptimisticFixpoint();
666 for (
const BasicBlock *BB : Br->successors()) {
667 StateType ChildState;
669 size_t BeforeSize =
Uses.size();
670 followUsesInContext(AA,
A, Explorer, &BB->front(),
Uses, ChildState);
673 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
676 ParentState &= ChildState;
689namespace PointerInfo {
750 R.indicatePessimisticFixpoint();
846 if (!Range.mayOverlap(ItRange))
848 bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
849 for (
auto Index : It.getSecond()) {
851 if (!CB(Access, IsExact))
871 for (
unsigned Index : LocalList->getSecond()) {
874 if (Range.offsetAndSizeAreUnknown())
890 RemoteI = RemoteI ? RemoteI : &
I;
894 bool AccExists =
false;
896 for (
auto Index : LocalList) {
898 if (
A.getLocalInst() == &
I) {
908 dbgs() <<
"[AAPointerInfo] Inserting access in new offset bins\n";
911 for (
auto Key : ToAdd) {
920 "New Access should have been at AccIndex");
921 LocalList.push_back(AccIndex);
930 auto Before = Current;
932 if (Current == Before)
935 auto &ExistingRanges = Before.getRanges();
936 auto &NewRanges = Current.getRanges();
944 dbgs() <<
"[AAPointerInfo] Removing access from old offset bins\n";
952 "Expected bin to actually contain the Access.");
971 using const_iterator = VecTy::const_iterator;
974 const_iterator begin()
const {
return Offsets.begin(); }
975 const_iterator end()
const {
return Offsets.end(); }
978 return Offsets ==
RHS.Offsets;
984 bool isUnassigned()
const {
return Offsets.size() == 0; }
986 bool isUnknown()
const {
999 void addToAll(int64_t Inc) {
1000 for (
auto &
Offset : Offsets) {
1009 void merge(
const OffsetInfo &R) {
Offsets.append(
R.Offsets); }
1024struct AAPointerInfoImpl
1025 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1030 const std::string getAsStr()
const override {
1031 return std::string(
"PointerInfo ") +
1032 (isValidState() ? (std::string(
"#") +
1033 std::to_string(OffsetBins.
size()) +
" bins")
1039 return AAPointerInfo::manifest(
A);
1042 bool forallInterferingAccesses(
1046 return State::forallInterferingAccesses(Range, CB);
1049 bool forallInterferingAccesses(
1051 bool FindInterferingWrites,
bool FindInterferingReads,
1052 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1054 HasBeenWrittenTo =
false;
1060 const auto &NoSyncAA =
A.getAAFor<
AANoSync>(
1064 bool AllInSameNoSyncFn = NoSyncAA.isAssumedNoSync();
1065 bool InstIsExecutedByInitialThreadOnly =
1066 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1067 bool InstIsExecutedInAlignedRegion =
1068 ExecDomainAA && ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1069 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1070 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1073 bool IsThreadLocalObj =
1082 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1083 if (IsThreadLocalObj || AllInSameNoSyncFn)
1085 const auto *FnExecDomainAA =
1086 I.getFunction() == &
Scope
1091 if (!FnExecDomainAA)
1093 if (InstIsExecutedInAlignedRegion ||
1094 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I)) {
1095 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1098 if (InstIsExecutedByInitialThreadOnly &&
1099 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1100 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1109 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1110 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1111 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1112 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1119 const bool UseDominanceReasoning =
1120 FindInterferingWrites && NoRecurseAA.isKnownNoRecurse();
1131 case AA::GPUAddressSpace::Shared:
1132 case AA::GPUAddressSpace::Constant:
1133 case AA::GPUAddressSpace::Local:
1145 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1147 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1153 if (NoRecurseAA.isAssumedNoRecurse()) {
1154 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1156 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1159 if (HasKernelLifetime(GV, *GV->getParent()))
1160 IsLiveInCalleeCB = [](
const Function &Fn) {
1169 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1170 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1171 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1172 ExclusionSet.
insert(Acc.getRemoteInst());
1175 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1176 (!FindInterferingReads || !Acc.isRead()))
1179 bool Dominates = FindInterferingWrites && DT && Exact &&
1180 Acc.isMustAccess() &&
1181 (Acc.getRemoteInst()->getFunction() == &
Scope) &&
1184 DominatingWrites.
insert(&Acc);
1188 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1190 InterferingAccesses.
push_back({&Acc, Exact});
1193 if (!State::forallInterferingAccesses(
I, AccessCB, Range))
1196 HasBeenWrittenTo = !DominatingWrites.
empty();
1200 for (
const Access *Acc : DominatingWrites) {
1201 if (!LeastDominatingWriteInst) {
1202 LeastDominatingWriteInst = Acc->getRemoteInst();
1203 }
else if (DT->
dominates(LeastDominatingWriteInst,
1204 Acc->getRemoteInst())) {
1205 LeastDominatingWriteInst = Acc->getRemoteInst();
1210 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1211 if (!CanIgnoreThreading(Acc))
1217 bool ReadChecked = !FindInterferingReads;
1218 bool WriteChecked = !FindInterferingWrites;
1224 &ExclusionSet, IsLiveInCalleeCB))
1229 if (!WriteChecked) {
1231 &ExclusionSet, IsLiveInCalleeCB))
1232 WriteChecked =
true;
1246 if (!WriteChecked && HasBeenWrittenTo &&
1247 Acc.getRemoteInst()->getFunction() != &
Scope) {
1257 if (!FnReachabilityAA.instructionCanReach(
1258 A, *LeastDominatingWriteInst,
1259 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1260 WriteChecked =
true;
1266 if (ReadChecked && WriteChecked)
1269 if (!DT || !UseDominanceReasoning)
1271 if (!DominatingWrites.count(&Acc))
1273 return LeastDominatingWriteInst != Acc.getRemoteInst();
1278 for (
auto &It : InterferingAccesses) {
1279 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1280 !CanSkipAccess(*It.first, It.second)) {
1281 if (!UserCB(*It.first, It.second))
1291 using namespace AA::PointerInfo;
1293 return indicatePessimisticFixpoint();
1295 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1296 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1300 const auto &State = OtherAAImpl.getState();
1301 for (
const auto &It : State) {
1302 for (
auto Index : It.getSecond()) {
1303 const auto &RAcc = State.getAccess(
Index);
1304 if (IsByval && !RAcc.isRead())
1306 bool UsedAssumedInformation =
false;
1308 auto Content =
A.translateArgumentToCallSiteContent(
1309 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1310 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1311 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1313 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1314 RAcc.getType(), RAcc.getRemoteInst());
1321 const OffsetInfo &Offsets,
CallBase &CB) {
1322 using namespace AA::PointerInfo;
1324 return indicatePessimisticFixpoint();
1326 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1330 const auto &State = OtherAAImpl.getState();
1331 for (
const auto &It : State) {
1332 for (
auto Index : It.getSecond()) {
1333 const auto &RAcc = State.getAccess(
Index);
1334 for (
auto Offset : Offsets) {
1338 if (!NewRanges.isUnknown()) {
1339 NewRanges.addToAllOffsets(
Offset);
1342 addAccess(
A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
1343 RAcc.getType(), RAcc.getRemoteInst());
1352 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1356 for (
auto &It : OffsetBins) {
1357 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1358 <<
"] : " << It.getSecond().size() <<
"\n";
1359 for (
auto AccIndex : It.getSecond()) {
1360 auto &Acc = AccessList[AccIndex];
1361 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1362 if (Acc.getLocalInst() != Acc.getRemoteInst())
1363 O <<
" --> " << *Acc.getRemoteInst()
1365 if (!Acc.isWrittenValueYetUndetermined()) {
1366 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1367 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1369 else if (Acc.getWrittenValue())
1370 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1372 O <<
" - c: <unknown>\n";
1379struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1382 : AAPointerInfoImpl(IRP,
A) {}
1389 using namespace AA::PointerInfo;
1392 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1402 if (!VT || VT->getElementCount().isScalable() ||
1404 (*Content)->getType() != VT ||
1405 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1414 Type *ElementType = VT->getElementType();
1415 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1416 auto *ConstContent = cast<Constant>(*
Content);
1420 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1425 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1426 ElementContent,
Kind, ElementType);
1429 for (
auto &ElementOffset : ElementOffsets)
1430 ElementOffset += ElementSize;
1444 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1448 void trackStatistics()
const override {
1449 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1453bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1456 const OffsetInfo &PtrOI,
1458 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1462 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1463 "Don't look for constant values if the offset has already been "
1464 "determined to be unknown.");
1466 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1472 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1476 Union.addToAll(ConstantOffset.getSExtValue());
1481 for (
const auto &VI : VariableOffsets) {
1484 if (!PotentialConstantsAA.isValidState()) {
1490 if (PotentialConstantsAA.undefIsContained())
1498 if (AssumedSet.empty())
1502 for (
const auto &ConstOffset : AssumedSet) {
1503 auto CopyPerOffset =
Union;
1504 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1505 VI.second.getZExtValue());
1506 Product.merge(CopyPerOffset);
1511 UsrOI = std::move(Union);
1516 using namespace AA::PointerInfo;
1519 Value &AssociatedValue = getAssociatedValue();
1522 OffsetInfoMap[&AssociatedValue].
insert(0);
1524 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1533 auto &UsrOI = OffsetInfoMap[Usr];
1534 auto &PtrOI = OffsetInfoMap[CurPtr];
1535 assert(!PtrOI.isUnassigned() &&
1536 "Cannot pass through if the input Ptr was not visited!");
1542 const auto *
F = getAnchorScope();
1547 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
1549 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1551 User *Usr =
U.getUser();
1552 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1555 "The current pointer offset should have been seeded!");
1559 return HandlePassthroughUser(Usr, CurPtr, Follow);
1560 if (
CE->isCompare())
1562 if (!isa<GEPOperator>(CE)) {
1563 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1568 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1571 auto &UsrOI = OffsetInfoMap[Usr];
1572 auto &PtrOI = OffsetInfoMap[CurPtr];
1574 if (UsrOI.isUnknown())
1577 if (PtrOI.isUnknown()) {
1583 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1586 if (isa<PtrToIntInst>(Usr))
1588 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
1589 return HandlePassthroughUser(Usr, CurPtr, Follow);
1594 if (isa<PHINode>(Usr)) {
1597 bool IsFirstPHIUser = !OffsetInfoMap.
count(Usr);
1598 auto &UsrOI = OffsetInfoMap[Usr];
1599 auto &PtrOI = OffsetInfoMap[CurPtr];
1603 if (PtrOI.isUnknown()) {
1604 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1605 << *CurPtr <<
" in " << *Usr <<
"\n");
1606 Follow = !UsrOI.isUnknown();
1612 if (UsrOI == PtrOI) {
1613 assert(!PtrOI.isUnassigned() &&
1614 "Cannot assign if the current Ptr was not visited!");
1615 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1625 auto It = OffsetInfoMap.
find(CurPtrBase);
1626 if (It == OffsetInfoMap.
end()) {
1627 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1628 << *CurPtr <<
" in " << *Usr <<
"\n");
1642 auto BaseOI = It->getSecond();
1643 BaseOI.addToAll(
Offset.getZExtValue());
1644 if (IsFirstPHIUser || BaseOI == UsrOI) {
1645 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1646 <<
" in " << *Usr <<
"\n");
1647 return HandlePassthroughUser(Usr, CurPtr, Follow);
1651 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1652 << *CurPtr <<
" in " << *Usr <<
"\n");
1663 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1671 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1672 OffsetInfoMap[CurPtr].Offsets, Changed,
1677 if (
auto *II = dyn_cast<IntrinsicInst>(&
I))
1678 return II->isAssumeLikeIntrinsic();
1689 }
while (FromI && FromI != ToI);
1695 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1698 if (IntrI.getParent() == BB) {
1699 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1705 if ((*PredIt) != BB)
1710 if (SuccBB == IntrBB)
1712 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1716 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1719 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1725 std::pair<Value *, IntrinsicInst *> Assumption;
1726 for (
const Use &LoadU : LoadI->
uses()) {
1727 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1728 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1730 for (
const Use &CmpU : CmpI->
uses()) {
1731 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1732 if (!IsValidAssume(*IntrI))
1734 int Idx = CmpI->getOperandUse(0) == LoadU;
1735 Assumption = {CmpI->getOperand(
Idx), IntrI};
1740 if (Assumption.first)
1745 if (!Assumption.first || !Assumption.second)
1749 << *Assumption.second <<
": " << *LoadI
1750 <<
" == " << *Assumption.first <<
"\n");
1752 return handleAccess(
1753 A, *Assumption.second, Assumption.first, AccessKind::AK_ASSUMPTION,
1754 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1759 for (
auto *OtherOp : OtherOps) {
1760 if (OtherOp == CurPtr) {
1763 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1775 bool UsedAssumedInformation =
false;
1776 std::optional<Value *>
Content =
nullptr;
1780 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1784 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1785 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1786 *StoreI->getValueOperand()->getType(),
1787 {StoreI->getValueOperand()}, AccessKind::AK_W);
1788 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1789 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1790 {RMWI->getValOperand()}, AccessKind::AK_RW);
1791 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1792 return HandleStoreLike(
1793 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1794 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1797 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1807 Changed = translateAndAddState(
A, CSArgPI, OffsetInfoMap[CurPtr], *CB) |
1809 return isValidState();
1811 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1817 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1820 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1821 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1822 if (OffsetInfoMap.
count(NewU)) {
1824 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1825 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1826 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1830 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1832 OffsetInfoMap[NewU] = OffsetInfoMap[OldU];
1835 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1837 true, EquivalentUseCB)) {
1838 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1839 return indicatePessimisticFixpoint();
1843 dbgs() <<
"Accesses by bin after update:\n";
1850struct AAPointerInfoReturned final : AAPointerInfoImpl {
1852 : AAPointerInfoImpl(IRP,
A) {}
1856 return indicatePessimisticFixpoint();
1860 void trackStatistics()
const override {
1861 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1865struct AAPointerInfoArgument final : AAPointerInfoFloating {
1867 : AAPointerInfoFloating(IRP,
A) {}
1871 AAPointerInfoFloating::initialize(
A);
1872 if (getAnchorScope()->isDeclaration())
1873 indicatePessimisticFixpoint();
1877 void trackStatistics()
const override {
1878 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1882struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1884 : AAPointerInfoFloating(IRP,
A) {}
1888 using namespace AA::PointerInfo;
1892 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
1896 LengthVal =
Length->getSExtValue();
1897 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
1900 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
1902 return indicatePessimisticFixpoint();
1905 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
1907 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
1910 dbgs() <<
"Accesses by bin after update:\n";
1926 if (ArgAA.getState().isValidState())
1927 return translateAndAddStateFromCallee(
A, ArgAA,
1928 *cast<CallBase>(getCtxI()));
1929 if (!
Arg->getParent()->isDeclaration())
1930 return indicatePessimisticFixpoint();
1933 const auto &NoCaptureAA =
1934 A.getAAFor<
AANoCapture>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
1936 if (!NoCaptureAA.isAssumedNoCapture())
1937 return indicatePessimisticFixpoint();
1939 bool IsKnown =
false;
1941 return ChangeStatus::UNCHANGED;
1944 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
1950 void trackStatistics()
const override {
1951 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1955struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
1957 : AAPointerInfoFloating(IRP,
A) {}
1960 void trackStatistics()
const override {
1961 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1972 const std::string getAsStr()
const override {
1973 return getAssumed() ?
"nounwind" :
"may-unwind";
1979 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
1980 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
1981 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
1984 if (!
I.mayThrow(
true))
1987 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
1995 bool UsedAssumedInformation =
false;
1996 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
1997 UsedAssumedInformation))
1998 return indicatePessimisticFixpoint();
2000 return ChangeStatus::UNCHANGED;
2004struct AANoUnwindFunction final :
public AANoUnwindImpl {
2006 : AANoUnwindImpl(IRP,
A) {}
2013struct AANoUnwindCallSite final : AANoUnwindImpl {
2015 : AANoUnwindImpl(IRP,
A) {}
2019 AANoUnwindImpl::initialize(
A);
2021 if (!
F ||
F->isDeclaration())
2022 indicatePessimisticFixpoint();
2033 auto &FnAA =
A.getAAFor<
AANoUnwind>(*
this, FnPos, DepClassTy::REQUIRED);
2059 bool IsFixed =
false;
2060 bool IsValidState =
true;
2071 IsValidState =
true;
2072 ReturnedValues.
clear();
2075 if (!
F ||
F->isDeclaration()) {
2076 indicatePessimisticFixpoint();
2079 assert(!
F->getReturnType()->isVoidTy() &&
2080 "Did not expect a void return type!");
2083 auto &OpcodeInstMap =
A.getInfoCache().getOpcodeInstMapForFunction(*
F);
2087 if (
Arg.hasReturnedAttr()) {
2088 auto &ReturnInstSet = ReturnedValues[&
Arg];
2089 if (
auto *Insts = OpcodeInstMap.lookup(Instruction::Ret))
2091 ReturnInstSet.
insert(cast<ReturnInst>(RI));
2093 indicateOptimisticFixpoint();
2098 if (!
A.isFunctionIPOAmendable(*
F))
2099 indicatePessimisticFixpoint();
2109 const AbstractState &getState()
const override {
return *
this; }
2123 size_t getNumReturnValues()
const override {
2124 return isValidState() ? ReturnedValues.
size() : -1;
2130 std::optional<Value *> getAssumedUniqueReturnValue(
Attributor &
A)
const;
2133 bool checkForAllReturnedValuesAndReturnInsts(
2138 const std::string getAsStr()
const override;
2141 bool isAtFixpoint()
const override {
return IsFixed; }
2144 bool isValidState()
const override {
return IsValidState; }
2149 return ChangeStatus::UNCHANGED;
2154 IsValidState =
false;
2155 return ChangeStatus::CHANGED;
2165 "Number of function with known return values");
2168 std::optional<Value *> UniqueRV = getAssumedUniqueReturnValue(
A);
2170 if (!UniqueRV || !*UniqueRV)
2175 "Number of function with unique return");
2177 if (
auto *UniqueRVArg = dyn_cast<Argument>(*UniqueRV)) {
2178 if (UniqueRVArg->getType()->canLosslesslyBitCastTo(
2179 getAssociatedFunction()->getReturnType())) {
2187const std::string AAReturnedValuesImpl::getAsStr()
const {
2188 return (isAtFixpoint() ?
"returns(#" :
"may-return(#") +
2189 (isValidState() ? std::to_string(getNumReturnValues()) :
"?") +
")";
2192std::optional<Value *>
2193AAReturnedValuesImpl::getAssumedUniqueReturnValue(
Attributor &
A)
const {
2199 std::optional<Value *> UniqueRV;
2200 Type *Ty = getAssociatedFunction()->getReturnType();
2202 auto Pred = [&](
Value &RV) ->
bool {
2204 return UniqueRV != std::optional<Value *>(
nullptr);
2207 if (!
A.checkForAllReturnedValues(Pred, *
this))
2213bool AAReturnedValuesImpl::checkForAllReturnedValuesAndReturnInsts(
2216 if (!isValidState())
2221 for (
const auto &It : ReturnedValues) {
2222 Value *RV = It.first;
2223 if (!Pred(*RV, It.second))
2234 bool UsedAssumedInformation =
false;
2240 UsedAssumedInformation))
2243 for (
auto &VAC : Values) {
2245 "Assumed returned value should be valid in function scope!");
2246 if (ReturnedValues[VAC.getValue()].insert(&Ret))
2254 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
2255 UsedAssumedInformation))
2256 return indicatePessimisticFixpoint();
2260struct AAReturnedValuesFunction final :
public AAReturnedValuesImpl {
2262 : AAReturnedValuesImpl(IRP,
A) {}
2269struct AAReturnedValuesCallSite final : AAReturnedValuesImpl {
2271 : AAReturnedValuesImpl(IRP,
A) {}
2280 "supported for call sites yet!");
2285 return indicatePessimisticFixpoint();
2289 void trackStatistics()
const override {}
2297 case Intrinsic::nvvm_barrier0:
2298 case Intrinsic::nvvm_barrier0_and:
2299 case Intrinsic::nvvm_barrier0_or:
2300 case Intrinsic::nvvm_barrier0_popc:
2302 case Intrinsic::amdgcn_s_barrier:
2303 if (ExecutedAligned)
2316 if (
auto *FI = dyn_cast<FenceInst>(
I))
2319 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2326 switch (
I->getOpcode()) {
2327 case Instruction::AtomicRMW:
2328 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2330 case Instruction::Store:
2331 Ordering = cast<StoreInst>(
I)->getOrdering();
2333 case Instruction::Load:
2334 Ordering = cast<LoadInst>(
I)->getOrdering();
2338 "New atomic operations need to be known in the attributor.");
2349 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2350 return !
MI->isVolatile();
2358 const std::string getAsStr()
const override {
2359 return getAssumed() ?
"nosync" :
"may-sync";
2375 if (
I.mayReadOrWriteMemory())
2379 return !cast<CallBase>(
I).isConvergent();
2382 bool UsedAssumedInformation =
false;
2383 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2384 UsedAssumedInformation) ||
2385 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2386 UsedAssumedInformation))
2387 return indicatePessimisticFixpoint();
2392struct AANoSyncFunction final :
public AANoSyncImpl {
2394 : AANoSyncImpl(IRP,
A) {}
2401struct AANoSyncCallSite final : AANoSyncImpl {
2403 : AANoSyncImpl(IRP,
A) {}
2407 AANoSyncImpl::initialize(
A);
2409 if (!
F ||
F->isDeclaration())
2410 indicatePessimisticFixpoint();
2421 auto &FnAA =
A.getAAFor<
AANoSync>(*
this, FnPos, DepClassTy::REQUIRED);
2433struct AANoFreeImpl :
public AANoFree {
2439 const auto &CB = cast<CallBase>(
I);
2443 const auto &NoFreeAA =
A.getAAFor<
AANoFree>(
2448 bool UsedAssumedInformation =
false;
2449 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2450 UsedAssumedInformation))
2451 return indicatePessimisticFixpoint();
2452 return ChangeStatus::UNCHANGED;
2456 const std::string getAsStr()
const override {
2457 return getAssumed() ?
"nofree" :
"may-free";
2461struct AANoFreeFunction final :
public AANoFreeImpl {
2463 : AANoFreeImpl(IRP,
A) {}
2470struct AANoFreeCallSite final : AANoFreeImpl {
2472 : AANoFreeImpl(IRP,
A) {}
2476 AANoFreeImpl::initialize(
A);
2478 if (!
F ||
F->isDeclaration())
2479 indicatePessimisticFixpoint();
2490 auto &FnAA =
A.getAAFor<
AANoFree>(*
this, FnPos, DepClassTy::REQUIRED);
2499struct AANoFreeFloating : AANoFreeImpl {
2501 : AANoFreeImpl(IRP,
A) {}
2510 const auto &NoFreeAA =
A.getAAFor<
AANoFree>(
2512 if (NoFreeAA.isAssumedNoFree())
2513 return ChangeStatus::UNCHANGED;
2515 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2516 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2518 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2525 const auto &NoFreeArg =
A.getAAFor<
AANoFree>(
2527 DepClassTy::REQUIRED);
2531 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
2532 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
2536 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
2537 isa<ReturnInst>(UserI))
2543 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2544 return indicatePessimisticFixpoint();
2546 return ChangeStatus::UNCHANGED;
2551struct AANoFreeArgument final : AANoFreeFloating {
2553 : AANoFreeFloating(IRP,
A) {}
2560struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2562 : AANoFreeFloating(IRP,
A) {}
2572 return indicatePessimisticFixpoint();
2574 auto &ArgAA =
A.getAAFor<
AANoFree>(*
this, ArgPos, DepClassTy::REQUIRED);
2583struct AANoFreeReturned final : AANoFreeFloating {
2585 : AANoFreeFloating(IRP,
A) {
2600 void trackStatistics()
const override {}
2604struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2606 : AANoFreeFloating(IRP,
A) {}
2609 return ChangeStatus::UNCHANGED;
2618static int64_t getKnownNonNullAndDerefBytesForUse(
2620 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2623 const Value *UseV =
U->get();
2630 if (isa<CastInst>(
I)) {
2635 if (isa<GetElementPtrInst>(
I)) {
2645 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2648 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2667 IsNonNull |= DerefAA.isKnownNonNull();
2668 return DerefAA.getKnownDereferenceableBytes();
2672 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
2678 if (
Base &&
Base == &AssociatedValue) {
2679 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2681 return std::max(int64_t(0), DerefBytes);
2688 int64_t DerefBytes = Loc->Size.getValue();
2690 return std::max(int64_t(0), DerefBytes);
2701 getAssociatedValue().
getType()->getPointerAddressSpace())) {}
2705 Value &
V = *getAssociatedValue().stripPointerCasts();
2706 if (!NullIsDefined &&
2707 hasAttr({Attribute::NonNull, Attribute::Dereferenceable},
2709 indicateOptimisticFixpoint();
2713 if (isa<ConstantPointerNull>(V)) {
2714 indicatePessimisticFixpoint();
2718 AANonNull::initialize(
A);
2720 bool CanBeNull, CanBeFreed;
2721 if (
V.getPointerDereferenceableBytes(
A.getDataLayout(), CanBeNull,
2724 indicateOptimisticFixpoint();
2729 if (isa<GlobalValue>(V)) {
2730 indicatePessimisticFixpoint();
2735 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2741 bool IsNonNull =
false;
2742 bool TrackUse =
false;
2743 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2744 IsNonNull, TrackUse);
2745 State.setKnown(IsNonNull);
2750 const std::string getAsStr()
const override {
2751 return getAssumed() ?
"nonnull" :
"may-null";
2756 const bool NullIsDefined;
2760struct AANonNullFloating :
public AANonNullImpl {
2762 : AANonNullImpl(IRP,
A) {}
2769 bool UsedAssumedInformation =
false;
2771 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2773 Values.
push_back({getAssociatedValue(), getCtxI()});
2776 Stripped = Values.
size() != 1 ||
2777 Values.
front().getValue() != &getAssociatedValue();
2783 if (
const Function *Fn = getAnchorScope()) {
2791 DepClassTy::REQUIRED);
2792 if (!Stripped &&
this == &AA) {
2794 T.indicatePessimisticFixpoint();
2800 return T.isValidState();
2803 for (
const auto &VAC : Values)
2804 if (!VisitValueCB(*VAC.getValue(), VAC.getCtxI()))
2805 return indicatePessimisticFixpoint();
2815struct AANonNullReturned final
2816 : AAReturnedFromReturnedValues<AANonNull, AANonNull> {
2821 const std::string getAsStr()
const override {
2822 return getAssumed() ?
"nonnull" :
"may-null";
2830struct AANonNullArgument final
2831 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2833 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2839struct AANonNullCallSiteArgument final : AANonNullFloating {
2841 : AANonNullFloating(IRP,
A) {}
2848struct AANonNullCallSiteReturned final
2849 : AACallSiteReturnedFromReturned<AANonNull, AANonNullImpl> {
2851 : AACallSiteReturnedFromReturned<
AANonNull, AANonNullImpl>(IRP,
A) {}
2865 const std::string getAsStr()
const override {
2866 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2870struct AAMustProgressFunction final : AAMustProgressImpl {
2872 : AAMustProgressImpl(IRP,
A) {}
2876 const auto &WillReturnAA =
2877 A.getAAFor<
AAWillReturn>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
2878 if (WillReturnAA.isKnownWillReturn())
2879 return indicateOptimisticFixpoint();
2880 if (WillReturnAA.isAssumedWillReturn())
2881 return ChangeStatus::UNCHANGED;
2885 const auto &MustProgressAA =
2890 bool AllCallSitesKnown =
true;
2891 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2894 return indicatePessimisticFixpoint();
2896 return ChangeStatus::UNCHANGED;
2900 void trackStatistics()
const override {
2906struct AAMustProgressCallSite final : AAMustProgressImpl {
2908 : AAMustProgressImpl(IRP,
A) {}
2923 void trackStatistics()
const override {
2936 const std::string getAsStr()
const override {
2937 return getAssumed() ?
"norecurse" :
"may-recurse";
2941struct AANoRecurseFunction final : AANoRecurseImpl {
2943 : AANoRecurseImpl(IRP,
A) {}
2955 bool UsedAssumedInformation =
false;
2956 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2957 UsedAssumedInformation)) {
2963 if (!UsedAssumedInformation)
2964 indicateOptimisticFixpoint();
2965 return ChangeStatus::UNCHANGED;
2970 DepClassTy::REQUIRED);
2971 if (EdgeReachability.
canReach(
A, *getAnchorScope()))
2972 return indicatePessimisticFixpoint();
2973 return ChangeStatus::UNCHANGED;
2980struct AANoRecurseCallSite final : AANoRecurseImpl {
2982 : AANoRecurseImpl(IRP,
A) {}
2986 AANoRecurseImpl::initialize(
A);
2988 if (!
F ||
F->isDeclaration())
2989 indicatePessimisticFixpoint();
3000 auto &FnAA =
A.getAAFor<
AANoRecurse>(*
this, FnPos, DepClassTy::REQUIRED);
3017 const std::string getAsStr()
const override {
3018 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
3022struct AANonConvergentFunction final : AANonConvergentImpl {
3024 : AANonConvergentImpl(IRP,
A) {}
3029 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
3030 CallBase &CB = cast<CallBase>(Inst);
3035 if (
Callee->isDeclaration()) {
3036 return !
Callee->hasFnAttribute(Attribute::Convergent);
3043 bool UsedAssumedInformation =
false;
3044 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
3045 UsedAssumedInformation)) {
3046 return indicatePessimisticFixpoint();
3048 return ChangeStatus::UNCHANGED;
3052 if (isKnownNotConvergent() && hasAttr(Attribute::Convergent)) {
3053 removeAttrs({Attribute::Convergent});
3054 return ChangeStatus::CHANGED;
3056 return ChangeStatus::UNCHANGED;
3073 const size_t UBPrevSize = KnownUBInsts.size();
3074 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
3078 if (
I.isVolatile() &&
I.mayWriteToMemory())
3082 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3091 "Expected pointer operand of memory accessing instruction");
3095 std::optional<Value *> SimplifiedPtrOp =
3096 stopOnUndefOrAssumed(
A, PtrOp, &
I);
3097 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
3099 const Value *PtrOpVal = *SimplifiedPtrOp;
3104 if (!isa<ConstantPointerNull>(PtrOpVal)) {
3105 AssumedNoUBInsts.insert(&
I);
3117 AssumedNoUBInsts.insert(&
I);
3119 KnownUBInsts.insert(&
I);
3128 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3132 auto *BrInst = cast<BranchInst>(&
I);
3135 if (BrInst->isUnconditional())
3140 std::optional<Value *> SimplifiedCond =
3141 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3142 if (!SimplifiedCond || !*SimplifiedCond)
3144 AssumedNoUBInsts.insert(&
I);
3152 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3161 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3167 if (idx >=
Callee->arg_size())
3180 A.getAAFor<
AANoUndef>(*
this, CalleeArgumentIRP, DepClassTy::NONE);
3181 if (!NoUndefAA.isKnownNoUndef())
3183 bool UsedAssumedInformation =
false;
3184 std::optional<Value *> SimplifiedVal =
3187 if (UsedAssumedInformation)
3189 if (SimplifiedVal && !*SimplifiedVal)
3191 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3192 KnownUBInsts.insert(&
I);
3196 !isa<ConstantPointerNull>(**SimplifiedVal))
3199 A.getAAFor<
AANonNull>(*
this, CalleeArgumentIRP, DepClassTy::NONE);
3200 if (NonNullAA.isKnownNonNull())
3201 KnownUBInsts.insert(&
I);
3207 auto &RI = cast<ReturnInst>(
I);
3210 std::optional<Value *> SimplifiedRetValue =
3211 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3212 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3229 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3232 if (NonNullAA.isKnownNonNull())
3233 KnownUBInsts.insert(&
I);
3239 bool UsedAssumedInformation =
false;
3240 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3241 {Instruction::Load, Instruction::Store,
3242 Instruction::AtomicCmpXchg,
3243 Instruction::AtomicRMW},
3244 UsedAssumedInformation,
3246 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3247 UsedAssumedInformation,
3249 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3250 UsedAssumedInformation);
3254 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3256 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3257 auto &RetPosNoUndefAA =
3258 A.getAAFor<
AANoUndef>(*
this, ReturnIRP, DepClassTy::NONE);
3259 if (RetPosNoUndefAA.isKnownNoUndef())
3260 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3261 {Instruction::Ret}, UsedAssumedInformation,
3266 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3267 UBPrevSize != KnownUBInsts.size())
3268 return ChangeStatus::CHANGED;
3269 return ChangeStatus::UNCHANGED;
3273 return KnownUBInsts.count(
I);
3276 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3283 switch (
I->getOpcode()) {
3284 case Instruction::Load:
3285 case Instruction::Store:
3286 case Instruction::AtomicCmpXchg:
3287 case Instruction::AtomicRMW:
3288 return !AssumedNoUBInsts.count(
I);
3289 case Instruction::Br: {
3290 auto *BrInst = cast<BranchInst>(
I);
3291 if (BrInst->isUnconditional())
3293 return !AssumedNoUBInsts.count(
I);
3302 if (KnownUBInsts.empty())
3305 A.changeToUnreachableAfterManifest(
I);
3310 const std::string getAsStr()
const override {
3311 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3356 bool UsedAssumedInformation =
false;
3357 std::optional<Value *> SimplifiedV =
3360 if (!UsedAssumedInformation) {
3365 KnownUBInsts.insert(
I);
3366 return std::nullopt;
3372 if (isa<UndefValue>(V)) {
3373 KnownUBInsts.insert(
I);
3374 return std::nullopt;
3380struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3382 : AAUndefinedBehaviorImpl(IRP,
A) {}
3385 void trackStatistics()
const override {
3387 "Number of instructions known to have UB");
3389 KnownUBInsts.size();
3410 if (SCCI.hasCycle())
3420 for (
auto *L : LI->getLoopsInPreorder()) {
3433 AAWillReturn::initialize(
A);
3435 if (isImpliedByMustprogressAndReadonly(
A,
true)) {
3436 indicateOptimisticFixpoint();
3442 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3445 if ((!getAnchorScope() || !getAnchorScope()->mustProgress()) &&
3446 (!getAssociatedFunction() || !getAssociatedFunction()->mustProgress()))
3451 return IsKnown || !KnownOnly;
3457 if (isImpliedByMustprogressAndReadonly(
A,
false))
3458 return ChangeStatus::UNCHANGED;
3462 const auto &WillReturnAA =
3463 A.getAAFor<
AAWillReturn>(*
this, IPos, DepClassTy::REQUIRED);
3464 if (WillReturnAA.isKnownWillReturn())
3466 if (!WillReturnAA.isAssumedWillReturn())
3468 const auto &NoRecurseAA =
3469 A.getAAFor<
AANoRecurse>(*
this, IPos, DepClassTy::REQUIRED);
3473 bool UsedAssumedInformation =
false;
3474 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3475 UsedAssumedInformation))
3476 return indicatePessimisticFixpoint();
3478 return ChangeStatus::UNCHANGED;
3482 const std::string getAsStr()
const override {
3483 return getAssumed() ?
"willreturn" :
"may-noreturn";
3487struct AAWillReturnFunction final : AAWillReturnImpl {
3489 : AAWillReturnImpl(IRP,
A) {}
3493 AAWillReturnImpl::initialize(
A);
3496 if (!
F ||
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3497 indicatePessimisticFixpoint();
3505struct AAWillReturnCallSite final : AAWillReturnImpl {
3507 : AAWillReturnImpl(IRP,
A) {}
3511 AAWillReturnImpl::initialize(
A);
3513 if (!
F || !
A.isFunctionIPOAmendable(*
F))
3514 indicatePessimisticFixpoint();
3519 if (isImpliedByMustprogressAndReadonly(
A,
false))
3520 return ChangeStatus::UNCHANGED;
3528 auto &FnAA =
A.getAAFor<
AAWillReturn>(*
this, FnPos, DepClassTy::REQUIRED);
3551 const ToTy *To =
nullptr;
3564 :
From(&
From), To(&To), ExclusionSet(ES) {
3566 if (!ES || ES->
empty()) {
3567 ExclusionSet =
nullptr;
3568 }
else if (MakeUnique) {
3569 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3574 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3587 return &TombstoneKey;
3590 unsigned H = PairDMI ::getHashValue({RQI->
From, RQI->
To});
3596 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3598 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3602#define DefineKeys(ToTy) \
3604 ReachabilityQueryInfo<ToTy> \
3605 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3606 ReachabilityQueryInfo<ToTy>( \
3607 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3608 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3610 ReachabilityQueryInfo<ToTy> \
3611 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3612 ReachabilityQueryInfo<ToTy>( \
3613 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3614 DenseMapInfo<const ToTy *>::getTombstoneKey());
3623template <
typename BaseTy,
typename ToTy>
3624struct CachedReachabilityAA :
public BaseTy {
3631 bool isQueryAA()
const override {
return true; }
3637 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3638 RQITy *RQI = QueryVector[
u];
3639 if (RQI->Result == RQITy::Reachable::No && isReachableImpl(
A, *RQI))
3640 Changed = ChangeStatus::CHANGED;
3646 virtual bool isReachableImpl(
Attributor &
A, RQITy &RQI) = 0;
3649 RQITy &RQI,
bool UsedExclusionSet) {
3654 QueryCache.erase(&RQI);
3659 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3660 RQITy PlainRQI(RQI.From, RQI.To);
3661 if (!QueryCache.count(&PlainRQI)) {
3662 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3664 QueryVector.push_back(RQIPtr);
3665 QueryCache.insert(RQIPtr);
3670 if (!InUpdate &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3671 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3672 "Did not expect empty set!");
3673 RQITy *RQIPtr =
new (
A.Allocator)
3674 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3675 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3677 assert(!QueryCache.count(RQIPtr));
3678 QueryVector.push_back(RQIPtr);
3679 QueryCache.insert(RQIPtr);
3682 if (
Result == RQITy::Reachable::No && !InUpdate)
3683 A.registerForUpdate(*
this);
3684 return Result == RQITy::Reachable::Yes;
3687 const std::string getAsStr()
const override {
3689 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3692 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3693 typename RQITy::Reachable &
Result) {
3694 if (!this->getState().isValidState()) {
3695 Result = RQITy::Reachable::Yes;
3701 if (StackRQI.ExclusionSet) {
3702 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3703 auto It = QueryCache.find(&PlainRQI);
3704 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3705 Result = RQITy::Reachable::No;
3710 auto It = QueryCache.find(&StackRQI);
3711 if (It != QueryCache.end()) {
3718 QueryCache.insert(&StackRQI);
3723 bool InUpdate =
false;
3728struct AAIntraFnReachabilityFunction final
3729 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3730 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3734 bool isAssumedReachable(
3737 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3741 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3742 typename RQITy::Reachable
Result;
3743 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3744 return NonConstThis->isReachableImpl(
A, StackRQI);
3745 return Result == RQITy::Reachable::Yes;
3752 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3753 if (
llvm::all_of(DeadEdges, [&](
const auto &DeadEdge) {
3754 return LivenessAA.isEdgeDead(DeadEdge.first, DeadEdge.second);
3756 return ChangeStatus::UNCHANGED;
3759 return Base::updateImpl(
A);
3762 bool isReachableImpl(
Attributor &
A, RQITy &RQI)
override {
3764 bool UsedExclusionSet =
false;
3769 while (IP && IP != &To) {
3770 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3771 UsedExclusionSet =
true;
3782 "Not an intra-procedural query!");
3786 if (FromBB == ToBB &&
3787 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3788 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet);
3792 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3793 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet);
3796 if (RQI.ExclusionSet)
3797 for (
auto *
I : *RQI.ExclusionSet)
3798 ExclusionBlocks.
insert(
I->getParent());
3801 if (ExclusionBlocks.
count(FromBB) &&
3804 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet);
3812 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3813 while (!Worklist.
empty()) {
3815 if (!Visited.
insert(BB).second)
3818 if (LivenessAA.isEdgeDead(BB, SuccBB)) {
3819 LocalDeadEdges.
insert({BB, SuccBB});
3824 return rememberResult(
A, RQITy::Reachable::Yes, RQI,
3826 if (ExclusionBlocks.
count(SuccBB)) {
3827 UsedExclusionSet =
true;
3834 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3835 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet);
3839 void trackStatistics()
const override {}
3853 assert(getAssociatedType()->isPointerTy() &&
3854 "Noalias is a pointer attribute");
3857 const std::string getAsStr()
const override {
3858 return getAssumed() ?
"noalias" :
"may-alias";
3863struct AANoAliasFloating final : AANoAliasImpl {
3865 : AANoAliasImpl(IRP,
A) {}
3869 AANoAliasImpl::initialize(
A);
3870 Value *Val = &getAssociatedValue();
3872 CastInst *CI = dyn_cast<CastInst>(Val);
3876 if (!
Base->hasOneUse())
3882 indicatePessimisticFixpoint();
3886 if (isa<AllocaInst>(Val))
3887 indicateOptimisticFixpoint();
3888 else if (isa<ConstantPointerNull>(Val) &&
3891 indicateOptimisticFixpoint();
3892 else if (Val != &getAssociatedValue()) {
3893 const auto &ValNoAliasAA =
A.getAAFor<
AANoAlias>(
3895 if (ValNoAliasAA.isKnownNoAlias())
3896 indicateOptimisticFixpoint();
3903 return indicatePessimisticFixpoint();
3907 void trackStatistics()
const override {
3913struct AANoAliasArgument final
3914 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3915 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3920 Base::initialize(
A);
3922 if (hasAttr({Attribute::ByVal}))
3923 indicateOptimisticFixpoint();
3934 const auto &NoSyncAA =
3936 DepClassTy::OPTIONAL);
3937 if (NoSyncAA.isAssumedNoSync())
3938 return Base::updateImpl(
A);
3943 return Base::updateImpl(
A);
3947 bool UsedAssumedInformation =
false;
3948 if (
A.checkForAllCallSites(
3950 true, UsedAssumedInformation))
3951 return Base::updateImpl(
A);
3959 return indicatePessimisticFixpoint();
3966struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3968 : AANoAliasImpl(IRP,
A) {}
3973 const auto &CB = cast<CallBase>(getAnchorValue());
3974 if (CB.
paramHasAttr(getCallSiteArgNo(), Attribute::NoAlias))
3975 indicateOptimisticFixpoint();
3976 Value &Val = getAssociatedValue();
3977 if (isa<ConstantPointerNull>(Val) &&
3980 indicateOptimisticFixpoint();
3987 const CallBase &CB,
unsigned OtherArgNo) {
3989 if (this->getCalleeArgNo() == (
int)OtherArgNo)
4001 if (CBArgMemBehaviorAA.isAssumedReadNone()) {
4002 A.recordDependence(CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4009 if (CBArgMemBehaviorAA.isAssumedReadOnly() && IsReadOnly) {
4010 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4011 A.recordDependence(CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4017 AAR =
A.getInfoCache().getAAResultsForFunction(*getAnchorScope());
4020 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
4022 "callsite arguments: "
4023 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
4024 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
4041 if (!AssociatedValueIsNoAliasAtDef) {
4043 <<
" is not no-alias at the definition\n");
4053 A.recordDependence(NoAliasAA, *
this, DepClassTy::OPTIONAL);
4057 auto &NoCaptureAA =
A.getAAFor<
AANoCapture>(*
this, VIRP, DepClassTy::NONE);
4061 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
4072 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
4079 DepClassTy::OPTIONAL);
4081 if (NoCaptureAA.isAssumedNoCapture())
4087 A, *UserI, *getCtxI(), *
this,
nullptr,
4088 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
4096 case UseCaptureKind::NO_CAPTURE:
4098 case UseCaptureKind::MAY_CAPTURE:
4102 case UseCaptureKind::PASSTHROUGH:
4109 if (!NoCaptureAA.isAssumedNoCaptureMaybeReturned()) {
4110 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
4112 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
4113 <<
" cannot be noalias as it is potentially captured\n");
4117 A.recordDependence(NoCaptureAA, *
this, DepClassTy::OPTIONAL);
4122 const auto &CB = cast<CallBase>(getAnchorValue());
4123 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
4124 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
4134 auto &MemBehaviorAA =
4137 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4138 return ChangeStatus::UNCHANGED;
4142 const auto &NoAliasAA =
4143 A.getAAFor<
AANoAlias>(*
this, VIRP, DepClassTy::NONE);
4146 if (isKnownNoAliasDueToNoAliasPreservation(
A, AAR, MemBehaviorAA,
4149 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4150 return ChangeStatus::UNCHANGED;
4153 return indicatePessimisticFixpoint();
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AANoAliasImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration())
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckReturnValue = [&](Value &RV) -> bool {
      if (Constant *C = dyn_cast<Constant>(&RV))
        if (C->isNullValue() || isa<UndefValue>(C))
          return true;

      /// For now, we can only deduce noalias if we have call sites.
      /// FIXME: add more support.
      if (!isa<CallBase>(&RV))
        return false;

      const IRPosition &RVPos = IRPosition::value(RV);
      const auto &NoAliasAA =
          A.getAAFor<AANoAlias>(*this, RVPos, DepClassTy::REQUIRED);
      if (!NoAliasAA.isAssumedNoAlias())
        return false;

      const auto &NoCaptureAA =
          A.getAAFor<AANoCapture>(*this, RVPos, DepClassTy::REQUIRED);
      return NoCaptureAA.isAssumedNoCaptureMaybeReturned();
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(noalias) }
};
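/// NoAlias attribute deduction for a call site return value; it is deduced
/// from the corresponding function return value.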
struct AANoAliasCallSiteReturned final : AANoAliasImpl {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AANoAliasImpl::initialize(A);
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration())
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Function *F = getAssociatedFunction();
    const IRPosition &FnPos = IRPosition::returned(*F);
    auto &FnAA = A.getAAFor<AANoAlias>(*this, FnPos, DepClassTy::REQUIRED);
    return clampStateAndIndicateChange(getState(), FnAA.getState());
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(noalias); }
};
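/// ---------------------- "Is-Dead" Value Attributes -----------------------
///
/// The classes below deduce liveness for IR values. A minimal sketch of how a
/// client could query the deduced liveness; `isInstructionDead` is an
/// illustrative helper under that assumption, not part of this file:
///
///   static bool isInstructionDead(Attributor &A, AbstractAttribute &QueryAA,
///                                 Instruction &Inst) {
///     const auto &LivenessAA = A.getAAFor<AAIsDead>(
///         QueryAA, IRPosition::value(Inst), DepClassTy::OPTIONAL);
///     return LivenessAA.isAssumedDead(&Inst);
///   }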
struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    if (auto *Scope = getAnchorScope())
      if (!A.isRunOn(*Scope))
        indicatePessimisticFixpoint();
  }

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }

  /// Check if all uses are assumed dead.
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // Callers might not check the type, void has no uses.
    if (V.getType()->isVoidTy() || V.use_empty())
      return true;

    // If we replace a value with a constant there are no uses left afterwards.
    if (!isa<Constant>(V)) {
      if (auto *I = dyn_cast<Instruction>(&V))
        if (!A.isRunOn(*I->getFunction()))
          return false;
      bool UsedAssumedInformation = false;
      std::optional<Constant *> C =
          A.getAssumedConstant(V, *this, UsedAssumedInformation);
      if (!C || *C)
        return true;
    }

    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    // Explicitly set the dependence class to required because we want a long
    // chain of N dependences like this one to be considered one dependence
    // and eventually result in a total of N+1 updates.
    return A.checkForAllUses(UsePred, *this, V,
                             /* CheckBBLivenessOnly */ false,
                             DepClassTy::REQUIRED,
                             /* IgnoreDroppableUses */ false);
  }

  /// Determine if \p I is assumed to be side-effect free.
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    if (!I || wouldInstructionBeTriviallyDead(I))
      return true;

    auto *CB = dyn_cast<CallBase>(I);
    if (!CB || isa<IntrinsicInst>(CB))
      return false;

    const IRPosition &CallIRP = IRPosition::callsite_function(*CB);
    const auto &NoUnwindAA =
        A.getAndUpdateAAFor<AANoUnwind>(*this, CallIRP, DepClassTy::NONE);
    if (!NoUnwindAA.isAssumedNoUnwind())
      return false;
    if (!NoUnwindAA.isKnownNoUnwind())
      A.recordDependence(NoUnwindAA, *this, DepClassTy::OPTIONAL);

    bool IsKnown;
    return AA::isAssumedReadOnly(A, CallIRP, *this, IsKnown);
  }
};
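/// Liveness deduction for a generic, floating value, including removable
/// stores and no-op fences.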
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);

    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (!isAssumedSideEffectFree(A, I)) {
      if (!isa_and_nonnull<StoreInst>(I) && !isa_and_nonnull<FenceInst>(I))
        indicatePessimisticFixpoint();
      else
        removeAssumedBits(HAS_NO_EFFECT);
    }
  }

  bool isDeadFence(Attributor &A, FenceInst &FI) {
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(*FI.getFunction()), this, DepClassTy::NONE);
    if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
      return false;
    A.recordDependence(*ExecDomainAA, *this, DepClassTy::OPTIONAL);
    return true;
  }

  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // The language reference states a volatile store is not UB, so it cannot
    // be removed as dead.
    if (SI.isVolatile())
      return false;

    // If we are collecting assumes to be deleted we are in the manifest stage.
    // It is problematic to collect the potential copies again now, so we use
    // the cached ones.
    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      if (!AA::getPotentialCopiesOfStoredValue(A, SI, PotentialCopies, *this,
                                               UsedAssumedInformation)) {
        LLVM_DEBUG(
            dbgs()
            << "[AAIsDead] Could not determine potential copies of store!\n");
        return false;
      }
    }
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");

    InformationCache &InfoCache = A.getInfoCache();
    return llvm::all_of(PotentialCopies, [&](Value *V) {
      if (A.isAssumedDead(IRPosition::value(*V), this, nullptr,
                          UsedAssumedInformation))
        return true;
      if (auto *LI = dyn_cast<LoadInst>(V)) {
        if (llvm::all_of(LI->uses(), [&](const Use &U) {
              auto &UserI = cast<Instruction>(*U.getUser());
              if (InfoCache.isOnlyUsedByAssume(UserI)) {
                if (AssumeOnlyInst)
                  AssumeOnlyInst->insert(&UserI);
                return true;
              }
              return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
            }))
          return true;
      }
      LLVM_DEBUG(dbgs() << "[AAIsDead] Potential copy " << *V
                        << " is assumed live!\n");
      return false;
    });
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (isa_and_nonnull<StoreInst>(I))
      if (isValidState())
        return "assumed-dead-store";
    if (isa_and_nonnull<FenceInst>(I))
      if (isValidState())
        return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (auto *SI = dyn_cast_or_null<StoreInst>(I)) {
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    } else if (auto *FI = dyn_cast_or_null<FenceInst>(I)) {
      if (!isDeadFence(A, *FI))
        return indicatePessimisticFixpoint();
    } else {
      if (!isAssumedSideEffectFree(A, I))
        return indicatePessimisticFixpoint();
      if (!areAllUsesAssumedDead(A, getAssociatedValue()))
        return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }

  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      // If we get here we basically know the users are all dead. We check if
      // isAssumedSideEffectFree returns true here again because it might not
      // be the case and only the users are dead but the instruction (=call)
      // was assumed noreturn.
      if (auto *SI = dyn_cast<StoreInst>(I)) {
        SmallSetVector<Instruction *, 8> AssumeOnlyInst;
        bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
        (void)IsDead;
        assert(IsDead && "Store was assumed to be dead!");
        A.deleteAfterManifest(*I);
        for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
          Instruction *AOI = AssumeOnlyInst[i];
          for (auto *Usr : AOI->users())
            AssumeOnlyInst.insert(cast<Instruction>(Usr));
          A.deleteAfterManifest(*AOI);
        }
        return ChangeStatus::CHANGED;
      }
      if (auto *FI = dyn_cast<FenceInst>(I)) {
        assert(isDeadFence(A, *FI));
        A.deleteAfterManifest(*FI);
        return ChangeStatus::CHANGED;
      }
      if (isAssumedSideEffectFree(A, I) && !isa<InvokeInst>(I)) {
        A.deleteAfterManifest(*I);
        return ChangeStatus::CHANGED;
      }
    }
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(IsDead)
  }

private:
  // The potential copies of a dead store, used for deletion during manifest.
  SmallSetVector<Value *, 4> PotentialCopies;
};
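/// Liveness deduction for an (unused) function argument. A dead argument can
/// be dropped via a function signature rewrite.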
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (!A.isFunctionIPOAmendable(*getAnchorScope()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, /* ReplacementTypes */ {}))
      if (A.registerFunctionSignatureRewrite(
              Arg, /* ReplacementTypes */ {},
              Attributor::ArgumentReplacementInfo::CalleeRepairCBTy{},
              Attributor::ArgumentReplacementInfo::ACSRepairCBTy{}))
        return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(IsDead) }
};
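/// Liveness deduction for a call site argument; requests are redirected to
/// the corresponding callee argument.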
struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto &ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    return clampStateAndIndicateChange(getState(), ArgAA.getState());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    CallBase &CB = cast<CallBase>(getAnchorValue());
    Use &U = CB.getArgOperandUse(getCallSiteArgNo());
    assert(!isa<UndefValue>(U.get()) &&
           "Expected undef values to be filtered out!");
    UndefValue &UV = *UndefValue::get(U->getType());
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(IsDead) }
};
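/// Liveness deduction for an unused call site return value; the call itself
/// may still have side effects and remain.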
struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    // We track this separately as a secondary state.
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      STATS_DECLTRACK_CSRET_ATTR(IsDead)
    else
      STATS_DECLTRACK_CSRET_ATTR(UnusedResult)
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};
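/// Liveness deduction for a function return value; it is dead if all call
/// site uses are assumed dead.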
struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        return false;
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // TODO: Rewrite the signature to return void?
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    auto RetInstPred = [&](Instruction &I) {
      ReturnInst &RI = cast<ReturnInst>(I);
      if (!isa<UndefValue>(RI.getOperand(0)))
        AnyChange |= A.changeUseAfterManifest(RI.getOperandUse(0), UV);
      return true;
    };
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(IsDead) }
};
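/// Liveness deduction for a whole function. The state tracks assumed-live
/// basic blocks and edges; exploration starts at the entry block and only
/// follows successors that are not provably dead.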
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function *F = getAnchorScope();
    if (!F || F->isDeclaration() || !A.isRunOn(*F)) {
      indicatePessimisticFixpoint();
      return;
    }
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  }

  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      return false;
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; },
                                  *this, true, UsedAssumedInformation);
  }
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr() const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }
  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function &F = *getAnchorScope();

    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind. This is not possible if the personality of the
    // function allows to catch asynchronous exceptions.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      auto *CB = dyn_cast<CallBase>(DeadEndI);
      if (!CB)
        continue;
      const auto &NoReturnAA = A.getAndUpdateAAFor<AANoReturn>(
          *this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL);
      bool MayReturn = !NoReturnAA.isAssumedNoReturn();
      if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
        continue;

      if (auto *II = dyn_cast<InvokeInst>(DeadEndI))
        A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      else
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      HasChanged = ChangeStatus::CHANGED;
    }

    STATS_DECL(AAIsDead, BasicBlock, "Number of dead basic blocks deleted.");
    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        ++BUILD_STAT_NAME(AAIsDead, BasicBlock);
        HasChanged = ChangeStatus::CHANGED;
      }

    return HasChanged;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;
  bool isEdgeDead(const BasicBlock *From,
                  const BasicBlock *To) const override {
    assert(From->getParent() == getAnchorScope() &&
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return false; }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return false; }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == getAnchorScope() &&
           "BB must be in the same anchor scope function.");
    if (!getAssumed())
      return false;
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    if (!getAssumed())
      return false;

    // If it is not in AssumedLiveBlocks then it is for sure dead. Otherwise,
    // it can still be after a noreturn call in a live block.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;

    // If it is not after a liveness barrier it is live.
    const Instruction *PrevI = I->getPrevNode();
    while (PrevI) {
      if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
        return true;
      PrevI = PrevI->getPrevNode();
    }
    return false;
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }
  /// Determine if \p F might catch asynchronous exceptions.
  static bool mayCatchAsynchronousExceptions(const Function &F) {
    return F.hasPersonalityFn() && !canSimplifyInvokeNoUnwind(&F);
  }

  /// Assume \p BB is (partially) live now and indicate to the Attributor \p A
  /// that internal functions called from \p BB should now be looked at.
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      return false;

    // The block is live now; mark internal functions called from it as live
    // as well.
    for (const Instruction &I : BB)
      if (const auto *CB = dyn_cast<CallBase>(&I))
        if (const Function *F = CB->getCalledFunction())
          if (F->hasLocalLinkage())
            A.markLiveInternalFunction(*F);
    return true;
  }

  /// Collection of instructions that need to be explored again, e.g. we
  /// did assume they do not transfer control to (one of their) successors.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  /// Collection of instructions that are known to not transfer control.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  /// Collection of all assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  /// Collection of all assumed live BasicBlocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};
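/// The identifyAliveSuccessors overloads below compute, for a call or
/// terminator, which successor instructions have to be considered live. Each
/// overload returns true if assumed (not yet known) information was used, so
/// the caller can re-explore once that information solidifies.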
static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  const IRPosition &IPos = IRPosition::callsite_function(CB);

  const auto &NoReturnAA =
      A.getAndUpdateAAFor<AANoReturn>(AA, IPos, DepClassTy::OPTIONAL);
  if (NoReturnAA.isAssumedNoReturn())
    return !NoReturnAA.isKnownNoReturn();
  if (CB.isTerminator())
    AliveSuccessors.push_back(&CB.getSuccessor(0)->front());
  else
    AliveSuccessors.push_back(CB.getNextNode());
  return false;
}

static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      identifyAliveSuccessors(A, cast<CallBase>(II), AA, AliveSuccessors);

  // First, determine if we can change an invoke to a call assuming the callee
  // is nounwind. This is not possible if the personality of the function
  // allows to catch asynchronous exceptions.
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
  } else {
    const IRPosition &IPos = IRPosition::callsite_function(II);
    const auto &AANoUnw =
        A.getAndUpdateAAFor<AANoUnwind>(AA, IPos, DepClassTy::OPTIONAL);
    if (AANoUnw.isAssumedNoUnwind()) {
      UsedAssumedInformation |= !AANoUnw.isKnownNoUnwind();
    } else {
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
    }
  }
  return UsedAssumedInformation;
}
static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  if (BI.getNumSuccessors() == 1) {
    AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
  } else {
    std::optional<Constant *> C =
        A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
    if (!C || isa_and_nonnull<UndefValue>(*C)) {
      // No value yet, assume both edges are dead.
    } else if (isa_and_nonnull<ConstantInt>(*C)) {
      const BasicBlock *SuccBB =
          BI.getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
      AliveSuccessors.push_back(&SuccBB->front());
    } else {
      AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
      AliveSuccessors.push_back(&BI.getSuccessor(1)->front());
      UsedAssumedInformation = false;
    }
  }
  return UsedAssumedInformation;
}
static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  std::optional<Constant *> C =
      A.getAssumedConstant(*SI.getCondition(), AA, UsedAssumedInformation);
  if (!C || isa_and_nonnull<UndefValue>(*C)) {
    // No value yet, assume all edges are dead.
  } else if (isa_and_nonnull<ConstantInt>(*C)) {
    for (const auto &CaseIt : SI.cases()) {
      if (CaseIt.getCaseValue() == *C) {
        AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
        return UsedAssumedInformation;
      }
    }
    AliveSuccessors.push_back(&SI.getDefaultDest()->front());
    return UsedAssumedInformation;
  } else {
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
  }
  return UsedAssumedInformation;
}
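/// Out-of-line definition of AAIsDeadFunction::updateImpl: a forward
/// exploration from all assumed-live entry points that fast-forwards to the
/// next call or terminator and uses the identifyAliveSuccessors overloads to
/// extend the set of assumed-live blocks and edges.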
ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) {
  ChangeStatus Change = ChangeStatus::UNCHANGED;

  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      return ChangeStatus::UNCHANGED;

    Function *F = getAnchorScope();
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
    Change = ChangeStatus::CHANGED;
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");

  // Copy and clear the list of instructions we need to explore from. It is
  // refilled with instructions the next update has to look at.
  SmallVector<const Instruction *, 10> Worklist(ToBeExploredFrom.begin(),
                                                ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;

  SmallVector<const Instruction *, 8> AliveSuccessors;
  while (!Worklist.empty()) {
    const Instruction *I = Worklist.pop_back_val();

    // Fast forward for uninteresting instructions. We could look for UB here
    // though.
    while (!I->isTerminator() && !isa<CallBase>(I))
      I = I->getNextNode();

    AliveSuccessors.clear();

    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    // TODO: look for (assumed) UB to backwards propagate "deadness".
    default:
      assert(I->isTerminator() &&
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      break;
    case Instruction::Call:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<CallInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Invoke:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<InvokeInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Br:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<BranchInst>(*I),
                                                       *this, AliveSuccessors);
      break;