#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsNVPTX.h"
// ...

#define DEBUG_TYPE "attributor"
static cl::opt<bool> ManifestInternal(
    "attributor-manifest-internal", cl::Hidden,
    cl::desc("Manifest Attributor internal string attributes."),
    cl::init(false));

static cl::opt<int> MaxPotentialValues(
    "attributor-max-potential-values", cl::Hidden,
    cl::desc("Maximum number of potential values to be "
             "tracked for each position."),
    cl::init(7));

static cl::opt<int> MaxPotentialValuesIterations(
    "attributor-max-potential-values-iterations", cl::Hidden,
    cl::desc(
        "Maximum number of iterations we keep dismantling potential values."),
    cl::init(64));
STATISTIC(NumAAs, "Number of abstract attributes created");
// Some helper macros to deal with statistics tracking.
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {                                                                            \
    STATS_DECL(NAME, TYPE, MSG)                                                \
    STATS_TRACK(NAME, TYPE)                                                    \
  }
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
// Specialization of the operator<< for abstract attributes subclasses.
#define PIPE_OPERATOR(CLASS)                                                   \
  raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) {                  \
    return OS << static_cast<const AbstractAttribute &>(AA);                   \
  }
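// In the full file this macro is instantiated once per abstract attribute,
// e.g. PIPE_OPERATOR(AAIsDead), so every AA can be streamed to a raw_ostream
// through its AbstractAttribute printer (the instantiation list is elided in
// this excerpt).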
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I,
                         bool HeaderOnly, Cycle **CPtr = nullptr) {
  if (!CI)
    return true;
  auto *BB = I->getParent();
  auto *C = CI->getCycle(BB);
  if (!C)
    return false;
  if (CPtr)
    *CPtr = C;
  return !HeaderOnly || BB == C->getHeader();
}
/// Checks if a type could have padding bytes.
static bool isDenselyPacked(Type *Ty, const DataLayout &DL) {
  // ...
  // If the alloc size is not equal to the storage size, then there are padding
  // bytes.
  if (DL.getTypeSizeInBits(Ty) != DL.getTypeAllocSizeInBits(Ty))
    return false;

  if (VectorType *SeqTy = dyn_cast<VectorType>(Ty))
    return isDenselyPacked(SeqTy->getElementType(), DL);

  if (ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
    return isDenselyPacked(SeqTy->getElementType(), DL);

  if (!isa<StructType>(Ty))
    return true;

  // Check for padding within and between elements of a struct.
  // ...
    StartPos += DL.getTypeAllocSizeInBits(ElTy);
  // ...
}
/// Return the pointer operand of the memory accessing instruction \p I, or
/// nullptr if \p I is volatile and volatile accesses are not allowed
/// (\p AllowVolatile == false).
static const Value *getPointerOperand(const Instruction *I,
                                      bool AllowVolatile) {
  if (!AllowVolatile && I->isVolatile())
    return nullptr;

  if (auto *LI = dyn_cast<LoadInst>(I)) {
    return LI->getPointerOperand();
  }

  if (auto *SI = dyn_cast<StoreInst>(I)) {
    return SI->getPointerOperand();
  }

  if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(I)) {
    return CXI->getPointerOperand();
  }

  if (auto *RMWI = dyn_cast<AtomicRMWInst>(I)) {
    return RMWI->getPointerOperand();
  }

  return nullptr;
}
static const Value *
stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA,
                          const Value *Val, const DataLayout &DL, APInt &Offset,
                          bool GetMinOffset, bool AllowNonInbounds,
                          bool UseAssumed = false) {

  auto AttributorAnalysis = [&](Value &V, APInt &ROffset) -> bool {
    const IRPosition &Pos = IRPosition::value(V);
    // Only track dependence if we are going to use the assumed info.
    const AAValueConstantRange *ValueConstantRangeAA =
        A.getAAFor<AAValueConstantRange>(QueryingAA, Pos,
                                         UseAssumed ? DepClassTy::OPTIONAL
                                                    : DepClassTy::NONE);
    if (!ValueConstantRangeAA)
      return false;
    // ...
  };
  // ...
}

static const Value *
getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA,
                        const Value *Ptr, int64_t &BytesOffset,
                        const DataLayout &DL, bool AllowNonInbounds = false) {
  APInt OffsetAPInt(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
  const Value *Base =
      stripAndAccumulateOffsets(A, QueryingAA, Ptr, DL, OffsetAPInt,
                                /* GetMinOffset */ true, AllowNonInbounds);
  // ...
}
/// Clamp the information known for all returned values of a function
/// (identified by \p QueryingAA) into \p S.
template <typename AAType, typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind,
          bool RecurseForSelectAndPHI = true>
static void clampReturnedValueStates(
    Attributor &A, const AAType &QueryingAA, StateType &S,
    const IRPosition::CallBaseContext *CBContext = nullptr) {
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
                    << QueryingAA << " into " << S << "\n");

  assert((QueryingAA.getIRPosition().getPositionKind() ==
              IRPosition::IRP_RETURNED ||
          QueryingAA.getIRPosition().getPositionKind() ==
              IRPosition::IRP_CALL_SITE_RETURNED) &&
         "Can only clamp returned value states for a function returned or call "
         "site returned position!");

  // Use an optional state as there might not be any return values and we want
  // to join (IntegerState::operator&) the state of all there are.
  std::optional<StateType> T;

  // Callback for each possibly returned value.
  auto CheckReturnValue = [&](Value &RV) -> bool {
    const IRPosition &RVPos = IRPosition::value(RV, CBContext);
    // If possible, use the hasAssumedIRAttr interface.
    if (Attribute::isEnumAttrKind(IRAttributeKind)) {
      bool IsKnown;
      return AA::hasAssumedIRAttr<IRAttributeKind>(
          A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
    }

    const AAType *AA =
        A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
    if (!AA)
      return false;
    LLVM_DEBUG(dbgs() << "[Attributor] RV: " << RV
                      << " AA: " << AA->getAsStr(&A) << " @ " << RVPos << "\n");
    const StateType &AAS = AA->getState();
    if (!T)
      T = StateType::getBestState(AAS);
    *T &= AAS;
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
                      << "\n");
    return T->isValidState();
  };

  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
                                   AA::ValueScope::Intraprocedural,
                                   RecurseForSelectAndPHI))
    S.indicatePessimisticFixpoint();
  else if (T)
    S ^= *T;
}
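// A minimal sketch of the clamping idea (illustrative, with hypothetical
// states): if two returned values carry the assumed states {nonnull} and
// {nonnull, dereferenceable(8)}, the join keeps only what holds for every
// return, i.e. {nonnull}; if any returned value cannot be inspected, the
// querying position falls to its pessimistic fixpoint instead.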
/// Helper class for generic deduction: return value -> returned position.
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool PropagateCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind,
          bool RecurseForSelectAndPHI = true>
struct AAReturnedFromReturnedValues : public BaseType {
  AAReturnedFromReturnedValues(const IRPosition &IRP, Attributor &A)
      : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    StateType S(StateType::getBestState(this->getState()));
    clampReturnedValueStates<AAType, StateType, IRAttributeKind,
                             RecurseForSelectAndPHI>(
        A, *this, S,
        PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
    // TODO: If we know we visited all returned values, thus none are assumed
    // dead, we can take the known information from the state T.
    return clampStateAndIndicateChange<StateType>(this->getState(), S);
  }
};
/// Clamp the information known at all call sites for a given argument
/// (identified by \p QueryingAA) into \p S.
template <typename AAType, typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA,
                                        StateType &S) {
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
                    << QueryingAA << " into " << S << "\n");

  assert(QueryingAA.getIRPosition().getPositionKind() ==
             IRPosition::IRP_ARGUMENT &&
         "Can only clamp call site argument states for an argument position!");

  // Use an optional state as there might not be any call sites and we want to
  // join (IntegerState::operator&) the state of all there are.
  std::optional<StateType> T;

  // The argument number which is also the call site argument number.
  unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();

  auto CallSiteCheck = [&](AbstractCallSite ACS) {
    const IRPosition &ACSArgPos = IRPosition::callsite_argument(ACS, ArgNo);
    // Check if a corresponding argument was found or if it is not associated
    // (which can happen for callback calls).
    if (ACSArgPos.getPositionKind() == IRPosition::IRP_INVALID)
      return false;

    // If possible, use the hasAssumedIRAttr interface.
    if (Attribute::isEnumAttrKind(IRAttributeKind)) {
      bool IsKnown;
      return AA::hasAssumedIRAttr<IRAttributeKind>(
          A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
    }

    const AAType *AA =
        A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
    if (!AA)
      return false;
    LLVM_DEBUG(dbgs() << "[Attributor] ACS: " << *ACS.getInstruction()
                      << " AA: " << AA->getAsStr(&A) << " @" << ACSArgPos
                      << "\n");
    const StateType &AAS = AA->getState();
    if (!T)
      T = StateType::getBestState(AAS);
    *T &= AAS;
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
                      << "\n");
    return T->isValidState();
  };

  bool UsedAssumedInformation = false;
  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true,
                              UsedAssumedInformation))
    S.indicatePessimisticFixpoint();
  else if (T)
    S ^= *T;
}
/// This function is the bridge between argument position and the call base
/// context.
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
bool getArgumentStateFromCallBaseContext(Attributor &A,
                                         BaseType &QueryingAttribute,
                                         IRPosition &Pos, StateType &State) {
  assert((Pos.getPositionKind() == IRPosition::IRP_ARGUMENT) &&
         "Expected an 'argument' position!");
  const CallBase *CBContext = Pos.getCallBaseContext();
  if (!CBContext)
    return false;

  int ArgNo = Pos.getCallSiteArgNo();
  assert(ArgNo >= 0 && "Invalid Arg No!");
  const IRPosition CBArgPos = IRPosition::callsite_argument(*CBContext, ArgNo);

  // If possible, use the hasAssumedIRAttr interface.
  if (Attribute::isEnumAttrKind(IRAttributeKind)) {
    bool IsKnown;
    return AA::hasAssumedIRAttr<IRAttributeKind>(
        A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
  }

  const auto *AA =
      A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
  if (!AA)
    return false;
  const StateType &CBArgumentState =
      static_cast<const StateType &>(AA->getState());

  LLVM_DEBUG(dbgs() << "[Attributor] Bridging call site context to argument "
                    << "Position: " << Pos << " CB Arg state: "
                    << CBArgumentState << "\n");

  // NOTE: If we want to do call site grouping it should happen here.
  State ^= CBArgumentState;
  return true;
}
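// Note on the `^=` above: in the Attributor state lattices this operator is
// not bitwise XOR; it "clamps" the current state with the other one so that
// only information assumed in both remains assumed afterwards (see the
// IntegerStateBase definitions in Attributor.h).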
/// Helper class for generic deduction: call site argument -> argument position.
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          bool BridgeCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
struct AAArgumentFromCallSiteArguments : public BaseType {
  AAArgumentFromCallSiteArguments(const IRPosition &IRP, Attributor &A)
      : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    StateType S = StateType::getBestState(this->getState());

    if (BridgeCallBaseContext) {
      bool Success =
          getArgumentStateFromCallBaseContext<AAType, BaseType, StateType,
                                              IRAttributeKind>(
              A, *this, this->getIRPosition(), S);
      if (Success)
        return clampStateAndIndicateChange<StateType>(this->getState(), S);
    }
    clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(A, *this,
                                                                    S);

    // TODO: If we know we visited all incoming values, thus none are assumed
    // dead, we can take the known information from the state T.
    return clampStateAndIndicateChange<StateType>(this->getState(), S);
  }
};
/// Helper class for generic replication: function returned -> cs returned.
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool IntroduceCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
struct AACalleeToCallSite : public BaseType {
  AACalleeToCallSite(const IRPosition &IRP, Attributor &A) : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto IRPKind = this->getIRPosition().getPositionKind();
    assert((IRPKind == IRPosition::IRP_CALL_SITE_RETURNED ||
            IRPKind == IRPosition::IRP_CALL_SITE) &&
           "Can only wrap function returned positions for call site "
           "returned positions!");
    auto &S = this->getState();

    CallBase &CB = cast<CallBase>(this->getAnchorValue());
    if (IntroduceCallBaseContext)
      LLVM_DEBUG(dbgs() << "[Attributor] Introducing call base context:" << CB
                        << "\n");

    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    auto CalleePred = [&](ArrayRef<const Function *> Callees) {
      for (const Function *Callee : Callees) {
        IRPosition FnPos =
            IRPKind == llvm::IRPosition::IRP_CALL_SITE_RETURNED
                ? IRPosition::returned(*Callee,
                                       IntroduceCallBaseContext ? &CB : nullptr)
                : IRPosition::function(
                      *Callee, IntroduceCallBaseContext ? &CB : nullptr);
        // If possible, use the hasAssumedIRAttr interface.
        if (Attribute::isEnumAttrKind(IRAttributeKind)) {
          bool IsKnown;
          if (!AA::hasAssumedIRAttr<IRAttributeKind>(
                  A, this, FnPos, DepClassTy::REQUIRED, IsKnown))
            return false;
          continue;
        }

        const AAType *AA =
            A.getAAFor<AAType>(*this, FnPos, DepClassTy::REQUIRED);
        if (!AA)
          return false;
        Changed |= clampStateAndIndicateChange(S, AA->getState());
        if (S.isAtFixpoint())
          return S.isValidState();
      }
      return true;
    };
    if (!A.checkForAllCallees(CalleePred, *this, CB))
      return S.indicatePessimisticFixpoint();
    return Changed;
  }
};
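// Concrete attributes instantiate this wrapper to forward callee information
// to call sites; for example, the AANoUnwindCallSite struct further down is
// simply AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>.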
/// Helper function to accumulate uses.
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInContext(AAType &AA, Attributor &A,
                                MustBeExecutedContextExplorer &Explorer,
                                const Instruction *CtxI,
                                SetVector<const Use *> &Uses,
                                StateType &State) {
  auto EIt = Explorer.begin(CtxI), EEnd = Explorer.end(CtxI);
  for (unsigned u = 0; u < Uses.size(); ++u) {
    const Use *U = Uses[u];
    if (const Instruction *UserI = dyn_cast<Instruction>(U->getUser())) {
      bool Found = Explorer.findInContextOf(UserI, EIt, EEnd);
      if (Found && AA.followUseInMBEC(A, U, UserI, State))
        for (const Use &Us : UserI->uses())
          Uses.insert(&Us);
    }
  }
}
/// Use the must-be-executed-context around \p I to add information into \p S.
/// The AAType class is required to have a `followUseInMBEC` method returning
/// true if the use should be tracked transitively.
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInMBEC(AAType &AA, Attributor &A, StateType &S,
                             const Instruction &CtxI) {
  MustBeExecutedContextExplorer *Explorer =
      A.getInfoCache().getMustBeExecutedContextExplorer();
  if (!Explorer)
    return;

  // Container for (transitive) uses of the associated value.
  SetVector<const Use *> Uses;
  for (const Use &U : AA.getIRPosition().getAssociatedValue().uses())
    Uses.insert(&U);

  followUsesInContext<AAType>(AA, A, *Explorer, &CtxI, Uses, S);

  if (S.isAtFixpoint())
    return;

  SmallVector<const BranchInst *, 4> BrInsts;
  auto Pred = [&](const Instruction *I) {
    if (const BranchInst *Br = dyn_cast<BranchInst>(I))
      if (Br->isConditional())
        BrInsts.push_back(Br);
    return true;
  };
  // ... (explore the must-be-executed context and collect the conditional
  //      branches it contains; elided in this excerpt)

  for (const BranchInst *Br : BrInsts) {
    StateType ParentState;

    // The known state of the parent state is a conjunction of children's
    // known states only.
    ParentState.indicateOptimisticFixpoint();

    for (const BasicBlock *BB : Br->successors()) {
      StateType ChildState;

      size_t BeforeSize = Uses.size();
      followUsesInContext(AA, A, *Explorer, &BB->front(), Uses, ChildState);

      // Erase uses which only appear in the child.
      for (auto It = Uses.begin() + BeforeSize; It != Uses.end();)
        It = Uses.erase(It);

      ParentState &= ChildState;
    }

    // Use only known state.
    S += ParentState;
  }
}
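// Intuition (illustrative): information that holds on every successor of a
// conditional branch inside the must-be-executed context also holds at the
// branch itself, so the per-successor states are met (&=) into ParentState
// and only the resulting known bits are folded back into S.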
namespace AA {
namespace PointerInfo {

/// State for AAPointerInfo: maps offset-and-size "bins" onto the accesses
/// that touch them. Large parts are elided in this excerpt.
struct State : public AbstractState {
  // ...
  static State getWorstState(const State &SIS) {
    State R;
    R.indicatePessimisticFixpoint();
    return R;
  }
  // ...

  /// See AAPointerInfo::forallInterferingAccesses.
  bool forallInterferingAccesses(
      AA::RangeTy Range,
      function_ref<bool(const AAPointerInfo::Access &, bool)> CB) const {
    if (!isValidState())
      return false;

    for (const auto &It : OffsetBins) {
      AA::RangeTy ItRange = It.getFirst();
      if (!Range.mayOverlap(ItRange))
        continue;
      bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
      for (auto Index : It.getSecond()) {
        auto &Access = AccessList[Index];
        if (!CB(Access, IsExact))
          return false;
      }
    }
    return true;
  }

  /// See AAPointerInfo::forallInterferingAccesses, instruction overload.
  /// (Elided except for the range accumulation over the local access list.)
  // ...
  //   for (unsigned Index : LocalList->getSecond()) {
  //     for (auto &R : AccessList[Index])
  //       Range &= R;
  //     if (Range.offsetAndSizeAreUnknown())
  //       break;
  //   }

  /// Add a new access for \p I to the state.
  ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges,
                         Instruction &I, std::optional<Value *> Content,
                         AAPointerInfo::AccessKind Kind, Type *Ty,
                         Instruction *RemoteI = nullptr) {
    RemoteI = RemoteI ? RemoteI : &I;

    // Check if we have an access for this instruction, if not, simply add it.
    auto &LocalList = RemoteIMap[RemoteI];
    bool AccExists = false;
    unsigned AccIndex = AccessList.size();
    for (auto Index : LocalList) {
      auto &A = AccessList[Index];
      if (A.getLocalInst() == &I) {
        AccExists = true;
        AccIndex = Index;
        break;
      }
    }

    auto AddToBins = [&](const AAPointerInfo::RangeList &ToAdd) {
      LLVM_DEBUG(if (ToAdd.size()) dbgs()
                     << "[AAPointerInfo] Inserting access in new offset bins\n";);
      for (auto Key : ToAdd)
        OffsetBins[Key].insert(AccIndex);
    };

    if (!AccExists) {
      AccessList.emplace_back(&I, RemoteI, Ranges, Content, Kind, Ty);
      assert((AccessList.size() == AccIndex + 1) &&
             "New Access should have been at AccIndex");
      LocalList.push_back(AccIndex);
      AddToBins(AccessList[AccIndex].getRanges());
      return ChangeStatus::CHANGED;
    }

    // Combine the new access with the existing one and update the offset bins.
    // ...
    auto &ExistingRanges = Before.getRanges();
    auto &NewRanges = Current.getRanges();
    if (ExistingRanges == NewRanges)
      return ChangeStatus::CHANGED;

    LLVM_DEBUG(if (ToRemove.size()) dbgs()
                   << "[AAPointerInfo] Removing access from old offset bins\n";);
    // ...
    assert(It != OffsetBins.end() &&
           "Expected bin to actually contain the Access.");
    // ...
  }
};

} // namespace PointerInfo
} // namespace AA
namespace {

/// A helper containing a list of offsets computed for a Use. Most are a single
/// constant, but a use connected to a PHI or select can have multiple offsets.
struct OffsetInfo {
  using VecTy = SmallVector<int64_t>;
  using const_iterator = VecTy::const_iterator;
  VecTy Offsets;

  const_iterator begin() const { return Offsets.begin(); }
  const_iterator end() const { return Offsets.end(); }

  bool operator==(const OffsetInfo &RHS) const {
    return Offsets == RHS.Offsets;
  }
  // ...

  bool isUnassigned() const { return Offsets.size() == 0; }

  bool isUnknown() const {
    if (isUnassigned())
      return false;
    if (Offsets.size() == 1)
      return Offsets.front() == AA::RangeTy::Unknown;
    return false;
  }
  // ...

  void addToAll(int64_t Inc) {
    for (auto &Offset : Offsets) {
      Offset += Inc;
    }
  }

  /// Copy offsets from \p R into the current list.
  ///
  /// Ideally all lists should be strictly ascending, but we defer that to the
  /// actual use of the list. So we just blindly append here.
  void merge(const OffsetInfo &R) { Offsets.append(R.Offsets); }
};
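// Example of the intended semantics (illustrative values): a pointer that
// reaches a use through two PHI incoming paths with offsets 8 and 24 carries
// the OffsetInfo {8, 24}; a subsequent constant GEP of 4 bytes applies
// addToAll(4) and yields {12, 28}.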
struct AAPointerInfoImpl
    : public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
  using BaseTy = StateWrapper<AA::PointerInfo::State, AAPointerInfo>;
  AAPointerInfoImpl(const IRPosition &IRP, Attributor &A) : BaseTy(IRP) {}

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return std::string("PointerInfo ") +
           (isValidState() ? (std::string("#") +
                              std::to_string(OffsetBins.size()) + " bins")
                           : "<invalid>");
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    return AAPointerInfo::manifest(A);
  }

  virtual const_bin_iterator begin() const override { return State::begin(); }
  virtual const_bin_iterator end() const override { return State::end(); }
  virtual int64_t numOffsetBins() const override {
    return State::numOffsetBins();
  }

  bool forallInterferingAccesses(
      AA::RangeTy Range,
      function_ref<bool(const AAPointerInfo::Access &, bool)> CB)
      const override {
    return State::forallInterferingAccesses(Range, CB);
  }
  bool forallInterferingAccesses(
      Attributor &A, const AbstractAttribute &QueryingAA, Instruction &I,
      bool FindInterferingWrites, bool FindInterferingReads,
      function_ref<bool(const Access &, bool)> UserCB, bool &HasBeenWrittenTo,
      AA::RangeTy &Range,
      function_ref<bool(const Access &)> SkipCB) const override {
    HasBeenWrittenTo = false;

    SmallPtrSet<const Access *, 8> DominatingWrites;
    SmallVector<std::pair<const Access *, bool>, 8> InterferingAccesses;

    Function &Scope = *I.getFunction();
    bool IsKnownNoSync;
    bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
        A, &QueryingAA, IRPosition::function(Scope), DepClassTy::OPTIONAL,
        IsKnownNoSync);
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(Scope), &QueryingAA, DepClassTy::NONE);
    bool AllInSameNoSyncFn = IsAssumedNoSync;
    bool InstIsExecutedByInitialThreadOnly =
        ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(I);

    // If the function is not ending in aligned barriers, we need the stores to
    // be in aligned barriers. The load being in one is not sufficient since the
    // store might be executed by a thread that disappears after, causing the
    // aligned region to end before the load is executed.
    bool InstIsExecutedInAlignedRegion =
        FindInterferingReads && ExecDomainAA &&
        ExecDomainAA->isExecutedInAlignedRegion(A, I);

    if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
      A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    InformationCache &InfoCache = A.getInfoCache();
    bool IsThreadLocalObj =
        AA::isAssumedThreadLocalObject(A, getAssociatedValue(), *this);

    // Helper to determine if we can ignore threading for an instruction.
    auto CanIgnoreThreadingForInst = [&](const Instruction &I) -> bool {
      if (IsThreadLocalObj || AllInSameNoSyncFn)
        return true;
      const auto *FnExecDomainAA =
          I.getFunction() == &Scope
              ? ExecDomainAA
              : A.lookupAAFor<AAExecutionDomain>(
                    IRPosition::function(*I.getFunction()), &QueryingAA,
                    DepClassTy::NONE);
      if (!FnExecDomainAA)
        return false;
      if (InstIsExecutedInAlignedRegion ||
          (FindInterferingWrites &&
           FnExecDomainAA->isExecutedInAlignedRegion(A, I))) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
        return true;
      }
      if (InstIsExecutedByInitialThreadOnly &&
          FnExecDomainAA->isExecutedByInitialThreadOnly(I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
        return true;
      }
      return false;
    };

    // Helper to determine if the access is executed by the same thread as the
    // given instruction.
    auto CanIgnoreThreading = [&](const Access &Acc) -> bool {
      return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
             (Acc.getRemoteInst() != Acc.getLocalInst() &&
              CanIgnoreThreadingForInst(*Acc.getLocalInst()));
    };

    // TODO: Use inter-procedural reachability and dominance.
    bool IsKnownNoRecurse;
    AA::hasAssumedIRAttr<Attribute::NoRecurse>(
        A, this, IRPosition::function(Scope), DepClassTy::OPTIONAL,
        IsKnownNoRecurse);

    bool InstInKernel = Scope.hasFnAttribute("kernel");
    bool ObjHasKernelLifetime = false;
    const bool UseDominanceReasoning =
        FindInterferingWrites && IsKnownNoRecurse;
    const DominatorTree *DT =
        InfoCache.getAnalysisResultForFunction<DominatorTreeAnalysis>(Scope);

    // Helper to check if a value has "kernel lifetime", that is it will not
    // outlive a GPU kernel. This is true for shared, constant, and local
    // globals on AMD and NVIDIA GPUs.
    auto HasKernelLifetime = [&](Value *V, Module &M) {
      if (!AA::isGPU(M))
        return false;
      switch (AA::GPUAddressSpace(V->getType()->getPointerAddressSpace())) {
      case AA::GPUAddressSpace::Shared:
      case AA::GPUAddressSpace::Constant:
      case AA::GPUAddressSpace::Local:
        return true;
      default:
        return false;
      };
    };

    // The IsLiveInCalleeCB will be used by the AA::isPotentiallyReachable query
    // to determine if we should look at reachability from the callee.
    std::function<bool(const Function &)> IsLiveInCalleeCB;

    if (auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
      // If the alloca containing function is not recursive the alloca must be
      // dead in the callee.
      const Function *AIFn = AI->getFunction();
      ObjHasKernelLifetime = AIFn->hasFnAttribute("kernel");
      bool IsKnownNoRecurse;
      if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
              A, this, IRPosition::function(*AIFn), DepClassTy::OPTIONAL,
              IsKnownNoRecurse)) {
        IsLiveInCalleeCB = [AIFn](const Function &Fn) { return AIFn != &Fn; };
      }
    } else if (auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
      // If the global has kernel lifetime we can stop if we reach a kernel
      // as it is "dead" in the (unknown) callees.
      ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
      if (ObjHasKernelLifetime)
        IsLiveInCalleeCB = [](const Function &Fn) {
          return !Fn.hasFnAttribute("kernel");
        };
    }

    // Set of accesses/instructions that will overwrite the result and are
    // therefore blockers in the reachability traversal.
    AA::InstExclusionSetTy ExclusionSet;

    auto AccessCB = [&](const Access &Acc, bool Exact) {
      Function *AccScope = Acc.getRemoteInst()->getFunction();
      bool AccInSameScope = AccScope == &Scope;

      // If the object has kernel lifetime we can ignore accesses only
      // reachable by other kernels.
      if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
          AccScope->hasFnAttribute("kernel"))
        return true;

      if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &I) {
        if (Acc.isWrite() || (isa<LoadInst>(I) && Acc.isWriteOrAssumption()))
          ExclusionSet.insert(Acc.getRemoteInst());
      }

      if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
          (!FindInterferingReads || !Acc.isRead()))
        return true;

      bool Dominates = FindInterferingWrites && DT && Exact &&
                       Acc.isMustAccess() && AccInSameScope &&
                       DT->dominates(Acc.getRemoteInst(), &I);
      if (Dominates)
        DominatingWrites.insert(&Acc);

      // Track if all interesting accesses are in the same `nosync` function as
      // the given instruction.
      AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &Scope;

      InterferingAccesses.push_back({&Acc, Exact});
      return true;
    };
    if (!State::forallInterferingAccesses(I, AccessCB, Range))
      return false;

    HasBeenWrittenTo = !DominatingWrites.empty();

    // Dominating writes are best candidates to be the least dominating write.
    const Instruction *LeastDominatingWriteInst = nullptr;
    for (const Access *Acc : DominatingWrites) {
      if (!LeastDominatingWriteInst) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      } else if (DT->dominates(LeastDominatingWriteInst,
                               Acc->getRemoteInst())) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      }
    }

    // Helper to determine if we can skip a specific write access.
    auto CanSkipAccess = [&](const Access &Acc, bool Exact) {
      if (SkipCB && SkipCB(Acc))
        return true;
      if (!CanIgnoreThreading(Acc))
        return false;

      // Check read (RAW) and write (WAR) dependences as necessary. If we can
      // reach the access from the instruction or vice versa we have to assume
      // interference.
      bool ReadChecked = !FindInterferingReads;
      bool WriteChecked = !FindInterferingWrites;

      if (!ReadChecked) {
        if (!AA::isPotentiallyReachable(A, I, *Acc.getRemoteInst(), QueryingAA,
                                        &ExclusionSet, IsLiveInCalleeCB))
          ReadChecked = true;
      }
      if (!WriteChecked) {
        if (!AA::isPotentiallyReachable(A, *Acc.getRemoteInst(), I, QueryingAA,
                                        &ExclusionSet, IsLiveInCalleeCB))
          WriteChecked = true;
      }

      // If we still might be affected by the write of the access but there are
      // dominating writes in the function of the instruction
      // (HasBeenWrittenTo), we can try to reason that the access is overwritten
      // by the least dominating write.
      if (!WriteChecked && HasBeenWrittenTo &&
          Acc.getRemoteInst()->getFunction() != &Scope) {
        const auto *FnReachabilityAA = A.getAAFor<AAInterFnReachability>(
            QueryingAA, IRPosition::function(Scope), DepClassTy::OPTIONAL);
        if (FnReachabilityAA) {
          // Without going backwards in the call tree, can we reach the access
          // from the least dominating write?
          if (!FnReachabilityAA->instructionCanReach(
                  A, *LeastDominatingWriteInst,
                  *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
            WriteChecked = true;
        }
      }

      // If we did not find any potential interference, we can skip the access.
      if (ReadChecked && WriteChecked)
        return true;

      if (!DT || !UseDominanceReasoning)
        return false;
      if (!DominatingWrites.count(&Acc))
        return false;
      return LeastDominatingWriteInst != Acc.getRemoteInst();
    };

    // Run the user callback on all accesses we cannot skip and return if
    // that succeeded for all or not.
    for (auto &It : InterferingAccesses) {
      if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
          !CanSkipAccess(*It.first, It.second)) {
        if (!UserCB(*It.first, It.second))
          return false;
      }
    }
    return true;
  }
  ChangeStatus translateAndAddStateFromCallee(Attributor &A,
                                              const AAPointerInfo &OtherAA,
                                              CallBase &CB) {
    using namespace AA::PointerInfo;
    if (!OtherAA.getState().isValidState() || !isValidState())
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);
    bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();

    // Combine the accesses bin by bin.
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (IsByval && !RAcc.isRead())
          continue;
        bool UsedAssumedInformation = false;
        AccessKind AK = RAcc.getKind();
        auto Content = A.translateArgumentToCallSiteContent(
            RAcc.getContent(), CB, *this, UsedAssumedInformation);
        AK = AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
        AK = AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));

        Changed |= addAccess(A, RAcc.getRanges(), CB, Content, AK,
                             RAcc.getType(), RAcc.getRemoteInst());
      }
    }
    return Changed;
  }
  ChangeStatus translateAndAddState(Attributor &A, const AAPointerInfo &OtherAA,
                                    const OffsetInfo &Offsets, CallBase &CB) {
    using namespace AA::PointerInfo;
    if (!OtherAA.getState().isValidState() || !isValidState())
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);

    // Combine the accesses bin by bin.
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        for (auto Offset : Offsets) {
          auto NewRanges = Offset == AA::RangeTy::Unknown
                               ? AA::RangeTy::getUnknown()
                               : RAcc.getRanges();
          if (!NewRanges.isUnknown()) {
            NewRanges.addToAllOffsets(Offset);
          }
          Changed |=
              addAccess(A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
                        RAcc.getType(), RAcc.getRemoteInst());
        }
      }
    }
    return Changed;
  }
  /// Statistic tracking for all AAPointerInfo implementations.
  /// See AbstractAttribute::trackStatistics().
  void trackPointerInfoStatistics(const IRPosition &IRP) const {}

  /// Dump the state into \p O.
  void dumpState(raw_ostream &O) {
    for (auto &It : OffsetBins) {
      O << "[" << It.first.Offset << "-" << It.first.Offset + It.first.Size
        << "] : " << It.getSecond().size() << "\n";
      for (auto AccIndex : It.getSecond()) {
        auto &Acc = AccessList[AccIndex];
        O << "     - " << Acc.getKind() << " - " << *Acc.getLocalInst() << "\n";
        if (Acc.getLocalInst() != Acc.getRemoteInst())
          O << "     -->                         " << *Acc.getRemoteInst()
            << "\n";
        if (!Acc.isWrittenValueYetUndetermined()) {
          if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
            O << "       - c: func " << Acc.getWrittenValue()->getName()
              << "\n";
          else if (Acc.getWrittenValue())
            O << "       - c: " << *Acc.getWrittenValue() << "\n";
          else
            O << "       - c: <unknown>\n";
        }
      }
    }
  }
};
struct AAPointerInfoFloating : public AAPointerInfoImpl {
  using AccessKind = AAPointerInfo::AccessKind;
  AAPointerInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  /// Deal with an access and signal if it was handled successfully.
  bool handleAccess(Attributor &A, Instruction &I,
                    std::optional<Value *> Content, AccessKind Kind,
                    SmallVectorImpl<int64_t> &Offsets, ChangeStatus &Changed,
                    Type &Ty) {
    using namespace AA::PointerInfo;
    auto Size = AA::RangeTy::Unknown;
    const DataLayout &DL = A.getDataLayout();
    TypeSize AccessSize = DL.getTypeStoreSize(&Ty);
    if (!AccessSize.isScalable())
      Size = AccessSize.getFixedValue();

    // Handle non-vector accesses and vector accesses we cannot decompose as a
    // single range; decompose constant vector stores element-wise otherwise.
    VectorType *VT = dyn_cast<VectorType>(&Ty);
    if (!VT || VT->getElementCount().isScalable() ||
        !Content.value_or(nullptr) || !isa<Constant>(*Content) ||
        (*Content)->getType() != VT ||
        DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
      Changed = Changed | addAccess(A, {Offsets, Size}, I, Content, Kind, &Ty);
    } else {
      Type *ElementType = VT->getElementType();
      int64_t ElementSize = DL.getTypeStoreSize(ElementType).getFixedValue();
      auto *ConstContent = cast<Constant>(*Content);
      Type *Int32Ty = Type::getInt32Ty(ElementType->getContext());
      SmallVector<int64_t> ElementOffsets(Offsets.begin(), Offsets.end());

      for (int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
        Value *ElementContent = ConstantExpr::getExtractElement(
            ConstContent, ConstantInt::get(Int32Ty, i));

        // Add the element access.
        Changed = Changed | addAccess(A, {ElementOffsets, ElementSize}, I,
                                      ElementContent, Kind, ElementType);

        // Advance the offsets for the next element.
        for (auto &ElementOffset : ElementOffsets)
          ElementOffset += ElementSize;
      }
    }
    return true;
  };

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// If the indices to \p GEP can be traced to constants, incorporate all
  /// of these into \p UsrOI. \return true iff \p UsrOI is updated.
  bool collectConstantsForGEP(Attributor &A, const DataLayout &DL,
                              OffsetInfo &UsrOI, const OffsetInfo &PtrOI,
                              const GEPOperator *GEP);

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};
bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &A,
                                                   const DataLayout &DL,
                                                   OffsetInfo &UsrOI,
                                                   const OffsetInfo &PtrOI,
                                                   const GEPOperator *GEP) {
  unsigned BitWidth = DL.getIndexTypeSizeInBits(GEP->getType());
  MapVector<Value *, APInt> VariableOffsets;
  APInt ConstantOffset(BitWidth, 0);

  assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
         "Don't look for constant values if the offset has already been "
         "determined to be unknown.");

  if (!GEP->collectOffset(DL, BitWidth, VariableOffsets, ConstantOffset)) {
    UsrOI.setUnknown();
    return true;
  }

  LLVM_DEBUG(dbgs() << "[AAPointerInfo] GEP offset is "
                    << (VariableOffsets.empty() ? "" : "not") << " constant "
                    << *GEP << "\n");

  auto Union = PtrOI;
  Union.addToAll(ConstantOffset.getSExtValue());

  // Each VI in VariableOffsets has a set of potential constant values. Every
  // combination of elements, picked one each from these sets, is separately
  // added to the original set of offsets, thus resulting in more offsets.
  for (const auto &VI : VariableOffsets) {
    auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
        *this, IRPosition::value(*VI.first), DepClassTy::OPTIONAL);
    if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
      UsrOI.setUnknown();
      return true;
    }

    // UndefValue is treated as a zero, which leaves Union as is.
    if (PotentialConstantsAA->undefIsContained())
      continue;

    // We need at least one constant in every set to compute an actual offset.
    // Otherwise, we end up pessimizing AAPointerInfo by respecting offsets
    // that don't actually exist.
    auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
    if (AssumedSet.empty())
      return false;

    OffsetInfo Product;
    for (const auto &ConstOffset : AssumedSet) {
      auto CopyPerOffset = Union;
      CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
                             VI.second.getZExtValue());
      Product.merge(CopyPerOffset);
    }
    Union = Product;
  }

  UsrOI = std::move(Union);
  return true;
}
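// Worked example (illustrative): for `gep i32, ptr %p, i64 %i` with
// PtrOI = {0} and %i assumed to be in {1, 4}, the constant part contributes 0
// and the variable part contributes {4, 16} (scale 4), so UsrOI becomes
// {4, 16}; a second variable index would form the cross product of both
// candidate sets via the Product/merge loop above.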
ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &A) {
  using namespace AA::PointerInfo;
  ChangeStatus Changed = ChangeStatus::UNCHANGED;
  const DataLayout &DL = A.getDataLayout();
  Value &AssociatedValue = getAssociatedValue();

  DenseMap<Value *, OffsetInfo> OffsetInfoMap;
  OffsetInfoMap[&AssociatedValue].insert(0);

  auto HandlePassthroughUser = [&](Value *Usr, Value *CurPtr, bool &Follow) {
    // One does not simply walk into a map and assign a reference to a possibly
    // new location: the RHS reference may be invalidated by the insertion the
    // LHS causes. Hence, look up both entries first.
    assert(OffsetInfoMap.count(CurPtr) &&
           "CurPtr does not exist in the map!");

    auto &UsrOI = OffsetInfoMap[Usr];
    auto &PtrOI = OffsetInfoMap[CurPtr];
    assert(!PtrOI.isUnassigned() &&
           "Cannot pass through if the input Ptr was not visited!");
    UsrOI.merge(PtrOI);
    Follow = true;
    return true;
  };
  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    Value *CurPtr = U.get();
    User *Usr = U.getUser();
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Analyze " << *CurPtr << " in " << *Usr
                      << "\n");
    assert(OffsetInfoMap.count(CurPtr) &&
           "The current pointer offset should have been seeded!");

    if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Usr)) {
      if (CE->isCast())
        return HandlePassthroughUser(Usr, CurPtr, Follow);
      if (!isa<GEPOperator>(CE)) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled constant user " << *CE
                          << "\n");
        return false;
      }
    }
    if (auto *GEP = dyn_cast<GEPOperator>(Usr)) {
      // Note the order here, the Usr access might change the map, CurPtr is
      // already in it though.
      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (UsrOI.isUnknown())
        return true;

      if (PtrOI.isUnknown()) {
        Follow = true;
        UsrOI.setUnknown();
        return true;
      }

      Follow = collectConstantsForGEP(A, DL, UsrOI, PtrOI, GEP);
      return true;
    }
    if (isa<PtrToIntInst>(Usr))
      return false;
    if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
      return HandlePassthroughUser(Usr, CurPtr, Follow);
    // For PHIs we need to take care of the recurrence explicitly as the value
    // might change while we iterate through a loop. For now, we give up if
    // the PHI is not invariant.
    if (auto *PHI = dyn_cast<PHINode>(Usr)) {
      // Note the order here, the Usr access might change the map, CurPtr is
      // already in it though.
      bool IsFirstPHIUser = !OffsetInfoMap.count(PHI);
      auto &UsrOI = OffsetInfoMap[PHI];
      auto &PtrOI = OffsetInfoMap[CurPtr];

      // Check if the PHI operand has already an unknown offset as we can't
      // improve on that anymore.
      if (PtrOI.isUnknown()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand offset unknown "
                          << *CurPtr << " in " << *PHI << "\n");
        Follow = !UsrOI.isUnknown();
        UsrOI.setUnknown();
        return true;
      }

      // Check if the PHI is invariant (so far).
      if (UsrOI == PtrOI) {
        assert(!PtrOI.isUnassigned() &&
               "Cannot assign if the current Ptr was not visited!");
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant (so far)");
        return true;
      }

      // Check if the PHI operand can be traced back to AssociatedValue.
      APInt Offset(
          DL.getIndexSizeInBits(CurPtr->getType()->getPointerAddressSpace()),
          0);
      Value *CurPtrBase = CurPtr->stripAndAccumulateConstantOffsets(
          DL, Offset, /* AllowNonInbounds */ true);
      auto It = OffsetInfoMap.find(CurPtrBase);
      if (It == OffsetInfoMap.end()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand is too complex "
                          << *CurPtr << " in " << *PHI
                          << " (base: " << *CurPtrBase << ")\n");
        UsrOI.setUnknown();
        Follow = true;
        return true;
      }

      // Check if the PHI operand is not dependent on the PHI itself. Every
      // recurrence is a cyclic net of PHIs in the data flow; one of those PHIs
      // must be in the header of the corresponding control flow cycle, so it
      // suffices to check the PHIs in every cycle header.
      const auto *CI =
          A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
              *PHI->getFunction());
      if (mayBeInCycle(CI, cast<Instruction>(Usr), /* HeaderOnly */ true)) {
        auto BaseOI = It->getSecond();
        BaseOI.addToAll(Offset.getZExtValue());
        if (IsFirstPHIUser || BaseOI == UsrOI) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant " << *CurPtr
                            << " in " << *Usr << "\n");
          return HandlePassthroughUser(Usr, CurPtr, Follow);
        }

        LLVM_DEBUG(
            dbgs() << "[AAPointerInfo] PHI operand pointer offset mismatch "
                   << *CurPtr << " in " << *PHI << "\n");
        UsrOI.setUnknown();
        Follow = true;
        return true;
      }

      UsrOI.merge(PtrOI);
      Follow = true;
      return true;
    }
    if (auto *LoadI = dyn_cast<LoadInst>(Usr)) {
      // If the access is to a pointer that may or may not be the associated
      // value, e.g. due to a PHI, we cannot assume it will be read.
      AccessKind AK = AccessKind::AK_R;
      if (getUnderlyingObject(CurPtr) == &AssociatedValue)
        AK = AccessKind(AK | AccessKind::AK_MUST);
      else
        AK = AccessKind(AK | AccessKind::AK_MAY);
      if (!handleAccess(A, *LoadI, /* Content */ nullptr, AK,
                        OffsetInfoMap[CurPtr].Offsets, Changed,
                        *LoadI->getType()))
        return false;

      // Check if the load is only used by llvm.assume-like intrinsics.
      auto IsAssumption = [](Instruction &I) {
        if (auto *II = dyn_cast<IntrinsicInst>(&I))
          return II->isAssumeLikeIntrinsic();
        return false;
      };

      auto IsImpactedInRange = [&](Instruction *FromI, Instruction *ToI) {
        // Check if the assumption and the load are executed together without
        // memory modification.
        do {
          if (FromI->mayWriteToMemory() && !IsAssumption(*FromI))
            return true;
          FromI = FromI->getNextNonDebugInstruction();
        } while (FromI && FromI != ToI);
        return false;
      };
      auto IsValidAssume = [&](IntrinsicInst &IntrI) {
        if (IntrI.getIntrinsicID() != Intrinsic::assume)
          return false;
        BasicBlock *BB = LoadI->getParent();
        if (IntrI.getParent() == BB) {
          if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
            return false;
        } else {
          // The assume has to be in a dedicated successor block whose only
          // sibling successors are unreachable.
          BasicBlock *IntrBB = IntrI.getParent();
          auto PredIt = pred_begin(IntrBB);
          if (PredIt == pred_end(IntrBB))
            return false;
          if ((*PredIt) != BB)
            return false;
          if (++PredIt != pred_end(IntrBB))
            return false;
          for (auto *SuccBB : successors(BB)) {
            if (SuccBB == IntrBB)
              continue;
            if (isa<UnreachableInst>(SuccBB->getTerminator()))
              continue;
            return false;
          }
          if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
                                BB->getTerminator()))
            return false;
          if (IsImpactedInRange(&IntrBB->front(), &IntrI))
            return false;
        }
        return true;
      };

      std::pair<Value *, IntrinsicInst *> Assumption;
      for (const Use &LoadU : LoadI->uses()) {
        if (auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
          if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
            continue;
          for (const Use &CmpU : CmpI->uses()) {
            if (auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
              if (!IsValidAssume(*IntrI))
                continue;
              int Idx = CmpI->getOperandUse(0) == LoadU;
              Assumption = {CmpI->getOperand(Idx), IntrI};
              break;
            }
          }
        }
        if (Assumption.first)
          break;
      }

      // Check if we found an assumption associated with this load.
      if (!Assumption.first || !Assumption.second)
        return true;

      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Assumption found "
                        << *Assumption.second << ": " << *LoadI
                        << " == " << *Assumption.first << "\n");
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (Assumption.first)
        Content =
            A.getAssumedSimplified(*Assumption.first, *this,
                                   UsedAssumedInformation, AA::Interprocedural);
      return handleAccess(
          A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
          OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
    }
    auto HandleStoreLike = [&](Instruction &I, Value *ValueOp, Type &ValueTy,
                               ArrayRef<Value *> OtherOps, AccessKind AK) {
      for (auto *OtherOp : OtherOps) {
        if (OtherOp == CurPtr) {
          LLVM_DEBUG(
              dbgs()
              << "[AAPointerInfo] Escaping use in store like instruction " << I
              << "\n");
          return false;
        }
      }

      // If the access is to a pointer that may or may not be the associated
      // value, e.g. due to a PHI, we cannot assume it will be written.
      if (getUnderlyingObject(CurPtr) == &AssociatedValue)
        AK = AccessKind(AK | AccessKind::AK_MUST);
      else
        AK = AccessKind(AK | AccessKind::AK_MAY);
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (ValueOp)
        Content = A.getAssumedSimplified(
            *ValueOp, *this, UsedAssumedInformation, AA::Interprocedural);
      return handleAccess(A, I, Content, AK, OffsetInfoMap[CurPtr].Offsets,
                          Changed, ValueTy);
    };

    if (auto *StoreI = dyn_cast<StoreInst>(Usr))
      return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
                             *StoreI->getValueOperand()->getType(),
                             {StoreI->getValueOperand()}, AccessKind::AK_W);
    if (auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
      return HandleStoreLike(*RMWI, nullptr, *RMWI->getValOperand()->getType(),
                             {RMWI->getValOperand()}, AccessKind::AK_RW);
    if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
      return HandleStoreLike(
          *CXI, nullptr, *CXI->getNewValOperand()->getType(),
          {CXI->getCompareOperand(), CXI->getNewValOperand()},
          AccessKind::AK_RW);
    if (auto *CB = dyn_cast<CallBase>(Usr)) {
      if (CB->isLifetimeStartOrEnd())
        return true;
      const auto *TLI =
          A.getInfoCache().getTargetLibraryInfoForFunction(*CB->getFunction());
      if (getFreedOperand(CB, TLI) == U)
        return true;
      if (CB->isArgOperand(&U)) {
        unsigned ArgNo = CB->getArgOperandNo(&U);
        const auto *CSArgPI = A.getAAFor<AAPointerInfo>(
            *this, IRPosition::callsite_argument(*CB, ArgNo),
            DepClassTy::REQUIRED);
        if (!CSArgPI)
          return false;
        Changed =
            translateAndAddState(A, *CSArgPI, OffsetInfoMap[CurPtr], *CB) |
            Changed;
        return isValidState();
      }
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Call user not handled " << *CB
                        << "\n");
      // TODO: Allow some call uses.
      return false;
    }

    LLVM_DEBUG(dbgs() << "[AAPointerInfo] User not handled " << *Usr << "\n");
    return false;
  };
  auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
    assert(OffsetInfoMap.count(OldU) && "Old use should be known already!");
    if (OffsetInfoMap.count(NewU)) {
      LLVM_DEBUG({
        if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
          dbgs() << "[AAPointerInfo] Equivalent use callback failed: "
                 << OffsetInfoMap[NewU] << " vs " << OffsetInfoMap[OldU]
                 << "\n";
        }
      });
      return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
    }
    OffsetInfoMap[NewU] = OffsetInfoMap[OldU];
    return true;
  };
  if (!A.checkForAllUses(UsePred, *this, AssociatedValue,
                         /* CheckBBLivenessOnly */ true, DepClassTy::OPTIONAL,
                         /* IgnoreDroppableUses */ true, EquivalentUseCB)) {
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Check for all uses failed, abort!\n");
    return indicatePessimisticFixpoint();
  }

  LLVM_DEBUG({
    dbgs() << "Accesses by bin after update:\n";
    dumpState(dbgs());
  });

  return Changed;
}
struct AAPointerInfoReturned final : AAPointerInfoImpl {
  AAPointerInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoArgument final : AAPointerInfoFloating {
  AAPointerInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
  AAPointerInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    using namespace AA::PointerInfo;
    // We handle memory intrinsics explicitly, at least the first (=
    // destination) and second (= source) arguments as we know how they are
    // accessed.
    if (auto *MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
      ConstantInt *Length = dyn_cast<ConstantInt>(MI->getLength());
      int64_t LengthVal = AA::RangeTy::Unknown;
      if (Length)
        LengthVal = Length->getSExtValue();
      unsigned ArgNo = getIRPosition().getCallSiteArgNo();
      ChangeStatus Changed = ChangeStatus::UNCHANGED;
      if (ArgNo > 1) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled memory intrinsic "
                          << *MI << "\n");
        return indicatePessimisticFixpoint();
      }
      AccessKind Kind =
          ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
      Changed =
          Changed | addAccess(A, {0, LengthVal}, *MI, nullptr, Kind, nullptr);
      LLVM_DEBUG({
        dbgs() << "Accesses by bin after update:\n";
        dumpState(dbgs());
      });

      return Changed;
    }

    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information and then it makes sense to
    //       specialize attributes for call sites arguments instead of
    //       redirecting requests to the callee argument.
    Argument *Arg = getAssociatedArgument();
    if (Arg) {
      const IRPosition &ArgPos = IRPosition::argument(*Arg);
      auto *ArgAA =
          A.getAAFor<AAPointerInfo>(*this, ArgPos, DepClassTy::REQUIRED);
      if (ArgAA && ArgAA->getState().isValidState())
        return translateAndAddStateFromCallee(A, *ArgAA,
                                              *cast<CallBase>(getCtxI()));
      if (!Arg->getParent()->isDeclaration())
        return indicatePessimisticFixpoint();
    }

    bool IsKnownNoCapture;
    if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
      return indicatePessimisticFixpoint();

    bool IsKnown = false;
    if (AA::isAssumedReadNone(A, getIRPosition(), *this, IsKnown))
      return ChangeStatus::UNCHANGED;
    bool ReadOnly = AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown);
    auto Kind =
        ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
    return addAccess(A, AA::RangeTy::getUnknown(), *getCtxI(), nullptr, Kind,
                     nullptr);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
  AAPointerInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};
2054 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2055 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2059 const std::string getAsStr(
Attributor *
A)
const override {
2060 return getAssumed() ?
"nounwind" :
"may-unwind";
2066 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2067 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2068 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2071 if (!
I.mayThrow(
true))
2074 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2075 bool IsKnownNoUnwind;
2076 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2083 bool UsedAssumedInformation =
false;
2084 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2085 UsedAssumedInformation))
2086 return indicatePessimisticFixpoint();
2088 return ChangeStatus::UNCHANGED;
2092struct AANoUnwindFunction final :
public AANoUnwindImpl {
2094 : AANoUnwindImpl(IRP,
A) {}
2101struct AANoUnwindCallSite final
2102 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2104 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
/// --------------------- NoSync Function Attribute -----------------------

bool AANoSync::isAlignedBarrier(const CallBase &CB, bool ExecutedAligned) {
  switch (CB.getIntrinsicID()) {
  case Intrinsic::nvvm_barrier0:
  case Intrinsic::nvvm_barrier0_and:
  case Intrinsic::nvvm_barrier0_or:
  case Intrinsic::nvvm_barrier0_popc:
    return true;
  case Intrinsic::amdgcn_s_barrier:
    if (ExecutedAligned)
      return true;
    break;
  default:
    break;
  }
  return hasAssumption(CB, KnownAssumptionString("ompx_aligned_barrier"));
}

bool AANoSync::isNonRelaxedAtomic(const Instruction *I) {
  if (!I->isAtomic())
    return false;

  if (auto *FI = dyn_cast<FenceInst>(I))
    // All legal orderings for fence are stronger than monotonic.
    return FI->getSyncScopeID() != SyncScope::SingleThread;
  if (auto *AI = dyn_cast<AtomicCmpXchgInst>(I)) {
    // Unordered is not a legal ordering for cmpxchg.
    return (AI->getSuccessOrdering() != AtomicOrdering::Monotonic ||
            AI->getFailureOrdering() != AtomicOrdering::Monotonic);
  }

  AtomicOrdering Ordering;
  switch (I->getOpcode()) {
  case Instruction::AtomicRMW:
    Ordering = cast<AtomicRMWInst>(I)->getOrdering();
    break;
  case Instruction::Store:
    Ordering = cast<StoreInst>(I)->getOrdering();
    break;
  case Instruction::Load:
    Ordering = cast<LoadInst>(I)->getOrdering();
    break;
  default:
    llvm_unreachable(
        "New atomic operations need to be known in the attributor.");
  }

  return (Ordering != AtomicOrdering::Unordered &&
          Ordering != AtomicOrdering::Monotonic);
}

/// Return true if this intrinsic is nosync.  This is only used for intrinsics
/// which would be nosync except that they have a volatile flag.  All other
/// intrinsics are simply annotated with the nosync attribute in Intrinsics.td.
bool AANoSync::isNoSyncIntrinsic(const Instruction *I) {
  if (auto *MI = dyn_cast<MemIntrinsic>(I))
    return !MI->isVolatile();
  return false;
}
2179 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2180 DepClassTy::NONE, IsKnown));
2184 const std::string getAsStr(
Attributor *
A)
const override {
2185 return getAssumed() ?
"nosync" :
"may-sync";
2201 if (
I.mayReadOrWriteMemory())
2206 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2215 bool UsedAssumedInformation =
false;
2216 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2217 UsedAssumedInformation) ||
2218 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2219 UsedAssumedInformation))
2220 return indicatePessimisticFixpoint();
2225struct AANoSyncFunction final :
public AANoSyncImpl {
2227 : AANoSyncImpl(IRP,
A) {}
2234struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2236 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
/// ------------------------ No-Free Attributes ----------------------------

struct AANoFreeImpl : public AANoFree {
  AANoFreeImpl(const IRPosition &IRP, Attributor &A) : AANoFree(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(A, nullptr, getIRPosition(),
                                                    DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckForNoFree = [&](Instruction &I) {
      bool IsKnown;
      return AA::hasAssumedIRAttr<Attribute::NoFree>(
          A, this, IRPosition::callsite_function(cast<CallBase>(I)),
          DepClassTy::REQUIRED, IsKnown);
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForNoFree, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nofree" : "may-free";
  }
};

struct AANoFreeFunction final : public AANoFreeImpl {
  AANoFreeFunction(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(nofree) }
};

/// NoFree attribute deduction for a call sites.
struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
  AANoFreeCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(nofree); }
};

/// NoFree attribute for floating values.
struct AANoFreeFloating : AANoFreeImpl {
  AANoFreeFloating(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(nofree)
  }

  /// See Abstract Attribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();

    bool IsKnown;
    if (AA::hasAssumedIRAttr<Attribute::NoFree>(
            A, this, IRPosition::function_scope(IRP), DepClassTy::OPTIONAL,
            IsKnown))
      return ChangeStatus::UNCHANGED;

    Value &AssociatedValue = getIRPosition().getAssociatedValue();
    auto Pred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());
      if (auto *CB = dyn_cast<CallBase>(UserI)) {
        if (CB->isBundleOperand(&U))
          return false;
        if (!CB->isArgOperand(&U))
          return true;
        unsigned ArgNo = CB->getArgOperandNo(&U);

        bool IsKnown;
        return AA::hasAssumedIRAttr<Attribute::NoFree>(
            A, this, IRPosition::callsite_argument(*CB, ArgNo),
            DepClassTy::REQUIRED, IsKnown);
      }

      if (isa<GetElementPtrInst>(UserI) || isa<PHINode>(UserI) ||
          isa<SelectInst>(UserI)) {
        Follow = true;
        return true;
      }
      if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
          isa<ReturnInst>(UserI))
        return true;

      // Unknown user.
      return false;
    };
    if (!A.checkForAllUses(Pred, *this, AssociatedValue))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
};

/// NoFree attribute for a call site argument.
struct AANoFreeArgument final : AANoFreeFloating {
  AANoFreeArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(nofree) }
};

struct AANoFreeCallSiteArgument final : AANoFreeFloating {
  AANoFreeCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information and then it makes sense to
    //       specialize attributes for call sites arguments instead of
    //       redirecting requests to the callee argument.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    bool IsKnown;
    if (AA::hasAssumedIRAttr<Attribute::NoFree>(A, this, ArgPos,
                                                DepClassTy::REQUIRED, IsKnown))
      return ChangeStatus::UNCHANGED;
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(nofree) }
};

/// NoFree attribute for function return value.
struct AANoFreeReturned final : AANoFreeFloating {
  AANoFreeReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {
    llvm_unreachable("NoFree is not applicable to function returns!");
  }
  // ...
  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};

/// NoFree attribute deduction for a call site return value.
struct AANoFreeCallSiteReturned final : AANoFreeFloating {
  AANoFreeCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

  ChangeStatus manifest(Attributor &A) override {
    return ChangeStatus::UNCHANGED;
  }
  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(nofree) }
};
/// ------------------------ NonNull Argument Attribute ------------------------

bool AANonNull::isImpliedByIR(Attributor &A, const IRPosition &IRP,
                              Attribute::AttrKind ImpliedAttributeKind,
                              bool IgnoreSubsumingPositions) {
  SmallVector<Attribute::AttrKind, 2> AttrKinds;
  AttrKinds.push_back(Attribute::NonNull);
  if (!NullPointerIsDefined(IRP.getAnchorScope(),
                            IRP.getAssociatedType()->getPointerAddressSpace()))
    AttrKinds.push_back(Attribute::Dereferenceable);
  if (A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
    return true;

  DominatorTree *DT = nullptr;
  AssumptionCache *AC = nullptr;
  InformationCache &InfoCache = A.getInfoCache();
  if (const Function *Fn = IRP.getAnchorScope()) {
    if (!Fn->isDeclaration()) {
      DT = InfoCache.getAnalysisResultForFunction<DominatorTreeAnalysis>(*Fn);
      AC = InfoCache.getAnalysisResultForFunction<AssumptionAnalysis>(*Fn);
    }
  }

  SmallVector<AA::ValueAndContext> Worklist;
  if (IRP.getPositionKind() != IRPosition::IRP_RETURNED) {
    Worklist.push_back({IRP.getAssociatedValue(), IRP.getCtxI()});
  } else {
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
            [&](Instruction &I) {
              Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
              return true;
            },
            IRP.getAssociatedFunction(), nullptr, {Instruction::Ret},
            UsedAssumedInformation, false, true))
      return false;
  }

  if (llvm::any_of(Worklist, [&](AA::ValueAndContext VAC) {
        return !isKnownNonZero(VAC.getValue(), A.getDataLayout(), 0, AC,
                               VAC.getCtxI(), DT);
      }))
    return false;

  A.manifestAttrs(IRP, {Attribute::get(IRP.getAnchorValue().getContext(),
                                       Attribute::NonNull)});
  return true;
}
static int64_t getKnownNonNullAndDerefBytesForUse(
    Attributor &A, const AbstractAttribute &QueryingAA, Value &AssociatedValue,
    const Use *U, const Instruction *I, bool &IsNonNull, bool &TrackUse) {
  TrackUse = false;

  const Value *UseV = U->get();
  if (!UseV->getType()->isPointerTy())
    return 0;

  // We need to follow common pointer manipulation uses to the accesses they
  // feed into.
  if (isa<CastInst>(I)) {
    // Follow all but ptr2int casts.
    TrackUse = !isa<PtrToIntInst>(I);
    return 0;
  }
  if (isa<GetElementPtrInst>(I)) {
    TrackUse = true;
    return 0;
  }

  const DataLayout &DL = A.getDataLayout();
  bool NullPointerIsDefined =
      llvm::NullPointerIsDefined(I->getFunction(),
                                 UseV->getType()->getPointerAddressSpace());

  if (const auto *CB = dyn_cast<CallBase>(I)) {
    if (CB->isBundleOperand(U)) {
      if (RetainedKnowledge RK = getKnowledgeFromUse(
              U, {Attribute::NonNull, Attribute::Dereferenceable})) {
        IsNonNull |=
            (RK.AttrKind == Attribute::NonNull || !NullPointerIsDefined);
        return RK.ArgValue;
      }
      return 0;
    }
    // ...
    IRPosition IRP = IRPosition::callsite_argument(*CB, CB->getArgOperandNo(U));
    // As long as we only use known information there is no need to track
    // dependences here.
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(A, &QueryingAA, IRP,
                                             DepClassTy::NONE, IsKnownNonNull);
    IsNonNull |= IsKnownNonNull;
    auto *DerefAA =
        A.getAAFor<AADereferenceable>(QueryingAA, IRP, DepClassTy::NONE);
    return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;
  }

  std::optional<MemoryLocation> Loc = MemoryLocation::getOrNone(I);
  if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
      Loc->Size.isScalable() || I->isVolatile())
    return 0;

  int64_t Offset;
  const Value *Base =
      getMinimalBaseOfPointer(A, QueryingAA, Loc->Ptr, Offset, DL);
  if (Base && Base == &AssociatedValue) {
    int64_t DerefBytes = Loc->Size.getValue() + Offset;
    IsNonNull |= !NullPointerIsDefined;
    return std::max(int64_t(0), DerefBytes);
  }

  /// Corner case when an offset is 0.
  Base = GetPointerBaseWithConstantOffset(Loc->Ptr, Offset, DL,
                                          /*AllowNonInbounds*/ true);
  if (Base && Base == &AssociatedValue && Offset == 0) {
    int64_t DerefBytes = Loc->Size.getValue();
    IsNonNull |= !NullPointerIsDefined;
    return std::max(int64_t(0), DerefBytes);
  }

  return 0;
}
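// Example (illustrative): for `store i64 0, ptr %q` where %q is %obj plus a
// constant offset of 8, the access location has size 8 and offset 8 from
// %obj, so %obj is known dereferenceable for 16 bytes, and nonnull whenever
// null is not a defined address in the relevant address space.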
struct AANonNullImpl : AANonNull {
  AANonNullImpl(const IRPosition &IRP, Attributor &A) : AANonNull(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    if (isa<ConstantPointerNull>(V)) {
      indicatePessimisticFixpoint();
      return;
    }

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANonNull::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    getKnownNonNullAndDerefBytesForUse(A, *this, getAssociatedValue(), U, I,
                                       IsNonNull, TrackUse);
    State.setKnown(IsNonNull);
    return TrackUse;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";
  }
};
/// NonNull attribute for a floating value.
struct AANonNullFloating : public AANonNullImpl {
  AANonNullFloating(const IRPosition &IRP, Attributor &A)
      : AANonNullImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckIRP = [&](const IRPosition &IRP) {
      bool IsKnownNonNull;
      return AA::hasAssumedIRAttr<Attribute::NonNull>(
          A, *this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
    };

    bool Stripped;
    bool UsedAssumedInformation = false;
    Value *AssociatedValue = &getAssociatedValue();
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      AA::AnyScope, UsedAssumedInformation))
      Stripped = false;
    else
      Stripped =
          Values.size() != 1 || Values.front().getValue() != AssociatedValue;

    if (!Stripped) {
      bool IsKnown;
      // If we haven't stripped anything we might still be able to use a
      // different AA, but only if the IRP changes. Effectively when we
      // interpret the value not as a PHI or select we can look at the
      // operands separately.
      if (auto *PHI = dyn_cast<PHINode>(AssociatedValue))
        if (llvm::all_of(PHI->incoming_values(), [&](Value *Op) {
              return AA::hasAssumedIRAttr<Attribute::NonNull>(
                  A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
                  IsKnown);
            }))
          return ChangeStatus::UNCHANGED;
      if (auto *Select = dyn_cast<SelectInst>(AssociatedValue))
        if (AA::hasAssumedIRAttr<Attribute::NonNull>(
                A, this, IRPosition::value(*Select->getFalseValue()),
                DepClassTy::OPTIONAL, IsKnown) &&
            AA::hasAssumedIRAttr<Attribute::NonNull>(
                A, this, IRPosition::value(*Select->getTrueValue()),
                DepClassTy::OPTIONAL, IsKnown))
          return ChangeStatus::UNCHANGED;

      // If we haven't stripped anything we can use the AA for the value,
      // otherwise we would have overlapping deductions.
      const IRPosition AVIRP = IRPosition::value(*AssociatedValue);
      if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
        return indicatePessimisticFixpoint();
      return ChangeStatus::UNCHANGED;
    }

    for (const auto &VAC : Values)
      if (!CheckIRP(IRPosition::value(*VAC.getValue())))
        return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(nonnull)
  }
};
/// NonNull attribute for function return value.
struct AANonNullReturned final
    : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                   false, AANonNull::IRAttributeKind, false> {
  AANonNullReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                     false, Attribute::NonNull, false>(IRP, A) {
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(nonnull) }
};

/// NonNull attribute for function argument.
struct AANonNullArgument final
    : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
  AANonNullArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(nonnull) }
};

struct AANonNullCallSiteArgument final : AANonNullFloating {
  AANonNullCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANonNullFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(nonnull) }
};

/// NonNull attribute for a call site return position.
struct AANonNullCallSiteReturned final
    : AACalleeToCallSite<AANonNull, AANonNullImpl> {
  AANonNullCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(nonnull) }
};
2702 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2703 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2708 const std::string getAsStr(
Attributor *
A)
const override {
2709 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2713struct AAMustProgressFunction final : AAMustProgressImpl {
2715 : AAMustProgressImpl(IRP,
A) {}
2720 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2721 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2723 return indicateOptimisticFixpoint();
2724 return ChangeStatus::UNCHANGED;
2729 bool IsKnownMustProgress;
2730 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2731 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2735 bool AllCallSitesKnown =
true;
2736 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2739 return indicatePessimisticFixpoint();
2741 return ChangeStatus::UNCHANGED;
2745 void trackStatistics()
const override {
2751struct AAMustProgressCallSite final : AAMustProgressImpl {
2753 : AAMustProgressImpl(IRP,
A) {}
2762 bool IsKnownMustProgress;
2763 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2764 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2765 return indicatePessimisticFixpoint();
2766 return ChangeStatus::UNCHANGED;
2770 void trackStatistics()
const override {
2785 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2786 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2791 const std::string getAsStr(
Attributor *
A)
const override {
2792 return getAssumed() ?
"norecurse" :
"may-recurse";
2796struct AANoRecurseFunction final : AANoRecurseImpl {
2798 : AANoRecurseImpl(IRP,
A) {}
2805 bool IsKnownNoRecurse;
2806 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2809 DepClassTy::NONE, IsKnownNoRecurse))
2811 return IsKnownNoRecurse;
2813 bool UsedAssumedInformation =
false;
2814 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2815 UsedAssumedInformation)) {
2821 if (!UsedAssumedInformation)
2822 indicateOptimisticFixpoint();
2823 return ChangeStatus::UNCHANGED;
2828 DepClassTy::REQUIRED);
2829 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2830 return indicatePessimisticFixpoint();
2831 return ChangeStatus::UNCHANGED;
2838struct AANoRecurseCallSite final
2839 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2841 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
/// ------------------------ No-Convergent Attribute ---------------------------

struct AANonConvergentImpl : public AANonConvergent {
  AANonConvergentImpl(const IRPosition &IRP, Attributor &A)
      : AANonConvergent(IRP, A) {}

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "non-convergent" : "may-be-convergent";
  }
};

struct AANonConvergentFunction final : AANonConvergentImpl {
  AANonConvergentFunction(const IRPosition &IRP, Attributor &A)
      : AANonConvergentImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If all function calls are known to not be convergent, we are not
    // convergent.
    auto CalleeIsNotConvergent = [&](Instruction &Inst) {
      CallBase &CB = cast<CallBase>(Inst);
      auto *Callee = dyn_cast_if_present<Function>(CB.getCalledOperand());
      if (!Callee || Callee->isIntrinsic()) {
        return false;
      }
      if (Callee->isDeclaration()) {
        return !Callee->hasFnAttribute(Attribute::Convergent);
      }
      const auto *ConvergentAA = A.getAAFor<AANonConvergent>(
          *this, IRPosition::function(*Callee), DepClassTy::REQUIRED);
      return ConvergentAA && ConvergentAA->isAssumedNotConvergent();
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *this,
                                           UsedAssumedInformation)) {
      return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }

  ChangeStatus manifest(Attributor &A) override {
    if (isKnownNotConvergent() &&
        A.hasAttr(getIRPosition(), Attribute::Convergent)) {
      A.removeAttrs(getIRPosition(), {Attribute::Convergent});
      return ChangeStatus::CHANGED;
    }
    return ChangeStatus::UNCHANGED;
  }

  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(convergent)
  }
};
/// ------------------------ UndefinedBehavior Attribute -----------------------

struct AAUndefinedBehaviorImpl : public AAUndefinedBehavior {
  AAUndefinedBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehavior(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const size_t UBPrevSize = KnownUBInsts.size();
    const size_t NoUBPrevSize = AssumedNoUBInsts.size();

    auto InspectMemAccessInstForUB = [&](Instruction &I) {
      // Lang ref now states volatile store is not UB, let's skip them.
      if (I.isVolatile() && I.mayWriteToMemory())
        return true;

      // Skip instructions that are already saved.
      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

      // If we reach here, we know we have an instruction that accesses memory
      // through a pointer operand.
      Value *PtrOp =
          const_cast<Value *>(getPointerOperand(&I, /* AllowVolatile */ true));
      assert(PtrOp &&
             "Expected pointer operand of memory accessing instruction");

      // Either we stopped and the appropriate action was taken,
      // or we got back a simplified value to continue.
      std::optional<Value *> SimplifiedPtrOp =
          stopOnUndefOrAssumed(A, PtrOp, &I);
      if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
        return true;
      const Value *PtrOpVal = *SimplifiedPtrOp;

      // A memory access through a pointer is considered UB only if the
      // pointer has constant null value.
      // TODO: Expand it to not only check constant values.
      if (!isa<ConstantPointerNull>(PtrOpVal)) {
        AssumedNoUBInsts.insert(&I);
        return true;
      }

      // A memory access using constant null pointer is only considered UB if
      // null pointer is _not_ defined for the target platform.
      Type *PtrTy = PtrOpVal->getType();
      Function *F = I.getFunction();
      if (llvm::NullPointerIsDefined(F, PtrTy->getPointerAddressSpace())) {
        AssumedNoUBInsts.insert(&I);
        return true;
      }
      KnownUBInsts.insert(&I);
      return true;
    };
2969 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2973 auto *BrInst = cast<BranchInst>(&
I);
2976 if (BrInst->isUnconditional())
2981 std::optional<Value *> SimplifiedCond =
2982 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
2983 if (!SimplifiedCond || !*SimplifiedCond)
2985 AssumedNoUBInsts.insert(&
I);
2993 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3002 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3008 if (idx >=
Callee->arg_size())
3020 bool IsKnownNoUndef;
3021 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3022 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3023 if (!IsKnownNoUndef)
3025 bool UsedAssumedInformation =
false;
3026 std::optional<Value *> SimplifiedVal =
3029 if (UsedAssumedInformation)
3031 if (SimplifiedVal && !*SimplifiedVal)
3033 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3034 KnownUBInsts.insert(&
I);
3038 !isa<ConstantPointerNull>(**SimplifiedVal))
3040 bool IsKnownNonNull;
3041 AA::hasAssumedIRAttr<Attribute::NonNull>(
3042 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3044 KnownUBInsts.insert(&
I);
3050 auto &RI = cast<ReturnInst>(
I);
3053 std::optional<Value *> SimplifiedRetValue =
3054 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3055 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3072 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3073 bool IsKnownNonNull;
3074 AA::hasAssumedIRAttr<Attribute::NonNull>(
3078 KnownUBInsts.insert(&
I);
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(InspectMemAccessInstForUB, *this,
                              {Instruction::Load, Instruction::Store,
                               Instruction::AtomicCmpXchg,
                               Instruction::AtomicRMW},
                              UsedAssumedInformation,
                              /* CheckBBLivenessOnly */ true);
    A.checkForAllInstructions(InspectBrInstForUB, *this, {Instruction::Br},
                              UsedAssumedInformation,
                              /* CheckBBLivenessOnly */ true);
    A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *this,
                                      UsedAssumedInformation);

    // If the returned position of the anchor scope has a noundef attribute,
    // check all return instructions.
    if (!getAnchorScope()->getReturnType()->isVoidTy()) {
      const IRPosition &ReturnIRP = IRPosition::returned(*getAnchorScope());
      if (!A.isAssumedDead(ReturnIRP, this, nullptr, UsedAssumedInformation)) {
        bool IsKnownNoUndef;
        AA::hasAssumedIRAttr<Attribute::NoUndef>(
            A, this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
        if (IsKnownNoUndef)
          A.checkForAllInstructions(InspectReturnInstForUB, *this,
                                    {Instruction::Ret}, UsedAssumedInformation,
                                    /* CheckBBLivenessOnly */ true);
      }
    }

    if (NoUBPrevSize != AssumedNoUBInsts.size() ||
        UBPrevSize != KnownUBInsts.size())
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }
  bool isKnownToCauseUB(Instruction *I) const override {
    return KnownUBInsts.count(I);
  }

  bool isAssumedToCauseUB(Instruction *I) const override {
    // In simple words, if an instruction is not in the assumed-to-_not_-cause-
    // UB set, then it is assumed UB (that includes those in the KnownUBInsts
    // set). The rest is boilerplate to ensure that it is one of the
    // instructions we test for UB.

    switch (I->getOpcode()) {
    case Instruction::Load:
    case Instruction::Store:
    case Instruction::AtomicCmpXchg:
    case Instruction::AtomicRMW:
      return !AssumedNoUBInsts.count(I);
    case Instruction::Br: {
      auto *BrInst = cast<BranchInst>(I);
      if (BrInst->isUnconditional())
        return false;
      return !AssumedNoUBInsts.count(I);
    }
    default:
      return false;
    }
    return false;
  }

  ChangeStatus manifest(Attributor &A) override {
    if (KnownUBInsts.empty())
      return ChangeStatus::UNCHANGED;
    for (Instruction *I : KnownUBInsts)
      A.changeToUnreachableAfterManifest(I);
    return ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "undefined-behavior" : "no-ub";
  }
  /// Stop the value simplification on `undef` or an assumed value and record
  /// the instruction as known UB where appropriate.
  std::optional<Value *> stopOnUndefOrAssumed(Attributor &A, Value *V,
                                              Instruction *I) {
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimplifiedV =
        A.getAssumedSimplified(IRPosition::value(*V), *this,
                               UsedAssumedInformation, AA::Interprocedural);
    if (!UsedAssumedInformation) {
      // Don't depend on assumed values.
      if (!SimplifiedV) {
        // If it is known (which we tested above) but it doesn't have a value,
        // then we can assume `undef` and hence the instruction is UB.
        KnownUBInsts.insert(I);
        return std::nullopt;
      }
      if (!*SimplifiedV)
        return nullptr;
      V = *SimplifiedV;
    }
    if (isa<UndefValue>(V)) {
      KnownUBInsts.insert(I);
      return std::nullopt;
    }
    return V;
  }

protected:
  /// A set of all live instructions _known_ to cause UB.
  SmallPtrSet<Instruction *, 8> KnownUBInsts;

private:
  /// A set of all the (live) instructions that are assumed to _not_ cause UB.
  SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;
};
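// The helper above encodes the key rule used throughout this AA: once a value
// is known to simplify to `undef` at an instruction that demands a defined
// value, the instruction is recorded as known UB and the query stops
// (std::nullopt); otherwise the simplified value is handed back for further
// inspection.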
struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
  AAUndefinedBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehaviorImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL(UndefinedBehaviorInstruction, Instruction,
               "Number of instructions known to have UB");
    BUILD_STAT_NAME(UndefinedBehaviorInstruction, Instruction) +=
        KnownUBInsts.size();
  }
};
/// ------------------------ Will-Return Attributes ----------------------------

// Helper function that checks whether a function has any cycle which we don't
// know if it is bounded or not. Loops with a maximum trip count are considered
// bounded, any other cycle is not.
static bool mayContainUnboundedCycle(Function &F, Attributor &A) {
  ScalarEvolution *SE =
      A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(F);
  LoopInfo *LI = A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(F);
  // If either SCEV or LoopInfo is not available for the function then we
  // assume any cycle to be unbounded. We use scc_iterator, which uses Tarjan's
  // algorithm, to find the maximal SCCs; to detect a cycle it suffices to find
  // the maximal ones.
  if (!SE || !LI) {
    for (scc_iterator<Function *> SCCI = scc_begin(&F); !SCCI.isAtEnd(); ++SCCI)
      if (SCCI.hasCycle())
        return true;
    return false;
  }

  // If there's irreducible control, the function may contain non-loop cycles.
  if (mayContainIrreducibleControl(F, LI))
    return true;

  // Any loop that does not have a max trip count is considered unbounded.
  for (auto *L : LI->getLoopsInPreorder()) {
    if (!SE->getSmallConstantMaxTripCount(L))
      return true;
  }
  return false;
}

struct AAWillReturnImpl : public AAWillReturn {
  AAWillReturnImpl(const IRPosition &IRP, Attributor &A)
      : AAWillReturn(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// Check for `mustprogress` and `readonly` as they imply `willreturn`.
  bool isImpliedByMustprogressAndReadonly(Attributor &A, bool KnownOnly) {
    if (!A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
      return false;

    bool IsKnown;
    if (AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown))
      return IsKnown || !KnownOnly;
    return false;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    if (isImpliedByMustprogressAndReadonly(A, /* KnownOnly */ false))
      return ChangeStatus::UNCHANGED;

    auto CheckForWillReturn = [&](Instruction &I) {
      IRPosition IPos = IRPosition::callsite_function(cast<CallBase>(I));
      bool IsKnown;
      if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
              A, this, IPos, DepClassTy::REQUIRED, IsKnown)) {
        if (IsKnown)
          return true;
      } else {
        return false;
      }
      bool IsKnownNoRecurse;
      return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
          A, this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForWillReturn, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "willreturn" : "may-noreturn";
  }
};

struct AAWillReturnFunction final : AAWillReturnImpl {
  AAWillReturnFunction(const IRPosition &IRP, Attributor &A)
      : AAWillReturnImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAWillReturnImpl::initialize(A);

    Function *F = getAnchorScope();
    assert(F && "Did expect an anchor function");
    if (F->isDeclaration() || mayContainUnboundedCycle(*F, A))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(willreturn) }
};

/// WillReturn attribute deduction for a call site.
struct AAWillReturnCallSite final
    : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
  AAWillReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    if (isImpliedByMustprogressAndReadonly(A, /* KnownOnly */ false))
      return ChangeStatus::UNCHANGED;

    return AACalleeToCallSite::updateImpl(A);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(willreturn); }
};
/// -------------------AAIntraFnReachability Attribute--------------------------

/// All information associated with a reachability query. This boilerplate code
/// is used by both AAIntraFnReachability and AAInterFnReachability, with
/// different \p ToTy values.
template <typename ToTy> struct ReachabilityQueryInfo {
  enum class Reachable {
    No,
    Yes,
  };

  /// Start here,
  const Instruction *From = nullptr;
  /// reach this place,
  const ToTy *To = nullptr;
  /// without going through any of these instructions,
  const AA::InstExclusionSetTy *ExclusionSet = nullptr;
  /// and remember if it worked:
  Reachable Result = Reachable::No;

  /// Precomputed hash for this RQI.
  unsigned Hash = 0;

  unsigned computeHashValue() const {
    assert(Hash == 0 && "Computed hash twice!");
    using InstSetDMI = DenseMapInfo<const AA::InstExclusionSetTy *>;
    using PairDMI = DenseMapInfo<std::pair<const Instruction *, const ToTy *>>;
    return const_cast<ReachabilityQueryInfo<ToTy> *>(this)->Hash =
               detail::combineHashValue(PairDMI ::getHashValue({From, To}),
                                        InstSetDMI::getHashValue(ExclusionSet));
  }

  ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
      : From(From), To(To) {}

  /// Constructor replacement to ensure unique and stable sets are used for the
  /// cache.
  ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To,
                        const AA::InstExclusionSetTy *ES, bool MakeUnique)
      : From(&From), To(&To), ExclusionSet(ES) {

    if (!ES || ES->empty()) {
      ExclusionSet = nullptr;
    } else if (MakeUnique) {
      ExclusionSet = A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
    }
  }

  ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
      : From(RQI.From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
};

namespace llvm {
template <typename ToTy> struct DenseMapInfo<ReachabilityQueryInfo<ToTy> *> {
  using InstSetDMI = DenseMapInfo<const AA::InstExclusionSetTy *>;
  using PairDMI = DenseMapInfo<std::pair<const Instruction *, const ToTy *>>;

  static ReachabilityQueryInfo<ToTy> EmptyKey;
  static ReachabilityQueryInfo<ToTy> TombstoneKey;

  static inline ReachabilityQueryInfo<ToTy> *getEmptyKey() { return &EmptyKey; }
  static inline ReachabilityQueryInfo<ToTy> *getTombstoneKey() {
    return &TombstoneKey;
  }
  // ...
  static bool isEqual(const ReachabilityQueryInfo<ToTy> *LHS,
                      const ReachabilityQueryInfo<ToTy> *RHS) {
    if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
      return false;
    return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);
  }
};
#define DefineKeys(ToTy)                                                       \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey =                  \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getEmptyKey(),                \
              DenseMapInfo<const ToTy *>::getEmptyKey());                      \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey =              \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getTombstoneKey(),            \
              DenseMapInfo<const ToTy *>::getTombstoneKey());

DefineKeys(Instruction)
#undef DefineKeys

} // namespace llvm
template <typename BaseTy, typename ToTy>
struct CachedReachabilityAA : public BaseTy {
  using RQITy = ReachabilityQueryInfo<ToTy>;

  CachedReachabilityAA(const IRPosition &IRP, Attributor &A) : BaseTy(IRP, A) {}

  /// See AbstractAttribute::isQueryAA.
  bool isQueryAA() const override { return true; }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    for (unsigned u = 0, e = QueryVector.size(); u < e; ++u) {
      RQITy *RQI = QueryVector[u];
      if (RQI->Result == RQITy::Reachable::No &&
          isReachableImpl(A, *RQI, /* IsTemporaryRQI */ false))
        Changed = ChangeStatus::CHANGED;
    }
    return Changed;
  }

  virtual bool isReachableImpl(Attributor &A, RQITy &RQI,
                               bool IsTemporaryRQI) = 0;

  bool rememberResult(Attributor &A, typename RQITy::Reachable Result,
                      RQITy &RQI, bool UsedExclusionSet, bool IsTemporaryRQI) {
    RQI.Result = Result;

    // Remove the temporary RQI from the cache.
    if (IsTemporaryRQI)
      QueryCache.erase(&RQI);

    // Insert a plain RQI (w/o exclusion set) if that makes sense. Two options:
    // 1) If it is reachable, it doesn't matter if we have an exclusion set for
    //    this query. 2) We did not use the exclusion set, potentially because
    //    there is none.
    if (Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
      RQITy PlainRQI(RQI.From, RQI.To);
      if (!QueryCache.count(&PlainRQI)) {
        RQITy *RQIPtr = new (A.Allocator) RQITy(RQI.From, RQI.To);
        RQIPtr->Result = Result;
        QueryVector.push_back(RQIPtr);
        QueryCache.insert(RQIPtr);
      }
    }

    // Check if we need to insert a new permanent RQI with the exclusion set.
    if (IsTemporaryRQI && Result != RQITy::Reachable::Yes && UsedExclusionSet) {
      assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
             "Did not expect empty set!");
      RQITy *RQIPtr = new (A.Allocator)
          RQITy(A, *RQI.From, *RQI.To, RQI.ExclusionSet, true);
      assert(RQIPtr->Result == RQITy::Reachable::No && "Already reachable?");
      RQIPtr->Result = Result;
      assert(!QueryCache.count(RQIPtr));
      QueryVector.push_back(RQIPtr);
      QueryCache.insert(RQIPtr);
    }

    if (Result == RQITy::Reachable::No && IsTemporaryRQI)
      A.registerForUpdate(*this);
    return Result == RQITy::Reachable::Yes;
  }

  const std::string getAsStr(Attributor *A) const override {
    // TODO: Return the number of reachable queries.
    return "#queries(" + std::to_string(QueryVector.size()) + ")";
  }

  bool checkQueryCache(Attributor &A, RQITy &StackRQI,
                       typename RQITy::Reachable &Result) {
    if (!this->getState().isValidState()) {
      Result = RQITy::Reachable::Yes;
      return true;
    }

    // If we have an exclusion set we might be able to find our answer by
    // ignoring it first.
    if (StackRQI.ExclusionSet) {
      RQITy PlainRQI(StackRQI.From, StackRQI.To);
      auto It = QueryCache.find(&PlainRQI);
      if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
        Result = RQITy::Reachable::No;
        return true;
      }
    }

    auto It = QueryCache.find(&StackRQI);
    if (It != QueryCache.end()) {
      Result = (*It)->Result;
      return true;
    }

    // Insert a temporary for recursive queries. We will replace it with a
    // permanent entry later.
    QueryCache.insert(&StackRQI);
    return false;
  }

protected:
  SmallVector<RQITy *> QueryVector;
  DenseSet<RQITy *> QueryCache;
};
struct AAIntraFnReachabilityFunction final
    : public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
  using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
  AAIntraFnReachabilityFunction(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  bool isAssumedReachable(
      Attributor &A, const Instruction &From, const Instruction &To,
      const AA::InstExclusionSetTy *ExclusionSet) const override {
    auto *NonConstThis = const_cast<AAIntraFnReachabilityFunction *>(this);
    if (&From == &To)
      return true;

    RQITy StackRQI(A, From, To, ExclusionSet, false);
    typename RQITy::Reachable Result;
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI,
                                           /* IsTemporaryRQI */ true);
    return Result == RQITy::Reachable::Yes;
  }

  ChangeStatus updateImpl(Attributor &A) override {
    // We only depend on liveness. DeadEdges is all we care about, check if any
    // of them changed.
    auto *LivenessAA =
        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (LivenessAA &&
        llvm::all_of(DeadEdges,
                     [&](const auto &DeadEdge) {
                       return LivenessAA->isEdgeDead(DeadEdge.first,
                                                     DeadEdge.second);
                     }) &&
        llvm::all_of(DeadBlocks, [&](const BasicBlock *BB) {
          return LivenessAA->isAssumedDead(BB);
        })) {
      return ChangeStatus::UNCHANGED;
    }
    DeadEdges.clear();
    DeadBlocks.clear();
    return Base::updateImpl(A);
  }

  bool isReachableImpl(Attributor &A, RQITy &RQI,
                       bool IsTemporaryRQI) override {
    const Instruction *Origin = RQI.From;
    bool UsedExclusionSet = false;

    auto WillReachInBlock = [&](const Instruction &From, const Instruction &To,
                                const AA::InstExclusionSetTy *ExclusionSet) {
      const Instruction *IP = &From;
      while (IP && IP != &To) {
        if (ExclusionSet && IP != Origin && ExclusionSet->count(IP)) {
          UsedExclusionSet = true;
          break;
        }
        IP = IP->getNextNode();
      }
      return IP == &To;
    };

    const BasicBlock *FromBB = RQI.From->getParent();
    const BasicBlock *ToBB = RQI.To->getParent();
    assert(FromBB->getParent() == ToBB->getParent() &&
           "Not an intra-procedural query!");

    // Check intra-block reachability, however, other reaching paths are still
    // possible.
    if (FromBB == ToBB &&
        WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                            IsTemporaryRQI);

    // Check if reaching the ToBB block is sufficient or if the "To" location
    // has to be visited from the beginning of the block.
    if (!WillReachInBlock(ToBB->front(), *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                            IsTemporaryRQI);

    const Function *Fn = FromBB->getParent();
    SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
    if (RQI.ExclusionSet)
      for (auto *I : *RQI.ExclusionSet)
        if (I->getFunction() == Fn)
          ExclusionBlocks.insert(I->getParent());

    // Check if we make it out of the FromBB block at all.
    if (ExclusionBlocks.count(FromBB) &&
        !WillReachInBlock(*RQI.From, *FromBB->getTerminator(),
                          RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::No, RQI, true, IsTemporaryRQI);

    auto *LivenessAA =
        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
      DeadBlocks.insert(ToBB);
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                            IsTemporaryRQI);
    }

    SmallPtrSet<const BasicBlock *, 16> Visited;
    SmallVector<const BasicBlock *, 16> Worklist;
    Worklist.push_back(FromBB);

    DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
    while (!Worklist.empty()) {
      const BasicBlock *BB = Worklist.pop_back_val();
      if (!Visited.insert(BB).second)
        continue;
      for (const BasicBlock *SuccBB : successors(BB)) {
        if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
          LocalDeadEdges.insert({BB, SuccBB});
          continue;
        }
        // We checked before if we just need to reach the ToBB block.
        if (SuccBB == ToBB)
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                                IsTemporaryRQI);

        if (ExclusionBlocks.count(SuccBB)) {
          UsedExclusionSet = true;
          continue;
        }
        Worklist.push_back(SuccBB);
      }
    }

    DeadEdges.insert(LocalDeadEdges.begin(), LocalDeadEdges.end());
    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                          IsTemporaryRQI);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

private:
  /// Set of assumed dead blocks we used in the last query. If any changes, we
  /// update the state.
  DenseSet<const BasicBlock *> DeadBlocks;

  /// Set of assumed dead edges we used in the last query. If any changes, we
  /// update the state.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;
};
/// ------------------------ NoAlias Argument Attribute ------------------------

bool AANoAlias::isImpliedByIR(Attributor &A, const IRPosition &IRP,
                              Attribute::AttrKind ImpliedAttributeKind,
                              bool IgnoreSubsumingPositions) {
  assert(ImpliedAttributeKind == Attribute::NoAlias &&
         "Unexpected attribute kind");
  Value *Val = &IRP.getAssociatedValue();
  if (isa<AllocaInst>(Val))
    return true;
  // ...
  IgnoreSubsumingPositions = true;

  if (isa<UndefValue>(Val))
    return true;

  if (isa<ConstantPointerNull>(Val) &&
      !NullPointerIsDefined(IRP.getAnchorScope(),
                            Val->getType()->getPointerAddressSpace()))
    return true;

  if (A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
                IgnoreSubsumingPositions, Attribute::NoAlias))
    return true;

  return false;
}

struct AANoAliasImpl : AANoAlias {
  AANoAliasImpl(const IRPosition &IRP, Attributor &A) : AANoAlias(IRP, A) {
    assert(getAssociatedType()->isPointerTy() &&
           "Noalias is a pointer attribute");
  }

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noalias" : "may-alias";
  }
};
/// NoAlias attribute for a floating value.
struct AANoAliasFloating final : AANoAliasImpl {
  AANoAliasFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Implement this.
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(noalias)
  }
};

/// NoAlias attribute for an argument.
struct AANoAliasArgument final
    : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
  using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
  AANoAliasArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::update(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // We have to make sure no-alias on the argument does not break
    // synchronization when this is a callback argument. If synchronization
    // cannot be affected, we delegate to the base updateImpl function.

    // If the function is no-sync, no-alias cannot break synchronization.
    bool IsKnownNoSycn;
    if (AA::hasAssumedIRAttr<Attribute::NoSync>(
            A, this, IRPosition::function_scope(getIRPosition()),
            DepClassTy::OPTIONAL, IsKnownNoSycn))
      return Base::updateImpl(A);

    // If the argument is read-only, no-alias cannot break synchronization.
    bool IsKnown;
    if (AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown))
      return Base::updateImpl(A);

    // If the argument is never passed through callbacks, no-alias cannot
    // break synchronization.
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(
            [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *this,
            true, UsedAssumedInformation))
      return Base::updateImpl(A);

    // TODO: add no-alias but make sure it doesn't break synchronization by
    // introducing fake uses.

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(noalias) }
};
struct AANoAliasCallSiteArgument final : AANoAliasImpl {
  AANoAliasCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// Determine if the underlying value may alias with the call site argument
  /// \p OtherArgNo of \p CB (= the underlying call site).
  bool mayAliasWithArgument(Attributor &A, AAResults *&AAR,
                            const AAMemoryBehavior &MemBehaviorAA,
                            const CallBase &CB, unsigned OtherArgNo) {
    // We do not need to worry about aliasing with the underlying IRP.
    if (this->getCalleeArgNo() == (int)OtherArgNo)
      return false;

    // If it is not a pointer or pointer vector we do not alias.
    const Value *ArgOp = CB.getArgOperand(OtherArgNo);
    if (!ArgOp->getType()->isPtrOrPtrVectorTy())
      return false;

    auto *CBArgMemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
        *this, IRPosition::callsite_argument(CB, OtherArgNo), DepClassTy::NONE);

    // If the argument is readnone, there is no read-write aliasing.
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // If the argument is readonly and the underlying value is readonly, there
    // is no read-write aliasing.
    bool IsReadOnly = MemBehaviorAA.isAssumedReadOnly();
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
        IsReadOnly) {
      A.recordDependence(MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // We have to utilize actual alias analysis queries so we need the object.
    if (!AAR)
      AAR = A.getInfoCache().getAnalysisResultForFunction<AAManager>(
          *getAnchorScope());

    // Try to rule it out at the call site.
    bool IsAliasing = !AAR || !AAR->isNoAlias(&getAssociatedValue(), ArgOp);
    LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Check alias between "
                         "callsite arguments: "
                      << getAssociatedValue() << " " << *ArgOp << " => "
                      << (IsAliasing ? "" : "no-") << "alias \n");

    return IsAliasing;
  }

  bool isKnownNoAliasDueToNoAliasPreservation(
      Attributor &A, AAResults *&AAR, const AAMemoryBehavior &MemBehaviorAA) {
    // We can deduce "noalias" if the following conditions hold.
    // (i)   Associated value is assumed to be noalias in the definition.
    // (ii)  Associated value is assumed to be no-capture in all the uses
    //       possibly executed before this callsite.
    // (iii) There is no other pointer argument which could alias with the
    //       value.

    const Function *ScopeFn = getAnchorScope();
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    // ...

    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());

      // If UserI is the curr instruction and there is a single potential use
      // of the value in UserI we allow the use.
      if (UserI == getCtxI() && UserI->getNumOperands() == 1)
        return true;

      if (ScopeFn) {
        if (auto *CB = dyn_cast<CallBase>(UserI)) {
          if (CB->isArgOperand(&U)) {
            unsigned ArgNo = CB->getArgOperandNo(&U);

            bool IsKnownNoCapture;
            if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
                    A, this, IRPosition::callsite_argument(*CB, ArgNo),
                    DepClassTy::OPTIONAL, IsKnownNoCapture))
              return true;
          }
        }

        if (!AA::isPotentiallyReachable(
                A, *UserI, *getCtxI(), *this, /* ExclusionSet */ nullptr,
                [ScopeFn](const Function &Fn) { return &Fn != ScopeFn; }))
          return true;
      }

      switch (DetermineUseCaptureKind(U, /* IsDereferenceableOrNull */ nullptr)) {
      case UseCaptureKind::NO_CAPTURE:
        return true;
      case UseCaptureKind::MAY_CAPTURE:
        LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Unknown user: " << *UserI
                          << "\n");
        return false;
      case UseCaptureKind::PASSTHROUGH:
        Follow = true;
        return true;
      }
      llvm_unreachable("unknown UseCaptureKind");
    };

    bool IsKnownNoCapture;
    const AANoCapture *NoCaptureAA = nullptr;
    bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
        A, this, VIRP, DepClassTy::NONE, IsKnownNoCapture, false, &NoCaptureAA);
    if (!IsAssumedNoCapture &&
        (!NoCaptureAA || !NoCaptureAA->isAssumedNoCaptureMaybeReturned())) {
      if (!A.checkForAllUses(UsePred, *this, getAssociatedValue())) {
        LLVM_DEBUG(
            dbgs() << "[AANoAliasCSArg] " << getAssociatedValue()
                   << " cannot be noalias as it is potentially captured\n");
        return false;
      }
    }
    if (NoCaptureAA)
      A.recordDependence(*NoCaptureAA, *this, DepClassTy::OPTIONAL);

    // Check there is no other pointer argument which could alias with the
    // value passed at this call site.
    const auto &CB = cast<CallBase>(getAnchorValue());
    for (unsigned OtherArgNo = 0; OtherArgNo < CB.arg_size(); OtherArgNo++)
      if (mayAliasWithArgument(A, AAR, MemBehaviorAA, CB, OtherArgNo))
        return false;

    return true;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If the argument is readnone we are done as there are no accesses via
    // the argument.
    auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, getIRPosition(), DepClassTy::NONE);
    if (MemBehaviorAA && MemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;
    }

    bool IsKnownNoAlias;
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
            A, this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AANoAlias] " << getAssociatedValue()
                        << " is not no-alias at the definition\n");
      return indicatePessimisticFixpoint();
    }

    AAResults *AAR = nullptr;
    if (MemBehaviorAA &&
        isKnownNoAliasDueToNoAliasPreservation(A, AAR, *MemBehaviorAA)) {
      LLVM_DEBUG(
          dbgs() << "[AANoAlias] No-Alias deduced via no-alias preservation\n");
      return ChangeStatus::UNCHANGED;
    }

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(noalias) }
};
/// NoAlias attribute for function return value.
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckReturnValue = [&](Value &RV) -> bool {
      if (Constant *C = dyn_cast<Constant>(&RV))
        if (C->isNullValue() || isa<UndefValue>(C))
          return true;

      /// For now, we can only deduce noalias if we have call sites.
      /// FIXME: add more support.
      if (!isa<CallBase>(&RV))
        return false;

      const IRPosition &RVPos = IRPosition::value(RV);
      bool IsKnownNoAlias;
      if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
              A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
        return false;

      bool IsKnownNoCapture;
      const AANoCapture *NoCaptureAA = nullptr;
      bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
          A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
          &NoCaptureAA);
      return IsAssumedNoCapture ||
             (NoCaptureAA && NoCaptureAA->isAssumedNoCaptureMaybeReturned());
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(noalias) }
};
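// Example deduction (hypothetical IR): every returned value is either null,
// undef, or a noalias-and-nocapture call result, so @f's return value can be
// marked noalias:
//
//   define ptr @f(i1 %c) {
//     br i1 %c, label %t, label %e
//   t:
//     %m = call noalias ptr @malloc(i64 4)
//     ret ptr %m
//   e:
//     ret ptr null
//   }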
/// NoAlias attribute deduction for a call site return value.
struct AANoAliasCallSiteReturned final
    : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(noalias); }
};
/// -------------------AAIsDead Function Attribute-----------------------

struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }

  /// Check if all uses are assumed dead.
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // Callers might not check the type, void has no uses.
    if (V.getType()->isVoidTy() || V.use_empty())
      return true;

    // If we replace a value with a constant there are no uses left afterwards.
    if (!isa<Constant>(V)) {
      if (auto *I = dyn_cast<Instruction>(&V))
        if (!A.isRunOn(*I->getFunction()))
          return false;
      bool UsedAssumedInformation = false;
      std::optional<Constant *> C =
          A.getAssumedConstant(V, *this, UsedAssumedInformation);
      if (!C || *C)
        return true;
    }

    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    // Explicitly set the dependence class to required because we want a long
    // chain of N dependent instructions to be considered live as soon as one
    // is without going through N update cycles.
    return A.checkForAllUses(UsePred, *this, V,
                             /* CheckBBLivenessOnly */ false,
                             DepClassTy::REQUIRED,
                             /* IgnoreDroppableUses */ true);
  }

  /// Determine if \p I is assumed to be side-effect free.
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    if (!I || wouldInstructionBeTriviallyDead(I))
      return true;

    auto *CB = dyn_cast<CallBase>(I);
    if (!CB || isa<IntrinsicInst>(CB))
      return false;

    const IRPosition &CallIRP = IRPosition::callsite_function(*CB);

    bool IsKnownNoUnwind;
    if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      return false;

    bool IsKnown;
    return AA::isAssumedReadOnly(A, CallIRP, *this, IsKnown);
  }
};
/// An AAIsDead attribute for floating values.
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);

    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (!isAssumedSideEffectFree(A, I)) {
      if (!isa_and_nonnull<StoreInst>(I) && !isa_and_nonnull<FenceInst>(I))
        indicatePessimisticFixpoint();
      else
        removeAssumedBits(HAS_NO_EFFECT);
    }
  }

  bool isDeadFence(Attributor &A, FenceInst &FI) {
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(*FI.getFunction()), *this, DepClassTy::NONE);
    if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
      return false;
    A.recordDependence(*ExecDomainAA, *this, DepClassTy::OPTIONAL);
    return true;
  }
  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // Lang ref now states volatile store is not UB, so consider it as not
    // removable.
    if (SI.isVolatile())
      return false;

    // If we are collecting assumes to be deleted we are in the manifest stage.
    // It is problematic to collect the potential copies again now, so we use
    // the cached ones.
    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      if (!AA::getPotentialCopiesOfStoredValue(A, SI, PotentialCopies, *this,
                                               UsedAssumedInformation)) {
        LLVM_DEBUG(
            dbgs()
            << "[AAIsDead] Could not determine potential copies of store!\n");
        return false;
      }
    }
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");

    InformationCache &InfoCache = A.getInfoCache();
    return llvm::all_of(PotentialCopies, [&](Value *V) {
      if (A.isAssumedDead(IRPosition::value(*V), this, nullptr,
                          UsedAssumedInformation))
        return true;
      if (auto *LI = dyn_cast<LoadInst>(V)) {
        if (llvm::all_of(LI->uses(), [&](const Use &U) {
              auto &UserI = cast<Instruction>(*U.getUser());
              if (InfoCache.isOnlyUsedByAssume(UserI)) {
                if (AssumeOnlyInst)
                  AssumeOnlyInst->insert(&UserI);
                return true;
              }
              return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
            }))
          return true;
      }
      LLVM_DEBUG(dbgs() << "[AAIsDead] Potential copy " << *V
                        << " is assumed live!\n");
      return false;
    });
  }
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (isa_and_nonnull<StoreInst>(I))
      if (isValidState())
        return "assumed-dead-store";
    if (isa_and_nonnull<FenceInst>(I))
      if (isValidState())
        return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr(A);
  }
  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (auto *SI = dyn_cast_or_null<StoreInst>(I)) {
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    } else if (auto *FI = dyn_cast_or_null<FenceInst>(I)) {
      if (!isDeadFence(A, *FI))
        return indicatePessimisticFixpoint();
    } else {
      if (!isAssumedSideEffectFree(A, I))
        return indicatePessimisticFixpoint();
      if (!areAllUsesAssumedDead(A, getAssociatedValue()))
        return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }
  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      // If we get here we basically know the users are all dead. We check if
      // isAssumedSideEffectFree returns true again because it might not be the
      // case and only the users are dead but the instruction (=call) is still
      // needed.
      if (auto *SI = dyn_cast<StoreInst>(I)) {
        SmallSetVector<Instruction *, 8> AssumeOnlyInst;
        bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
        (void)IsDead;
        assert(IsDead && "Store was assumed to be dead!");
        A.deleteAfterManifest(*I);
        for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
          Instruction *AOI = AssumeOnlyInst[i];
          for (auto *Usr : AOI->users())
            AssumeOnlyInst.insert(cast<Instruction>(Usr));
          A.deleteAfterManifest(*AOI);
        }
        return ChangeStatus::CHANGED;
      }
      if (auto *FI = dyn_cast<FenceInst>(I)) {
        assert(isDeadFence(A, *FI));
        A.deleteAfterManifest(*FI);
        return ChangeStatus::CHANGED;
      }
      if (isAssumedSideEffectFree(A, I) && !isa<InvokeInst>(I)) {
        A.deleteAfterManifest(*I);
        return ChangeStatus::CHANGED;
      }
    }
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(IsDead)
  }

private:
  /// The potential copies of a dead store, used for deletion during manifest.
  SmallSetVector<Value *, 4> PotentialCopies;
};
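// Note on the deletion order in manifest: a dead store is removed together
// with the assume-only users collected in isDeadStore (the icmp and the
// assume in the example above), which leaves the intermediate load trivially
// dead for subsequent cleanup rather than deleting the store in isolation.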
/// An AAIsDead attribute for arguments.
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, /* ReplacementTypes */ {}))
      if (A.registerFunctionSignatureRewrite(
              Arg, /* ReplacementTypes */ {},
              Attributor::ArgumentReplacementInfo::CalleeRepairCBTy{},
              Attributor::ArgumentReplacementInfo::ACSRepairCBTy{}))
        return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(IsDead) }
};
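// Registering a signature rewrite with empty ReplacementTypes is the
// mechanism for dropping the argument entirely: the Attributor later clones
// the function without it and repairs all call sites, instead of this
// attribute patching the IR directly.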
/// An AAIsDead attribute for call site arguments.
struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    CallBase &CB = cast<CallBase>(getAnchorValue());
    Use &U = CB.getArgOperandUse(getCallSiteArgNo());
    assert(!isa<UndefValue>(U.get()) &&
           "Expected undef values to be filtered out!");
    UndefValue &UV = *UndefValue::get(U->getType());
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(IsDead) }
};
/// An AAIsDead attribute for call site return values.
struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    // We track this separately as a secondary state.
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      STATS_DECLTRACK_CSRET_ATTR(IsDead)
    else
      STATS_DECLTRACK_CSRET_ATTR(UnusedResult)
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};
/// An AAIsDead attribute for the function return value.
struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        return false;
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // TODO: Rewrite the signature to return void?
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    auto RetInstPred = [&](Instruction &I) {
      ReturnInst &RI = cast<ReturnInst>(I);
      if (!isa<UndefValue>(RI.getOperand(0)))
        AnyChange |= A.changeUseAfterManifest(RI.getOperandUse(0), UV);
      return true;
    };
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(IsDead) }
};
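// Net effect of this manifest (hypothetical IR): when no call site uses the
// returned value, "ret i32 %expensive" becomes "ret i32 undef", after which
// %expensive and its operand chain can die as ordinary dead code.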
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function *F = getAnchorScope();
    assert(F && "Did expect an anchor function");
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  }

  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      return false;
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; }, *this,
                                  true, UsedAssumedInformation);
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }
  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function &F = *getAnchorScope();

    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind. This is not possible if the personality of the
    // function allows to catch asynchronous exceptions.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      auto *CB = dyn_cast<CallBase>(DeadEndI);
      if (!CB)
        continue;
      bool IsKnownNoReturn;
      bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL,
          IsKnownNoReturn);
      if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
        continue;

      if (auto *II = dyn_cast<InvokeInst>(DeadEndI))
        A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      else
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      HasChanged = ChangeStatus::CHANGED;
    }

    STATS_DECL(AAIsDead, BasicBlock, "Number of dead basic blocks deleted.");
    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        ++BUILD_STAT_NAME(AAIsDead, BasicBlock);
        HasChanged = ChangeStatus::CHANGED;
      }

    return HasChanged;
  }

  /// See AAIsDead::isEdgeDead.
  bool isEdgeDead(const BasicBlock *From, const BasicBlock *To) const override {
    assert(From->getParent() == getAnchorScope() &&
           To->getParent() == getAnchorScope() &&
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
  /// Returns true if the function is assumed dead.
  bool isAssumedDead() const override { return false; }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return false; }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == getAnchorScope() &&
           "BB must be in the same anchor scope function.");
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    if (!getAssumed())
      return false;

    // If it is not in AssumedLiveBlocks it is for sure dead. Otherwise, it
    // can still be after a noreturn call in a live block.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;

    // If it is not after a liveness barrier it is live.
    const Instruction *PrevI = I->getPrevNode();
    while (PrevI) {
      if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
        return true;
      PrevI = PrevI->getPrevNode();
    }
    return false;
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }

  /// Assume \p BB is (partially) live now and indicate to the Attributor \p A
  /// that internal functions called from \p BB should now be looked at.
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      return false;

    // We assume that all of BB is (probably) live now and if there are calls
    // to internal functions we will assume that those are now live as well.
    for (const Instruction &I : BB)
      if (const auto *CB = dyn_cast<CallBase>(&I))
        if (const Function *F = CB->getCalledFunction())
          if (F->hasLocalLinkage())
            A.markLiveInternalFunction(*F);
    return true;
  }

  /// Determine if \p F might catch asynchronous exceptions.
  static bool mayCatchAsynchronousExceptions(const Function &F) {
    return F.hasPersonalityFn() && !canSimplifyInvokeNoUnwind(&F);
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// Collection of instructions that need to be explored again, e.g., we
  /// did assume they do not transfer control to (one of their) successors.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  /// Collection of instructions that are known to not transfer control.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  /// Collection of all assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  /// Collection of all assumed live BasicBlocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};
static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  const IRPosition &IPos = IRPosition::callsite_function(CB);

  bool IsKnownNoReturn;
  if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoReturn))
    return !IsKnownNoReturn;
  if (CB.isTerminator())
    AliveSuccessors.push_back(&CB.getSuccessor(0)->front());
  else
    AliveSuccessors.push_back(CB.getNextNode());
  return false;
}

static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      identifyAliveSuccessors(A, cast<CallBase>(II), AA, AliveSuccessors);

  // If we cannot rule out asynchronous exceptions landing in the unwind
  // block, it is alive; otherwise it is dead if the callee is nounwind.
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
  } else {
    const IRPosition &IPos = IRPosition::callsite_function(II);

    bool IsKnownNoUnwind;
    if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoUnwind)) {
      UsedAssumedInformation |= !IsKnownNoUnwind;
    } else {
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
    }
  }
  return UsedAssumedInformation;
}
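// For an invoke of a nounwind callee, with a personality that cannot catch
// asynchronous exceptions, only the normal destination is queued:
//
//   invoke void @nounwind_fn() to label %normal unwind label %lpad
//
// %lpad stays dead unless the nounwind assumption is later retracted, which
// is what the returned UsedAssumedInformation flag accounts for.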
static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  if (BI.getNumSuccessors() == 1) {
    AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
  } else {
    std::optional<Constant *> C =
        A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
    if (!C || isa_and_nonnull<UndefValue>(*C)) {
      // No value yet, assume both edges are dead.
    } else if (isa_and_nonnull<ConstantInt>(*C)) {
      const BasicBlock *SuccBB =
          BI.getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
      AliveSuccessors.push_back(&SuccBB->front());
    } else {
      AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
      AliveSuccessors.push_back(&BI.getSuccessor(1)->front());
      UsedAssumedInformation = false;
    }
  }
  return UsedAssumedInformation;
}
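// Example: once the Attributor simplifies %c to the constant true in
//
//   br i1 %c, label %taken, label %untaken
//
// only %taken is queued. Because assumed information was used, the caller
// re-queues the branch (NewToBeExploredFrom) until the fact becomes known.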
static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  SmallVector<AA::ValueAndContext> Values;
  if (!A.getAssumedSimplifiedValues(IRPosition::value(*SI.getCondition()), &AA,
                                    Values, AA::AnyScope,
                                    UsedAssumedInformation)) {
    // Something went wrong, assume all successors are live.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return false;
  }

  if (Values.empty() ||
      (Values.size() == 1 &&
       isa_and_nonnull<UndefValue>(Values.front().getValue()))) {
    // No valid value yet, assume all edges are dead.
    return UsedAssumedInformation;
  }

  Type &Ty = *SI.getCondition()->getType();
  SmallPtrSet<ConstantInt *, 8> Constants;
  auto CheckForConstantInt = [&](Value *V) {
    if (auto *CI = dyn_cast_if_present<ConstantInt>(AA::getWithType(*V, Ty))) {
      Constants.insert(CI);
      return true;
    }
    return false;
  };

  if (!all_of(Values, [&](AA::ValueAndContext &VAC) {
        return CheckForConstantInt(VAC.getValue());
      })) {
    // Not all potential values are constant integers, be conservative.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return UsedAssumedInformation;
  }

  unsigned MatchedCases = 0;
  for (const auto &CaseIt : SI.cases()) {
    if (Constants.count(CaseIt.getCaseValue())) {
      ++MatchedCases;
      AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
    }
  }

  // If not all potential values were matched by cases, the default successor
  // is reachable as well.
  if (MatchedCases < Constants.size())
    AliveSuccessors.push_back(&SI.getDefaultDest()->front());
  return UsedAssumedInformation;
}
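// Example: if the condition is known to take a value in {1, 3} for
//
//   switch i32 %x, label %default [ i32 1, label %a
//                                   i32 2, label %b
//                                   i32 3, label %c ]
//
// then %a and %c are queued and %b stays dead; %default is queued only when
// some potential constant matched no case (MatchedCases < Constants.size()).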
ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) {
  ChangeStatus Change = ChangeStatus::UNCHANGED;

  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      return ChangeStatus::UNCHANGED;

    Function *F = getAnchorScope();
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
    Change = ChangeStatus::CHANGED;
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");

  // Copy and clear the list of instructions we need to explore from. It is
  // refilled with instructions the next update has to look at.
  SmallVector<const Instruction *, 8> Worklist(ToBeExploredFrom.begin(),
                                               ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;

  SmallVector<const Instruction *, 8> AliveSuccessors;
  while (!Worklist.empty()) {
    const Instruction *I = Worklist.pop_back_val();

    // Fast forward for uninteresting instructions.
    while (!I->isTerminator() && !isa<CallBase>(I))
      I = I->getNextNode();

    AliveSuccessors.clear();

    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    default:
      assert(I->isTerminator() &&
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      break;
    case Instruction::Call:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<CallInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Invoke:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<InvokeInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Br:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<BranchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Switch:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<SwitchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    }

    if (UsedAssumedInformation) {
      NewToBeExploredFrom.insert(I);
    } else if (AliveSuccessors.empty() ||
               (I->isTerminator() &&
                AliveSuccessors.size() < I->getNumSuccessors())) {
      if (KnownDeadEnds.insert(I))
        Change = ChangeStatus::CHANGED;
    }

    LLVM_DEBUG(dbgs() << "[AAIsDead] #AliveSuccessors: "
                      << AliveSuccessors.size() << " UsedAssumedInformation: "
                      << UsedAssumedInformation << "\n");

    for (const Instruction *AliveSuccessor : AliveSuccessors) {
      if (!I->isTerminator()) {
        assert(AliveSuccessors.size() == 1 &&
               "Non-terminator expected to have a single successor!");
        Worklist.push_back(AliveSuccessor);
      } else {
        // Record the newly discovered edge into the edge set.
        auto Edge = std::make_pair(I->getParent(), AliveSuccessor->getParent());
        if (AssumedLiveEdges.insert(Edge).second)
          Change = ChangeStatus::CHANGED;
        if (assumeLive(A, *AliveSuccessor->getParent()))
          Worklist.push_back(AliveSuccessor);
      }
    }
  }

  // Check if the content of ToBeExploredFrom changed, ignore the order.
  if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
      llvm::any_of(NewToBeExploredFrom, [&](const Instruction *I) {
        return !ToBeExploredFrom.count(I);
      })) {
    Change = ChangeStatus::CHANGED;
    ToBeExploredFrom = std::move(NewToBeExploredFrom);
  }

  // If everything is live there is no need to query for liveness; indicating
  // a pessimistic fixpoint makes all queries answer conservatively.
  if (ToBeExploredFrom.empty() &&
      getAnchorScope()->size() == AssumedLiveBlocks.size() &&
      llvm::all_of(KnownDeadEnds, [](const Instruction *DeadEndI) {
        return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
      }))
    return indicatePessimisticFixpoint();
  return Change;
}
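// Design note: the closing pessimistic fixpoint is intentional. Once every
// block is live and all known dead ends are successor-free terminators,
// liveness can never improve again, so invalidating the state lets queries
// answer conservatively without further set lookups or updates.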
/// Liveness information for call sites.
struct AAIsDeadCallSite final : AAIsDeadFunction {
  AAIsDeadCallSite(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFunction(IRP, A) {}

  void initialize(Attributor &A) override {
    llvm_unreachable("Abstract attributes for liveness are not "
                     "supported for call sites yet!");
  }
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }
  void trackStatistics() const override {}
};
/// -------------------- Dereferenceable Argument Attribute --------------------

struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(),
               {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
               Attrs, /* IgnoreSubsumingPositions */ false);
    for (const Attribute &Attr : Attrs)
      takeKnownDerefBytesMaximum(Attr.getValueAsInt());

    // Ensure we initialize the non-null AA (if necessary).
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);

    bool CanBeNull, CanBeFreed;
    takeKnownDerefBytesMaximum(V.getPointerDereferenceableBytes(
        A.getDataLayout(), CanBeNull, CanBeFreed));

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See AbstractAttribute::getState()
  /// {
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }
  /// }

  /// Helper function for collecting accessed bytes in must-be-executed-context
  void addAccessedBytesForUse(Attributor &A, const Use *U, const Instruction *I,
                              DerefState &State) {
    const Value *UseV = U->get();
    if (!UseV->getType()->isPointerTy())
      return;

    std::optional<MemoryLocation> Loc = MemoryLocation::getOrNone(I);
    if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() || I->isVolatile())
      return;

    int64_t Offset;
    const Value *Base = GetPointerBaseWithConstantOffset(
        Loc->Ptr, Offset, A.getDataLayout(), /* AllowNonInbounds */ true);
    if (Base && Base == &getAssociatedValue())
      State.addAccessedBytes(Offset, Loc->Size.getValue());
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AADereferenceable::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
        A, *this, getAssociatedValue(), U, I, IsNonNull, TrackUse);
    LLVM_DEBUG(dbgs() << "[AADereferenceable] Deref bytes: " << DerefBytes
                      << " for instruction " << *I << "\n");

    addAccessedBytesForUse(A, U, I, State);
    State.takeKnownDerefBytesMaximum(DerefBytes);
    return TrackUse;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus Change = AADereferenceable::manifest(A);
    bool IsKnownNonNull;
    bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    if (IsAssumedNonNull &&
        A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
      A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
      return ChangeStatus::CHANGED;
    }
    return Change;
  }
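  // The removal above is the usual attribute upgrade: nonnull together with
  // dereferenceable_or_null(N) is strictly stronger than the _or_null form,
  // e.g. "dereferenceable_or_null(8) nonnull" collapses to
  // "dereferenceable(8) nonnull" once the NonNull AA holds.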