#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsNVPTX.h"

#define DEBUG_TYPE "attributor"
    cl::desc("Manifest Attributor internal string attributes."),

    cl::desc("Maximum number of potential values to be "
             "tracked for each position."),

    "attributor-max-potential-values-iterations", cl::Hidden,
    cl::desc(
        "Maximum number of iterations we keep dismantling potential values."),

STATISTIC(NumAAs, "Number of abstract attributes created");
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {                                                                            \
    STATS_DECL(NAME, TYPE, MSG)                                                \
    STATS_TRACK(NAME, TYPE)                                                    \
  }
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
#define PIPE_OPERATOR(CLASS)                                                   \
  raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) {                  \
    return OS << static_cast<const AbstractAttribute &>(AA);                   \
  }
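
// Usage sketch (added note): PIPE_OPERATOR(AANoUnwind) would define
// raw_ostream &operator<<(raw_ostream &, const AANoUnwind &) that forwards to
// the generic AbstractAttribute printer, so concrete AAs can be streamed into
// LLVM_DEBUG output without each defining its own operator<<.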
                         bool HeaderOnly, Cycle **CPtr = nullptr) {
  auto *BB = I->getParent();
  return !HeaderOnly || BB == C->getHeader();
  if (DL.getTypeSizeInBits(Ty) != DL.getTypeAllocSizeInBits(Ty))
  if (VectorType *SeqTy = dyn_cast<VectorType>(Ty))
  if (ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
  if (!isa<StructType>(Ty))
    StartPos += DL.getTypeAllocSizeInBits(ElTy);
                                 bool AllowVolatile) {
  if (!AllowVolatile && I->isVolatile())
  if (auto *LI = dyn_cast<LoadInst>(I)) {
    return LI->getPointerOperand();
  if (auto *SI = dyn_cast<StoreInst>(I)) {
    return SI->getPointerOperand();
  if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(I)) {
    return CXI->getPointerOperand();
  if (auto *RMWI = dyn_cast<AtomicRMWInst>(I)) {
    return RMWI->getPointerOperand();
  assert(Offset >= 0 && "Negative offset not supported yet!");
                    << "-bytes as " << *ResTy << "\n");
  Type *Ty = PtrElemTy;
  std::string GEPName = Ptr->getName().str();
    GEPName += "." + std::to_string(Index.getZExtValue());
  if (IntOffset != 0) {
                                Ptr->getName() + ".cast");
                                   bool GetMinOffset, bool AllowNonInbounds,
                                   bool UseAssumed = false) {
  auto AttributorAnalysis = [&](Value &V, APInt &ROffset) -> bool {
        UseAssumed ? DepClassTy::OPTIONAL
    if (!ValueConstantRangeAA)
    if (Range.isFullSet())
      ROffset = Range.getSignedMin();
      ROffset = Range.getSignedMax();
                                        const Value *Ptr, int64_t &BytesOffset,
  APInt OffsetAPInt(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
                                      true, AllowNonInbounds);
template <typename AAType, typename StateType = typename AAType::StateType,
          bool RecurseForSelectAndPHI = true>
static void clampReturnedValueStates(
    Attributor &A, const AAType &QueryingAA, StateType &S,
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
                    << QueryingAA << " into " << S << "\n");

  assert((QueryingAA.getIRPosition().getPositionKind() ==
          QueryingAA.getIRPosition().getPositionKind() ==
         "Can only clamp returned value states for a function returned or call "
         "site returned position!");

  std::optional<StateType> T;

  auto CheckReturnValue = [&](Value &RV) -> bool {
      return AA::hasAssumedIRAttr<IRAttributeKind>(
          A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
        A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
                      << " AA: " << AA->getAsStr(&A) << " @ " << RVPos << "\n");
    const StateType &AAS = AA->getState();
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
    return T->isValidState();

  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
                                   AA::ValueScope::Intraprocedural,
                                   RecurseForSelectAndPHI))
    S.indicatePessimisticFixpoint();
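
// Semantics sketch (added comment, assuming the usual Attributor state
// interface): T starts unset, the callback meets ("&=") in the state of each
// potentially returned value, and the caller's state S is finally clamped
// against T, e.g.
//   std::optional<StateType> T;   // unset until the first return value
//   *T &= AAS;                    // join one returned value's state
//   S ^= *T;                      // clamp S to what holds for all returns
// so S ends up with the strongest property valid for every returned value.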
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool PropagateCallBaseContext = false,
          bool RecurseForSelectAndPHI = true>
struct AAReturnedFromReturnedValues : public BaseType {
    StateType S(StateType::getBestState(this->getState()));
    clampReturnedValueStates<AAType, StateType, IRAttributeKind,
                             RecurseForSelectAndPHI>(
        PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
    return clampStateAndIndicateChange<StateType>(this->getState(), S);
template <typename AAType, typename StateType = typename AAType::StateType,
static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA,
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
                    << QueryingAA << " into " << S << "\n");

  assert(QueryingAA.getIRPosition().getPositionKind() ==
         "Can only clamp call site argument states for an argument position!");

  std::optional<StateType> T;

  unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
      return AA::hasAssumedIRAttr<IRAttributeKind>(
          A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
        A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
    LLVM_DEBUG(dbgs() << "[Attributor] ACS: " << *ACS.getInstruction()
                      << " AA: " << AA->getAsStr(&A) << " @" << ACSArgPos
    const StateType &AAS = AA->getState();
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
    return T->isValidState();

  bool UsedAssumedInformation = false;
  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true,
                              UsedAssumedInformation))
    S.indicatePessimisticFixpoint();
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
bool getArgumentStateFromCallBaseContext(Attributor &A,
         "Expected an 'argument' position!");
  assert(ArgNo >= 0 && "Invalid Arg No!");
      return AA::hasAssumedIRAttr<IRAttributeKind>(
          A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
        A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
  const StateType &CBArgumentState =
      static_cast<const StateType &>(AA->getState());

  LLVM_DEBUG(dbgs() << "[Attributor] Bridging call site context to argument"
                    << " Position: " << Pos << " CB Arg state: " << CBArgumentState
  State ^= CBArgumentState;
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          bool BridgeCallBaseContext = false,
struct AAArgumentFromCallSiteArguments : public BaseType {
    StateType S = StateType::getBestState(this->getState());

    if (BridgeCallBaseContext) {
          getArgumentStateFromCallBaseContext<AAType, BaseType, StateType,
              A, *this, this->getIRPosition(), S);
        return clampStateAndIndicateChange<StateType>(this->getState(), S);
    clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(A, *this,

    return clampStateAndIndicateChange<StateType>(this->getState(), S);
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool IntroduceCallBaseContext = false,
struct AACalleeToCallSite : public BaseType {
    auto IRPKind = this->getIRPosition().getPositionKind();
           "Can only wrap function returned positions for call site "
           "returned positions!");
    auto &S = this->getState();

    CallBase &CB = cast<CallBase>(this->getAnchorValue());
    if (IntroduceCallBaseContext)
      LLVM_DEBUG(dbgs() << "[Attributor] Introducing call base context:" << CB

      for (const Function *Callee : Callees) {
                  IntroduceCallBaseContext ? &CB : nullptr)
                  *Callee, IntroduceCallBaseContext ? &CB : nullptr);
          if (!AA::hasAssumedIRAttr<IRAttributeKind>(
                  A, this, FnPos, DepClassTy::REQUIRED, IsKnown))
              A.getAAFor<AAType>(*this, FnPos, DepClassTy::REQUIRED);
          if (S.isAtFixpoint())
            return S.isValidState();
    if (!A.checkForAllCallees(CalleePred, *this, CB))
      return S.indicatePessimisticFixpoint();
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInContext(AAType &AA, Attributor &A,
  auto EIt = Explorer.begin(CtxI), EEnd = Explorer.end(CtxI);
  for (unsigned u = 0; u < Uses.size(); ++u) {
    if (const Instruction *UserI = dyn_cast<Instruction>(U->getUser())) {
      if (Found && AA.followUseInMBEC(A, U, UserI, State))
        for (const Use &Us : UserI->uses())
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInMBEC(AAType &AA, Attributor &A, StateType &S,
      A.getInfoCache().getMustBeExecutedContextExplorer();
  for (const Use &U : AA.getIRPosition().getAssociatedValue().uses())
  followUsesInContext<AAType>(AA, A, *Explorer, &CtxI, Uses, S);
  if (S.isAtFixpoint())
  if (const BranchInst *Br = dyn_cast<BranchInst>(I))
    if (Br->isConditional())
  StateType ParentState;
  ParentState.indicateOptimisticFixpoint();

  for (const BasicBlock *BB : Br->successors()) {
    StateType ChildState;

    size_t BeforeSize = Uses.size();
    followUsesInContext(AA, A, *Explorer, &BB->front(), Uses, ChildState);

    for (auto It = Uses.begin() + BeforeSize; It != Uses.end();)

    ParentState &= ChildState;
namespace PointerInfo {
    R.indicatePessimisticFixpoint();
      if (!Range.mayOverlap(ItRange))
      bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
      for (auto Index : It.getSecond()) {
        if (!CB(Access, IsExact))
    for (unsigned Index : LocalList->getSecond()) {
    if (Range.offsetAndSizeAreUnknown())
    RemoteI = RemoteI ? RemoteI : &I;
    bool AccExists = false;
    for (auto Index : LocalList) {
      if (A.getLocalInst() == &I) {
               << "[AAPointerInfo] Inserting access in new offset bins\n";);
      for (auto Key : ToAdd) {
             "New Access should have been at AccIndex");
      LocalList.push_back(AccIndex);
    auto &ExistingRanges = Before.getRanges();
    auto &NewRanges = Current.getRanges();
               << "[AAPointerInfo] Removing access from old offset bins\n";);
             "Expected bin to actually contain the Access.");
      Bin.erase(AccIndex);
  using const_iterator = VecTy::const_iterator;
  const_iterator begin() const { return Offsets.begin(); }
  const_iterator end() const { return Offsets.end(); }
    return Offsets == RHS.Offsets;
  bool isUnassigned() const { return Offsets.size() == 0; }
  bool isUnknown() const {
  void addToAll(int64_t Inc) {
    for (auto &Offset : Offsets) {
  void merge(const OffsetInfo &R) { Offsets.append(R.Offsets); }
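
// Illustrative example (added note): if a pointer is known to carry the
// offsets {0, 8} and a GEP adds a constant 4, addToAll(4) turns that into
// {4, 12}; merging with a second path that contributes {0} appends to the
// list, so the OffsetInfo then describes all offsets {4, 12, 0} the pointer
// may have at that program point.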
struct AAPointerInfoImpl
    : public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
  const std::string getAsStr(Attributor *A) const override {
    return std::string("PointerInfo ") +
           (isValidState() ? (std::string("#") +
                              std::to_string(OffsetBins.size()) + " bins")
    return AAPointerInfo::manifest(A);
  virtual const_bin_iterator begin() const override { return State::begin(); }
  virtual const_bin_iterator end() const override { return State::end(); }
  virtual int64_t numOffsetBins() const override {
    return State::numOffsetBins();
  bool forallInterferingAccesses(
    return State::forallInterferingAccesses(Range, CB);
  bool forallInterferingAccesses(
      bool FindInterferingWrites, bool FindInterferingReads,
      function_ref<bool(const Access &, bool)> UserCB, bool &HasBeenWrittenTo,
      function_ref<bool(const Access &)> SkipCB) const override {
    HasBeenWrittenTo = false;
    bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
    bool AllInSameNoSyncFn = IsAssumedNoSync;
    bool InstIsExecutedByInitialThreadOnly =
        ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(I);
    bool InstIsExecutedInAlignedRegion =
        FindInterferingReads && ExecDomainAA &&
        ExecDomainAA->isExecutedInAlignedRegion(A, I);
    if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
      A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
    bool IsThreadLocalObj =
    auto CanIgnoreThreadingForInst = [&](const Instruction &I) -> bool {
      if (IsThreadLocalObj || AllInSameNoSyncFn)
      const auto *FnExecDomainAA =
          I.getFunction() == &Scope
      if (!FnExecDomainAA)
      if (InstIsExecutedInAlignedRegion ||
          (FindInterferingWrites &&
           FnExecDomainAA->isExecutedInAlignedRegion(A, I))) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
      if (InstIsExecutedByInitialThreadOnly &&
          FnExecDomainAA->isExecutedByInitialThreadOnly(I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
    auto CanIgnoreThreading = [&](const Access &Acc) -> bool {
      return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
             (Acc.getRemoteInst() != Acc.getLocalInst() &&
              CanIgnoreThreadingForInst(*Acc.getLocalInst()));
    bool IsKnownNoRecurse;
    AA::hasAssumedIRAttr<Attribute::NoRecurse>(
    bool InstInKernel = Scope.hasFnAttribute("kernel");
    bool ObjHasKernelLifetime = false;
    const bool UseDominanceReasoning =
        FindInterferingWrites && IsKnownNoRecurse;
      case AA::GPUAddressSpace::Shared:
      case AA::GPUAddressSpace::Constant:
      case AA::GPUAddressSpace::Local:
    std::function<bool(const Function &)> IsLiveInCalleeCB;

    if (auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
      bool IsKnownNoRecurse;
      if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
              IsKnownNoRecurse)) {
        IsLiveInCalleeCB = [AIFn](const Function &Fn) { return AIFn != &Fn; };
    } else if (auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
      ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
      if (ObjHasKernelLifetime)
        IsLiveInCalleeCB = [](const Function &Fn) {
          return !Fn.hasFnAttribute("kernel");

    auto AccessCB = [&](const Access &Acc, bool Exact) {
      bool AccInSameScope = AccScope == &Scope;
      if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
      if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &I) {
        if (Acc.isWrite() || (isa<LoadInst>(I) && Acc.isWriteOrAssumption()))
          ExclusionSet.insert(Acc.getRemoteInst());
      if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
          (!FindInterferingReads || !Acc.isRead()))
      bool Dominates = FindInterferingWrites && DT && Exact &&
                       Acc.isMustAccess() && AccInSameScope &&
        DominatingWrites.insert(&Acc);
      AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &Scope;

      InterferingAccesses.push_back({&Acc, Exact});
    if (!State::forallInterferingAccesses(I, AccessCB, Range))
    HasBeenWrittenTo = !DominatingWrites.empty();

    for (const Access *Acc : DominatingWrites) {
      if (!LeastDominatingWriteInst) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      } else if (DT->dominates(LeastDominatingWriteInst,
                               Acc->getRemoteInst())) {
        LeastDominatingWriteInst = Acc->getRemoteInst();

    auto CanSkipAccess = [&](const Access &Acc, bool Exact) {
      if (SkipCB && SkipCB(Acc))
      if (!CanIgnoreThreading(Acc))
      bool ReadChecked = !FindInterferingReads;
      bool WriteChecked = !FindInterferingWrites;
                                     &ExclusionSet, IsLiveInCalleeCB))
      if (!WriteChecked) {
                                       &ExclusionSet, IsLiveInCalleeCB))
          WriteChecked = true;
      if (!WriteChecked && HasBeenWrittenTo &&
          Acc.getRemoteInst()->getFunction() != &Scope) {
        if (!FnReachabilityAA ||
            !FnReachabilityAA->instructionCanReach(
                A, *LeastDominatingWriteInst,
                *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
          WriteChecked = true;

      if (ReadChecked && WriteChecked)
      if (!DT || !UseDominanceReasoning)
      if (!DominatingWrites.count(&Acc))
      return LeastDominatingWriteInst != Acc.getRemoteInst();

    for (auto &It : InterferingAccesses) {
      if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
          !CanSkipAccess(*It.first, It.second)) {
        if (!UserCB(*It.first, It.second))
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();
    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);
    bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (IsByval && !RAcc.isRead())
        bool UsedAssumedInformation = false;
        auto Content = A.translateArgumentToCallSiteContent(
            RAcc.getContent(), CB, *this, UsedAssumedInformation);
        AK = AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
        AK = AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));

        Changed |= addAccess(A, RAcc.getRanges(), CB, Content, AK,
                             RAcc.getType(), RAcc.getRemoteInst());
                            const OffsetInfo &Offsets, CallBase &CB) {
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();
    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        for (auto Offset : Offsets) {
          if (!NewRanges.isUnknown()) {
            NewRanges.addToAllOffsets(Offset);
          addAccess(A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
                    RAcc.getType(), RAcc.getRemoteInst());
  void trackPointerInfoStatistics(const IRPosition &IRP) const {}

    for (auto &It : OffsetBins) {
      O << "[" << It.first.Offset << "-" << It.first.Offset + It.first.Size
        << "] : " << It.getSecond().size() << "\n";
      for (auto AccIndex : It.getSecond()) {
        auto &Acc = AccessList[AccIndex];
        O << " - " << Acc.getKind() << " - " << *Acc.getLocalInst() << "\n";
        if (Acc.getLocalInst() != Acc.getRemoteInst())
          O << " --> " << *Acc.getRemoteInst()
        if (!Acc.isWrittenValueYetUndetermined()) {
          if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
            O << " - c: func " << Acc.getWrittenValue()->getName()
          else if (Acc.getWrittenValue())
            O << " - c: " << *Acc.getWrittenValue() << "\n";
            O << " - c: <unknown>\n";
struct AAPointerInfoFloating : public AAPointerInfoImpl {
      : AAPointerInfoImpl(IRP, A) {}
    using namespace AA::PointerInfo;
    TypeSize AccessSize = DL.getTypeStoreSize(&Ty);
    if (!VT || VT->getElementCount().isScalable() ||
        (*Content)->getType() != VT ||
        DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
      Type *ElementType = VT->getElementType();
      int64_t ElementSize = DL.getTypeStoreSize(ElementType).getFixedValue();
      auto *ConstContent = cast<Constant>(*Content);

      for (int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
        Changed = Changed | addAccess(A, {ElementOffsets, ElementSize}, I,
                                      ElementContent, Kind, ElementType);

        for (auto &ElementOffset : ElementOffsets)
          ElementOffset += ElementSize;
                               OffsetInfo &UsrOI, const OffsetInfo &PtrOI,
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &A,
                                                   const OffsetInfo &PtrOI,
  unsigned BitWidth = DL.getIndexTypeSizeInBits(GEP->getType());
  assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
         "Don't look for constant values if the offset has already been "
         "determined to be unknown.");

  if (!GEP->collectOffset(DL, BitWidth, VariableOffsets, ConstantOffset)) {
                    << (VariableOffsets.empty() ? "" : "not") << " constant "
  Union.addToAll(ConstantOffset.getSExtValue());

  for (const auto &VI : VariableOffsets) {
    if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
    if (PotentialConstantsAA->undefIsContained())
    if (AssumedSet.empty())
    for (const auto &ConstOffset : AssumedSet) {
      auto CopyPerOffset = Union;
      CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
                             VI.second.getZExtValue());
      Product.merge(CopyPerOffset);

  UsrOI = std::move(Union);
  using namespace AA::PointerInfo;
  Value &AssociatedValue = getAssociatedValue();
  OffsetInfoMap[&AssociatedValue].insert(0);

  auto HandlePassthroughUser = [&](Value *Usr, Value *CurPtr, bool &Follow) {
    auto &UsrOI = OffsetInfoMap[Usr];
    auto &PtrOI = OffsetInfoMap[CurPtr];
    assert(!PtrOI.isUnassigned() &&
           "Cannot pass through if the input Ptr was not visited!");

  const auto *F = getAnchorScope();
      F ? A.getInfoCache().getTargetLibraryInfoForFunction(*F) : nullptr;

  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    User *Usr = U.getUser();
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Analyze " << *CurPtr << " in " << *Usr
           "The current pointer offset should have been seeded!");
      return HandlePassthroughUser(Usr, CurPtr, Follow);
      if (CE->isCompare())
      if (!isa<GEPOperator>(CE)) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled constant user " << *CE

    if (auto *GEP = dyn_cast<GEPOperator>(Usr)) {
      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];
      if (UsrOI.isUnknown())
      if (PtrOI.isUnknown()) {
      Follow = collectConstantsForGEP(A, DL, UsrOI, PtrOI, GEP);

    if (isa<PtrToIntInst>(Usr))
    if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
      return HandlePassthroughUser(Usr, CurPtr, Follow);
    if (isa<PHINode>(Usr)) {
      bool IsFirstPHIUser = !OffsetInfoMap.count(Usr);
      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];
      if (PtrOI.isUnknown()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand offset unknown "
                          << *CurPtr << " in " << *Usr << "\n");
        Follow = !UsrOI.isUnknown();
      if (UsrOI == PtrOI) {
        assert(!PtrOI.isUnassigned() &&
               "Cannot assign if the current Ptr was not visited!");
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant (so far)");
      auto It = OffsetInfoMap.find(CurPtrBase);
      if (It == OffsetInfoMap.end()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand is too complex "
                          << *CurPtr << " in " << *Usr << "\n");
        auto BaseOI = It->getSecond();
        BaseOI.addToAll(Offset.getZExtValue());
        if (IsFirstPHIUser || BaseOI == UsrOI) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant " << *CurPtr
                            << " in " << *Usr << "\n");
          return HandlePassthroughUser(Usr, CurPtr, Follow);
            dbgs() << "[AAPointerInfo] PHI operand pointer offset mismatch "
                   << *CurPtr << " in " << *Usr << "\n");
    if (auto *LoadI = dyn_cast<LoadInst>(Usr)) {
      if (!handleAccess(A, *LoadI, nullptr, AK,
                        OffsetInfoMap[CurPtr].Offsets, Changed,
        if (auto *II = dyn_cast<IntrinsicInst>(&I))
          return II->isAssumeLikeIntrinsic();
        } while (FromI && FromI != ToI);
        if (IntrI.getIntrinsicID() != Intrinsic::assume)
        if (IntrI.getParent() == BB) {
          if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
          if ((*PredIt) != BB)
          if (SuccBB == IntrBB)
          if (isa<UnreachableInst>(SuccBB->getTerminator()))
        if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
        if (IsImpactedInRange(&IntrBB->front(), &IntrI))

      std::pair<Value *, IntrinsicInst *> Assumption;
      for (const Use &LoadU : LoadI->uses()) {
        if (auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
          if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
          for (const Use &CmpU : CmpI->uses()) {
            if (auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
              if (!IsValidAssume(*IntrI))
              int Idx = CmpI->getOperandUse(0) == LoadU;
              Assumption = {CmpI->getOperand(Idx), IntrI};
        if (Assumption.first)
      if (!Assumption.first || !Assumption.second)
                        << *Assumption.second << ": " << *LoadI
                        << " == " << *Assumption.first << "\n");
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (Assumption.first)
            A.getAssumedSimplified(*Assumption.first, *this,
      return handleAccess(
          A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
          OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
      for (auto *OtherOp : OtherOps) {
        if (OtherOp == CurPtr) {
              << "[AAPointerInfo] Escaping use in store like instruction " << I

      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      return handleAccess(A, I, Content, AK, OffsetInfoMap[CurPtr].Offsets,

    if (auto *StoreI = dyn_cast<StoreInst>(Usr))
      return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
                             *StoreI->getValueOperand()->getType(),
                             {StoreI->getValueOperand()}, AccessKind::AK_W);
    if (auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
      return HandleStoreLike(*RMWI, nullptr, *RMWI->getValOperand()->getType(),
                             {RMWI->getValOperand()}, AccessKind::AK_RW);
    if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
      return HandleStoreLike(
          *CXI, nullptr, *CXI->getNewValOperand()->getType(),
          {CXI->getCompareOperand(), CXI->getNewValOperand()},

    if (auto *CB = dyn_cast<CallBase>(Usr)) {
            translateAndAddState(A, *CSArgPI, OffsetInfoMap[CurPtr], *CB) |
        return isValidState();
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Call user not handled " << *CB
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] User not handled " << *Usr << "\n");
  auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
    assert(OffsetInfoMap.count(OldU) && "Old use should be known already!");
    if (OffsetInfoMap.count(NewU)) {
      if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
        dbgs() << "[AAPointerInfo] Equivalent use callback failed: "
               << OffsetInfoMap[NewU] << " vs " << OffsetInfoMap[OldU]
      return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
    OffsetInfoMap[NewU] = OffsetInfoMap[OldU];
  if (!A.checkForAllUses(UsePred, *this, AssociatedValue,
                         true, EquivalentUseCB)) {
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Check for all uses failed, abort!\n");
    return indicatePessimisticFixpoint();
    dbgs() << "Accesses by bin after update:\n";
struct AAPointerInfoReturned final : AAPointerInfoImpl {
      : AAPointerInfoImpl(IRP, A) {}
    return indicatePessimisticFixpoint();
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoArgument final : AAPointerInfoFloating {
      : AAPointerInfoFloating(IRP, A) {}
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
      : AAPointerInfoFloating(IRP, A) {}
    using namespace AA::PointerInfo;
    if (auto *MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
        LengthVal = Length->getSExtValue();
      unsigned ArgNo = getIRPosition().getCallSiteArgNo();
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled memory intrinsic "
        return indicatePessimisticFixpoint();
          ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
          Changed | addAccess(A, {0, LengthVal}, *MI, nullptr, Kind, nullptr);
      dbgs() << "Accesses by bin after update:\n";

    Argument *Arg = getAssociatedArgument();
      if (ArgAA && ArgAA->getState().isValidState())
        return translateAndAddStateFromCallee(A, *ArgAA,
                                              *cast<CallBase>(getCtxI()));
      return indicatePessimisticFixpoint();

    bool IsKnownNoCapture;
    if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
      return indicatePessimisticFixpoint();

    bool IsKnown = false;
      return ChangeStatus::UNCHANGED;
        ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
      : AAPointerInfoFloating(IRP, A) {}
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
    assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nounwind" : "may-unwind";

        (unsigned)Instruction::Invoke,      (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call,        (unsigned)Instruction::CleanupRet,
        (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};

      if (!I.mayThrow(true))
      if (const auto *CB = dyn_cast<CallBase>(&I)) {
        bool IsKnownNoUnwind;
        return AA::hasAssumedIRAttr<Attribute::NoUnwind>(

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes,
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
struct AANoUnwindFunction final : public AANoUnwindImpl {
      : AANoUnwindImpl(IRP, A) {}

struct AANoUnwindCallSite final
    : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
      : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP, A) {}
  case Intrinsic::nvvm_barrier0:
  case Intrinsic::nvvm_barrier0_and:
  case Intrinsic::nvvm_barrier0_or:
  case Intrinsic::nvvm_barrier0_popc:
  case Intrinsic::amdgcn_s_barrier:
    if (ExecutedAligned)

  if (auto *FI = dyn_cast<FenceInst>(I))
  if (auto *AI = dyn_cast<AtomicCmpXchgInst>(I)) {

  switch (I->getOpcode()) {
  case Instruction::AtomicRMW:
    Ordering = cast<AtomicRMWInst>(I)->getOrdering();
  case Instruction::Store:
    Ordering = cast<StoreInst>(I)->getOrdering();
  case Instruction::Load:
    Ordering = cast<LoadInst>(I)->getOrdering();
        "New atomic operations need to be known in the attributor.");

  if (auto *MI = dyn_cast<MemIntrinsic>(I))
    return !MI->isVolatile();
    assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(A, nullptr, getIRPosition(),
                                                    DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nosync" : "may-sync";

    if (I.mayReadOrWriteMemory())
    if (AA::hasAssumedIRAttr<Attribute::NoSync>(

  bool UsedAssumedInformation = false;
  if (!A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *this,
                                          UsedAssumedInformation) ||
      !A.checkForAllCallLikeInstructions(CheckForNoSync, *this,
                                         UsedAssumedInformation))
    return indicatePessimisticFixpoint();
struct AANoSyncFunction final : public AANoSyncImpl {
      : AANoSyncImpl(IRP, A) {}

struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
      : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP, A) {}
struct AANoFreeImpl : public AANoFree {
    assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(A, nullptr, getIRPosition(),
                                                    DepClassTy::NONE, IsKnown));
      return AA::hasAssumedIRAttr<Attribute::NoFree>(
          DepClassTy::REQUIRED, IsKnown);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForNoFree, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nofree" : "may-free";

struct AANoFreeFunction final : public AANoFreeImpl {
      : AANoFreeImpl(IRP, A) {}

struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
      : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP, A) {}
struct AANoFreeFloating : AANoFreeImpl {
      : AANoFreeImpl(IRP, A) {}

    if (AA::hasAssumedIRAttr<Attribute::NoFree>(A, this,
                                                DepClassTy::OPTIONAL, IsKnown))
      return ChangeStatus::UNCHANGED;

    Value &AssociatedValue = getIRPosition().getAssociatedValue();
    auto Pred = [&](const Use &U, bool &Follow) -> bool {
      if (auto *CB = dyn_cast<CallBase>(UserI)) {
        return AA::hasAssumedIRAttr<Attribute::NoFree>(
            DepClassTy::REQUIRED, IsKnown);

      if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
          isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
      if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
          isa<ReturnInst>(UserI))

    if (!A.checkForAllUses(Pred, *this, AssociatedValue))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANoFreeArgument final : AANoFreeFloating {
      : AANoFreeFloating(IRP, A) {}

struct AANoFreeCallSiteArgument final : AANoFreeFloating {
      : AANoFreeFloating(IRP, A) {}

    Argument *Arg = getAssociatedArgument();
      return indicatePessimisticFixpoint();
    if (AA::hasAssumedIRAttr<Attribute::NoFree>(A, this, ArgPos,
                                                DepClassTy::REQUIRED, IsKnown))
      return ChangeStatus::UNCHANGED;
    return indicatePessimisticFixpoint();

struct AANoFreeReturned final : AANoFreeFloating {
      : AANoFreeFloating(IRP, A) {

  void trackStatistics() const override {}

struct AANoFreeCallSiteReturned final : AANoFreeFloating {
      : AANoFreeFloating(IRP, A) {}
    return ChangeStatus::UNCHANGED;
                          bool IgnoreSubsumingPositions) {
  AttrKinds.push_back(Attribute::NonNull);
    AttrKinds.push_back(Attribute::Dereferenceable);
  if (A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))

  if (!Fn->isDeclaration()) {
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
            Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
            UsedAssumedInformation))
                 Attribute::NonNull)});
static int64_t getKnownNonNullAndDerefBytesForUse(
    const Use *U, const Instruction *I, bool &IsNonNull, bool &TrackUse) {
  const Value *UseV = U->get();
  if (isa<CastInst>(I)) {
  if (isa<GetElementPtrInst>(I)) {
  if (const auto *CB = dyn_cast<CallBase>(I)) {
          U, {Attribute::NonNull, Attribute::Dereferenceable})) {
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(A, &QueryingAA, IRP,
    IsNonNull |= IsKnownNonNull;

  if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
      Loc->Size.isScalable() || I->isVolatile())
  if (Base && Base == &AssociatedValue) {
    int64_t DerefBytes = Loc->Size.getValue() + Offset;
    return std::max(int64_t(0), DerefBytes);

  int64_t DerefBytes = Loc->Size.getValue();
  return std::max(int64_t(0), DerefBytes);
    Value &V = *getAssociatedValue().stripPointerCasts();
    if (isa<ConstantPointerNull>(V)) {
      indicatePessimisticFixpoint();

      followUsesInMBEC(*this, A, getState(), *CtxI);

    bool IsNonNull = false;
    bool TrackUse = false;
    getKnownNonNullAndDerefBytesForUse(A, *this, getAssociatedValue(), U, I,
                                       IsNonNull, TrackUse);
    State.setKnown(IsNonNull);

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";
struct AANonNullFloating : public AANonNullImpl {
      : AANonNullImpl(IRP, A) {}

      bool IsKnownNonNull;
      return AA::hasAssumedIRAttr<Attribute::NonNull>(
          A, *this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);

    bool UsedAssumedInformation = false;
    Value *AssociatedValue = &getAssociatedValue();
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
        Values.size() != 1 || Values.front().getValue() != AssociatedValue;

      if (auto *PHI = dyn_cast<PHINode>(AssociatedValue))
          return AA::hasAssumedIRAttr<Attribute::NonNull>(
              A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
          return ChangeStatus::UNCHANGED;
      if (auto *Select = dyn_cast<SelectInst>(AssociatedValue))
        if (AA::hasAssumedIRAttr<Attribute::NonNull>(
                DepClassTy::OPTIONAL, IsKnown) &&
            AA::hasAssumedIRAttr<Attribute::NonNull>(
                DepClassTy::OPTIONAL, IsKnown))
          return ChangeStatus::UNCHANGED;

      if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
        return indicatePessimisticFixpoint();
      return ChangeStatus::UNCHANGED;

    for (const auto &VAC : Values)
        return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
struct AANonNullReturned final
    : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                   false, AANonNull::IRAttributeKind, false> {

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";

struct AANonNullArgument final
    : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
      : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP, A) {}

struct AANonNullCallSiteArgument final : AANonNullFloating {
      : AANonNullFloating(IRP, A) {}

struct AANonNullCallSiteReturned final
    : AACalleeToCallSite<AANonNull, AANonNullImpl> {
      : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP, A) {}
    assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "mustprogress" : "may-not-progress";

struct AAMustProgressFunction final : AAMustProgressImpl {
      : AAMustProgressImpl(IRP, A) {}

    if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
        return indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;

      bool IsKnownMustProgress;
      return AA::hasAssumedIRAttr<Attribute::MustProgress>(
          A, this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckForMustProgress, *this,
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {

struct AAMustProgressCallSite final : AAMustProgressImpl {
      : AAMustProgressImpl(IRP, A) {}

    bool IsKnownMustProgress;
    if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
            A, this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {
    assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "norecurse" : "may-recurse";

struct AANoRecurseFunction final : AANoRecurseImpl {
      : AANoRecurseImpl(IRP, A) {}

      bool IsKnownNoRecurse;
      if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
              DepClassTy::NONE, IsKnownNoRecurse))
      return IsKnownNoRecurse;
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(CallSitePred, *this, true,
                               UsedAssumedInformation)) {
      if (!UsedAssumedInformation)
        indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;

        DepClassTy::REQUIRED);
    if (EdgeReachability && EdgeReachability->canReach(A, *getAnchorScope()))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

struct AANoRecurseCallSite final
    : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
      : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP, A) {}
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "non-convergent" : "may-be-convergent";

struct AANonConvergentFunction final : AANonConvergentImpl {
      : AANonConvergentImpl(IRP, A) {}

    auto CalleeIsNotConvergent = [&](Instruction &Inst) {
      CallBase &CB = cast<CallBase>(Inst);
      if (!Callee || Callee->isIntrinsic()) {
      if (Callee->isDeclaration()) {
        return !Callee->hasFnAttribute(Attribute::Convergent);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *this,
                                           UsedAssumedInformation)) {
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

    if (isKnownNotConvergent() &&
        A.hasAttr(getIRPosition(), Attribute::Convergent)) {
      A.removeAttrs(getIRPosition(), {Attribute::Convergent});
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
    const size_t UBPrevSize = KnownUBInsts.size();
    const size_t NoUBPrevSize = AssumedNoUBInsts.size();

      if (I.isVolatile() && I.mayWriteToMemory())
      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
             "Expected pointer operand of memory accessing instruction");
      std::optional<Value *> SimplifiedPtrOp =
          stopOnUndefOrAssumed(A, PtrOp, &I);
      if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
      const Value *PtrOpVal = *SimplifiedPtrOp;

      if (!isa<ConstantPointerNull>(PtrOpVal)) {
        AssumedNoUBInsts.insert(&I);
        AssumedNoUBInsts.insert(&I);
        KnownUBInsts.insert(&I);

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
      auto *BrInst = cast<BranchInst>(&I);

      if (BrInst->isUnconditional())
      std::optional<Value *> SimplifiedCond =
          stopOnUndefOrAssumed(A, BrInst->getCondition(), BrInst);
      if (!SimplifiedCond || !*SimplifiedCond)
      AssumedNoUBInsts.insert(&I);

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))

      for (unsigned idx = 0; idx < CB.arg_size(); idx++) {
        if (idx >= Callee->arg_size())
        bool IsKnownNoUndef;
        AA::hasAssumedIRAttr<Attribute::NoUndef>(
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
        if (!IsKnownNoUndef)
        bool UsedAssumedInformation = false;
        std::optional<Value *> SimplifiedVal =
        if (UsedAssumedInformation)
        if (SimplifiedVal && !*SimplifiedVal)
        if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
          KnownUBInsts.insert(&I);
            !isa<ConstantPointerNull>(**SimplifiedVal))
        bool IsKnownNonNull;
        AA::hasAssumedIRAttr<Attribute::NonNull>(
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
          KnownUBInsts.insert(&I);

      auto &RI = cast<ReturnInst>(I);
      std::optional<Value *> SimplifiedRetValue =
          stopOnUndefOrAssumed(A, RI.getReturnValue(), &I);
      if (!SimplifiedRetValue || !*SimplifiedRetValue)

      if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
        bool IsKnownNonNull;
        AA::hasAssumedIRAttr<Attribute::NonNull>(
          KnownUBInsts.insert(&I);

    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(InspectMemAccessInstForUB, *this,
                              {Instruction::Load, Instruction::Store,
                               Instruction::AtomicCmpXchg,
                               Instruction::AtomicRMW},
                              UsedAssumedInformation,
    A.checkForAllInstructions(InspectBrInstForUB, *this, {Instruction::Br},
                              UsedAssumedInformation,
    A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *this,
                                      UsedAssumedInformation);
    if (!getAnchorScope()->getReturnType()->isVoidTy()) {
      if (!A.isAssumedDead(ReturnIRP, this, nullptr, UsedAssumedInformation)) {
        bool IsKnownNoUndef;
        AA::hasAssumedIRAttr<Attribute::NoUndef>(
            A, this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
          A.checkForAllInstructions(InspectReturnInstForUB, *this,
                                    {Instruction::Ret}, UsedAssumedInformation,

    if (NoUBPrevSize != AssumedNoUBInsts.size() ||
        UBPrevSize != KnownUBInsts.size())
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;

    return KnownUBInsts.count(I);

  bool isAssumedToCauseUB(Instruction *I) const override {
    switch (I->getOpcode()) {
    case Instruction::Load:
    case Instruction::Store:
    case Instruction::AtomicCmpXchg:
    case Instruction::AtomicRMW:
      return !AssumedNoUBInsts.count(I);
    case Instruction::Br: {
      auto *BrInst = cast<BranchInst>(I);
      if (BrInst->isUnconditional())
      return !AssumedNoUBInsts.count(I);

    if (KnownUBInsts.empty())
      return ChangeStatus::UNCHANGED;
      A.changeToUnreachableAfterManifest(I);
    return ChangeStatus::CHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "undefined-behavior" : "no-ub";
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimplifiedV =
    if (!UsedAssumedInformation) {
        KnownUBInsts.insert(I);
        return std::nullopt;
    if (isa<UndefValue>(V)) {
      KnownUBInsts.insert(I);
      return std::nullopt;

struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
      : AAUndefinedBehaviorImpl(IRP, A) {}

  void trackStatistics() const override {
               "Number of instructions known to have UB");
        KnownUBInsts.size();
  if (SCCI.hasCycle())
  for (auto *L : LI->getLoopsInPreorder()) {

    assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
  bool isImpliedByMustprogressAndReadonly(Attributor &A, bool KnownOnly) {
    if (!A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
    return IsKnown || !KnownOnly;

    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

      if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
              A, this, IPos, DepClassTy::REQUIRED, IsKnown)) {
      bool IsKnownNoRecurse;
      return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
          A, this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForWillReturn, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "willreturn" : "may-noreturn";

struct AAWillReturnFunction final : AAWillReturnImpl {
      : AAWillReturnImpl(IRP, A) {}

    AAWillReturnImpl::initialize(A);

    assert(F && "Did expect an anchor function");
    if (F->isDeclaration() || mayContainUnboundedCycle(*F, A))
      indicatePessimisticFixpoint();

struct AAWillReturnCallSite final
    : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
      : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP, A) {}

    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

    return AACalleeToCallSite::updateImpl(A);
  const ToTy *To = nullptr;
    assert(Hash == 0 && "Computed hash twice!");
                   InstSetDMI::getHashValue(ExclusionSet));
      : From(&From), To(&To), ExclusionSet(ES) {
    if (!ES || ES->empty()) {
      ExclusionSet = nullptr;
    } else if (MakeUnique) {
      ExclusionSet = A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
      : From(RQI.From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}

    return &TombstoneKey;
    if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
    return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);
#define DefineKeys(ToTy)                                                       \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey =                  \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getEmptyKey(),                \
              DenseMapInfo<const ToTy *>::getEmptyKey());                      \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey =              \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getTombstoneKey(),            \
              DenseMapInfo<const ToTy *>::getTombstoneKey());
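
// Note (added): the two specializations above provide the sentinel empty and
// tombstone ReachabilityQueryInfo keys that DenseMap requires to tell unused
// and erased buckets apart from live entries.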
template <typename BaseTy, typename ToTy>
struct CachedReachabilityAA : public BaseTy {

  bool isQueryAA() const override { return true; }

    for (unsigned u = 0, e = QueryVector.size(); u < e; ++u) {
      RQITy *RQI = QueryVector[u];
      if (RQI->Result == RQITy::Reachable::No &&
          isReachableImpl(A, *RQI, false))
        Changed = ChangeStatus::CHANGED;

  virtual bool isReachableImpl(Attributor &A, RQITy &RQI,
                               bool IsTemporaryRQI) = 0;

                      RQITy &RQI, bool UsedExclusionSet, bool IsTemporaryRQI) {
      QueryCache.erase(&RQI);

    if (Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
      RQITy PlainRQI(RQI.From, RQI.To);
      if (!QueryCache.count(&PlainRQI)) {
        RQITy *RQIPtr = new (A.Allocator) RQITy(RQI.From, RQI.To);
        QueryVector.push_back(RQIPtr);
        QueryCache.insert(RQIPtr);

    if (IsTemporaryRQI && Result != RQITy::Reachable::Yes && UsedExclusionSet) {
      assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
             "Did not expect empty set!");
      RQITy *RQIPtr = new (A.Allocator)
          RQITy(A, *RQI.From, *RQI.To, RQI.ExclusionSet, true);
      assert(RQIPtr->Result == RQITy::Reachable::No && "Already reachable?");
      assert(!QueryCache.count(RQIPtr));
      QueryVector.push_back(RQIPtr);
      QueryCache.insert(RQIPtr);

    if (Result == RQITy::Reachable::No && IsTemporaryRQI)
      A.registerForUpdate(*this);
    return Result == RQITy::Reachable::Yes;

  const std::string getAsStr(Attributor *A) const override {
    return "#queries(" + std::to_string(QueryVector.size()) + ")";

  bool checkQueryCache(Attributor &A, RQITy &StackRQI,
                       typename RQITy::Reachable &Result) {
    if (!this->getState().isValidState()) {
      Result = RQITy::Reachable::Yes;

    if (StackRQI.ExclusionSet) {
      RQITy PlainRQI(StackRQI.From, StackRQI.To);
      auto It = QueryCache.find(&PlainRQI);
      if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
        Result = RQITy::Reachable::No;

    auto It = QueryCache.find(&StackRQI);
    if (It != QueryCache.end()) {

    QueryCache.insert(&StackRQI);
struct AAIntraFnReachabilityFunction final
    : public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
  using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;

  bool isAssumedReachable(
    auto *NonConstThis = const_cast<AAIntraFnReachabilityFunction *>(this);

    RQITy StackRQI(A, From, To, ExclusionSet, false);
    typename RQITy::Reachable Result;
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI,
    return Result == RQITy::Reachable::Yes;

        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
            [&](const auto &DeadEdge) {
              return LivenessAA->isEdgeDead(DeadEdge.first,
              return LivenessAA->isAssumedDead(BB);
      return ChangeStatus::UNCHANGED;

    return Base::updateImpl(A);

                       bool IsTemporaryRQI) override {
    bool UsedExclusionSet = false;
      while (IP && IP != &To) {
        if (ExclusionSet && IP != Origin && ExclusionSet->count(IP)) {
          UsedExclusionSet = true;
           "Not an intra-procedural query!");

    if (FromBB == ToBB &&
        WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,

    if (!WillReachInBlock(ToBB->front(), *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

    if (RQI.ExclusionSet)
      for (auto *I : *RQI.ExclusionSet)
        if (I->getFunction() == Fn)
          ExclusionBlocks.insert(I->getParent());

    if (ExclusionBlocks.count(FromBB) &&
      return rememberResult(A, RQITy::Reachable::No, RQI, true, IsTemporaryRQI);

        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
      DeadBlocks.insert(ToBB);
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

    while (!Worklist.empty()) {
      if (!Visited.insert(BB).second)
        if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
          LocalDeadEdges.insert({BB, SuccBB});
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
        if (ExclusionBlocks.count(SuccBB)) {
          UsedExclusionSet = true;

    DeadEdges.insert(LocalDeadEdges.begin(), LocalDeadEdges.end());
    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

  void trackStatistics() const override {}
                          bool IgnoreSubsumingPositions) {
  assert(ImpliedAttributeKind == Attribute::NoAlias &&
         "Unexpected attribute kind");
  if (isa<AllocaInst>(Val))
    IgnoreSubsumingPositions = true;
  if (isa<UndefValue>(Val))
  if (isa<ConstantPointerNull>(Val) &&
  if (A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
                IgnoreSubsumingPositions, Attribute::NoAlias))

    assert(getAssociatedType()->isPointerTy() &&
           "Noalias is a pointer attribute");

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noalias" : "may-alias";

struct AANoAliasFloating final : AANoAliasImpl {
      : AANoAliasImpl(IRP, A) {}
    return indicatePessimisticFixpoint();

  void trackStatistics() const override {

struct AANoAliasArgument final
    : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
  using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;

    if (AA::hasAssumedIRAttr<Attribute::NoSync>(
            DepClassTy::OPTIONAL, IsKnownNoSycn))
      return Base::updateImpl(A);

      return Base::updateImpl(A);

    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(
            true, UsedAssumedInformation))
      return Base::updateImpl(A);

    return indicatePessimisticFixpoint();
struct AANoAliasCallSiteArgument final : AANoAliasImpl {
  AANoAliasCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// Determine if the underlying value may alias with the call site argument
  /// \p OtherArgNo of the underlying call site.
  bool mayAliasWithArgument(Attributor &A, AAResults *&AAR,
                            const AAMemoryBehavior &MemBehaviorAA,
                            const CallBase &CB, unsigned OtherArgNo) {
    // We do not need to worry about aliasing with the underlying IRP.
    if (this->getCalleeArgNo() == (int)OtherArgNo)
      return false;

    // If it is not a pointer or pointer vector we do not alias.
    const Value *ArgOp = CB.getArgOperand(OtherArgNo);
    if (!ArgOp->getType()->isPtrOrPtrVectorTy())
      return false;

    auto *CBArgMemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
        *this, IRPosition::callsite_argument(CB, OtherArgNo), DepClassTy::NONE);

    // If the argument is readnone, there is no read-write aliasing.
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // If the argument is readonly and the underlying value is readonly, there
    // is no read-write aliasing either.
    bool IsReadOnly = MemBehaviorAA.isAssumedReadOnly();
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
        IsReadOnly) {
      A.recordDependence(MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // We have to utilize actual alias analysis queries so we need the object.
    if (!AAR)
      AAR = A.getInfoCache().getAnalysisResultForFunction<AAManager>(
          *getAnchorScope());

    // Try to rule it out at the call site.
    bool IsAliasing = !AAR || !AAR->isNoAlias(&getAssociatedValue(), ArgOp);
    LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Check alias between "
                         "callsite arguments: "
                      << getAssociatedValue() << " " << *ArgOp << " => "
                      << (IsAliasing ? "" : "no-") << "alias \n");

    return IsAliasing;
  }

  bool isKnownNoAliasDueToNoAliasPreservation(
      Attributor &A, AAResults *&AAR, const AAMemoryBehavior &MemBehaviorAA) {
    // We deduce "noalias" if (i) the value is noalias at its definition,
    // (ii) it is not captured before this call site, and (iii) no other
    // pointer argument of this call site can alias it.
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    const Function *ScopeFn = VIRP.getAnchorScope();

    auto IsDereferenceableOrNull = [&](Value *O, const DataLayout &DL) {
      const auto *DerefAA = A.getAAFor<AADereferenceable>(
          *this, IRPosition::value(*O), DepClassTy::OPTIONAL);
      return DerefAA && DerefAA->getAssumedDereferenceableBytes();
    };

    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());
      if (ScopeFn) {
        // Uses in no-capture call site arguments are fine.
        if (auto *CB = dyn_cast<CallBase>(UserI)) {
          if (CB->isArgOperand(&U)) {
            unsigned ArgNo = CB->getArgOperandNo(&U);
            bool IsKnownNoCapture;
            if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
                    A, this, IRPosition::callsite_argument(*CB, ArgNo),
                    DepClassTy::OPTIONAL, IsKnownNoCapture))
              return true;
          }
        }

        // Uses that cannot be reached before this call site are fine as well.
        if (!AA::isPotentiallyReachable(
                A, *UserI, *getCtxI(), *this, /* ExclusionSet */ nullptr,
                [ScopeFn](const Function &Fn) { return &Fn != ScopeFn; }))
          return true;
      }

      switch (DetermineUseCaptureKind(U, IsDereferenceableOrNull)) {
      case UseCaptureKind::NO_CAPTURE:
        return true;
      case UseCaptureKind::MAY_CAPTURE:
        LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Unknown user: " << *UserI
                          << "\n");
        return false;
      case UseCaptureKind::PASSTHROUGH:
        Follow = true;
        return true;
      }
      llvm_unreachable("unknown UseCaptureKind");
    };

    bool IsKnownNoCapture;
    const AANoCapture *NoCaptureAA = nullptr;
    bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
        A, this, VIRP, DepClassTy::NONE, IsKnownNoCapture, false, &NoCaptureAA);
    if (!IsAssumedNoCapture &&
        (!NoCaptureAA || !NoCaptureAA->isAssumedNoCaptureMaybeReturned())) {
      if (!A.checkForAllUses(UsePred, *this, getAssociatedValue())) {
        LLVM_DEBUG(
            dbgs() << "[AANoAliasCSArg] " << getAssociatedValue()
                   << " cannot be noalias as it is potentially captured\n");
        return false;
      }
    }
    if (NoCaptureAA)
      A.recordDependence(*NoCaptureAA, *this, DepClassTy::OPTIONAL);

    // Check there is no other pointer argument which could alias with the
    // value passed at this call site.
    const auto &CB = cast<CallBase>(getAnchorValue());
    for (unsigned OtherArgNo = 0; OtherArgNo < CB.arg_size(); OtherArgNo++)
      if (mayAliasWithArgument(A, AAR, MemBehaviorAA, CB, OtherArgNo))
        return false;

    return true;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If the argument is readnone we are done as there are no accesses via
    // the argument.
    auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, getIRPosition(), DepClassTy::NONE);
    if (MemBehaviorAA && MemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;
    }

    bool IsKnownNoAlias;
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
            A, this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AANoAlias] " << getAssociatedValue()
                        << " is not no-alias at the definition\n");
      return indicatePessimisticFixpoint();
    }

    AAResults *AAR = nullptr;
    if (MemBehaviorAA &&
        isKnownNoAliasDueToNoAliasPreservation(A, AAR, *MemBehaviorAA)) {
      LLVM_DEBUG(
          dbgs() << "[AANoAlias] No-Alias deduced via no-alias preservation\n");
      return ChangeStatus::UNCHANGED;
    }

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(noalias) }
};
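/// NoAlias deduction for a function return value: deduced if every returned
/// value is itself a noalias, non-captured pointer (or null/undef).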
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckReturnValue = [&](Value &RV) -> bool {
      if (Constant *C = dyn_cast<Constant>(&RV))
        if (C->isNullValue() || isa<UndefValue>(C))
          return true;

      // For now, we can only deduce noalias if we have call sites.
      if (!isa<CallBase>(&RV))
        return false;

      const IRPosition &RVPos = IRPosition::value(RV);
      bool IsKnownNoAlias;
      if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
              A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
        return false;

      bool IsKnownNoCapture;
      const AANoCapture *NoCaptureAA = nullptr;
      bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
          A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
          &NoCaptureAA);
      return IsAssumedNoCapture ||
             (NoCaptureAA && NoCaptureAA->isAssumedNoCaptureMaybeReturned());
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(noalias) }
};
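/// NoAlias deduction for a call site return value, derived from the callee's
/// return position.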
struct AANoAliasCallSiteReturned final
    : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(noalias) }
};
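/// ------------------------ Liveness (AAIsDead) ------------------------------

/// Common base for liveness of values (instructions, arguments, and return
/// values). A value is assumed dead if it is side-effect free and all of its
/// uses are assumed dead.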
struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }

  /// Check if all uses are assumed dead.
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // Callers might not check the type, hence the target may be dead.
    if (V.getType()->isVoidTy() || V.use_empty())
      return true;

    // If we replace a value with a constant there are no uses left afterwards.
    if (!isa<Constant>(V)) {
      if (auto *I = dyn_cast<Instruction>(&V))
        if (!A.isRunOn(*I->getFunction()))
          return false;
      bool UsedAssumedInformation = false;
      std::optional<Constant *> C =
          A.getAssumedConstant(V, *this, UsedAssumedInformation);
      if (!C || *C)
        return true;
    }

    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    // Explicitly set the dependence class to required because we want a long
    // chain of N dependent instructions to be considered live as soon as one
    // is without going through N update cycles. This is not required for
    // correctness.
    return A.checkForAllUses(UsePred, *this, V, /* CheckBBLivenessOnly */ false,
                             DepClassTy::REQUIRED,
                             /* IgnoreDroppableUses */ false);
  }

  /// Determine if \p I is assumed to be side-effect free.
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    if (!I || wouldInstructionBeTriviallyDead(I))
      return true;

    auto *CB = dyn_cast<CallBase>(I);
    if (!CB || isa<IntrinsicInst>(CB))
      return false;

    const IRPosition &CallIRP = IRPosition::callsite_function(*CB);

    bool IsKnownNoUnwind;
    if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      return false;

    bool IsKnown;
    return AA::isAssumedReadOnly(A, CallIRP, *this, IsKnown);
  }
};
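/// Liveness deduction for a floating value: also covers stores and fences,
/// which are not side-effect free but may still be removable.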
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);

    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (!isAssumedSideEffectFree(A, I)) {
      // Stores and fences are not side-effect free but can still be removable.
      if (!isa_and_nonnull<StoreInst>(I) && !isa_and_nonnull<FenceInst>(I))
        indicatePessimisticFixpoint();
      else
        removeAssumedBits(HAS_NO_EFFECT);
    }
  }

  /// A fence is dead if the execution domain makes it a no-op.
  bool isDeadFence(Attributor &A, FenceInst &FI) {
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(*FI.getFunction()), *this, DepClassTy::NONE);
    if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
      return false;
    A.recordDependence(*ExecDomainAA, *this, DepClassTy::OPTIONAL);
    return true;
  }

  /// A store is dead if all potential copies of the stored value are dead.
  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // Volatile stores are not dead, skip them.
    if (SI.isVolatile())
      return false;

    // If we are collecting assumes to be deleted we are in the manifest stage.
    // It is problematic to collect the potential copies again now, so we use
    // the cached ones.
    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      if (!AA::getPotentialCopiesOfStoredValue(A, SI, PotentialCopies, *this,
                                               UsedAssumedInformation)) {
        LLVM_DEBUG(
            dbgs()
            << "[AAIsDead] Could not determine potential copies of store!\n");
        return false;
      }
    }
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");

    InformationCache &InfoCache = A.getInfoCache();
    return llvm::all_of(PotentialCopies, [&](Value *V) {
      if (A.isAssumedDead(IRPosition::value(*V), this, nullptr,
                          UsedAssumedInformation))
        return true;
      if (auto *LI = dyn_cast<LoadInst>(V)) {
        if (llvm::all_of(LI->uses(), [&](const Use &U) {
              auto &UserI = cast<Instruction>(*U.getUser());
              if (InfoCache.isOnlyUsedByAssume(UserI)) {
                if (AssumeOnlyInst)
                  AssumeOnlyInst->insert(&UserI);
                return true;
              }
              return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
            }))
          return true;
      }
      LLVM_DEBUG(dbgs() << "[AAIsDead] Potential copy " << *V
                        << " is assumed live!\n");
      return false;
    });
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (isa_and_nonnull<StoreInst>(I))
      if (isValidState())
        return "assumed-dead-store";
    if (isa_and_nonnull<FenceInst>(I))
      if (isValidState())
        return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr(A);
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (auto *SI = dyn_cast_or_null<StoreInst>(I)) {
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    } else if (auto *FI = dyn_cast_or_null<FenceInst>(I)) {
      if (!isDeadFence(A, *FI))
        return indicatePessimisticFixpoint();
    } else {
      if (!isAssumedSideEffectFree(A, I))
        return indicatePessimisticFixpoint();
      if (!areAllUsesAssumedDead(A, getAssociatedValue()))
        return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }

  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      if (auto *SI = dyn_cast<StoreInst>(I)) {
        SmallSetVector<Instruction *, 8> AssumeOnlyInst;
        bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
        (void)IsDead;
        assert(IsDead && "Store was assumed to be dead!");
        A.deleteAfterManifest(*I);
        // Also delete instructions that only fed assumes, transitively.
        for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
          Instruction *AOI = AssumeOnlyInst[i];
          for (auto *Usr : AOI->users())
            AssumeOnlyInst.insert(cast<Instruction>(Usr));
          A.deleteAfterManifest(*AOI);
        }
        return ChangeStatus::CHANGED;
      }
      if (auto *FI = dyn_cast<FenceInst>(I)) {
        assert(isDeadFence(A, *FI));
        A.deleteAfterManifest(*FI);
        return ChangeStatus::CHANGED;
      }
      if (isAssumedSideEffectFree(A, I) && !isa<InvokeInst>(I)) {
        A.deleteAfterManifest(*I);
        return ChangeStatus::CHANGED;
      }
    }
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(IsDead)
  }

protected:
  /// The potential copies of a dead store, cached for manifest.
  SmallSetVector<Value *, 4> PotentialCopies;
};
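/// Liveness deduction for a function argument: a dead argument is manifested
/// by rewriting the function signature to drop it.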
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, /* ReplacementTypes */ {}))
      if (A.registerFunctionSignatureRewrite(
              Arg, /* ReplacementTypes */ {},
              Attributor::ArgumentReplacementInfo::CalleeRepairCBTy{},
              Attributor::ArgumentReplacementInfo::ACSRepairCBTy{}))
        return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(IsDead) }
};
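/// Liveness deduction for a call site argument: redirected to the callee
/// argument; a dead operand is replaced by undef during manifest.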
struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    CallBase &CB = cast<CallBase>(getAnchorValue());
    Use &U = CB.getArgOperandUse(getCallSiteArgNo());
    assert(!isa<UndefValue>(U.get()) &&
           "Expected undef values to be filtered out!");
    UndefValue &UV = *UndefValue::get(U->getType());
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(IsDead) }
};
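/// Liveness deduction for a call site return value: dead only if all users
/// are dead and the call itself is side-effect free.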
struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    // We track this separately as a secondary state.
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      STATS_DECLTRACK_CSRET_ATTR(IsDead)
    else
      STATS_DECLTRACK_CSRET_ATTR(UnusedResult)
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};
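/// Liveness deduction for a function return value: dead if the returned value
/// is unused at every non-callback call site; manifested by returning undef.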
struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        return false;
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // TODO: Rewrite the signature to return void?
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    auto RetInstPred = [&](Instruction &I) {
      ReturnInst &RI = cast<ReturnInst>(I);
      if (!isa<UndefValue>(RI.getReturnValue()))
        AnyChange |= A.changeUseAfterManifest(RI.getOperandUse(0), UV);
      return true;
    };
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(IsDead) }
};
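/// Liveness deduction for a whole function, tracked per basic block and per
/// control-flow edge: starting from the entry block, the update explores
/// successors that are assumed reachable, treating noreturn calls and
/// known-constant branch or switch conditions as barriers. Blocks never
/// reached this way are deleted during manifest.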
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function *F = getAnchorScope();
    assert(F && "Did expect an anchor function");
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  }

  /// An internal function is assumed dead if it has no call sites at all.
  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      return false;
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; }, *this,
                                  true, UsedAssumedInformation);
  }

  /// Returns true if the function personality may catch asynchronous
  /// exceptions, in which case an invoke cannot be changed into a call.
  static bool mayCatchAsynchronousExceptions(const Function &F) {
    return F.hasPersonalityFn() && !canSimplifyInvokeNoUnwind(&F);
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function &F = *getAnchorScope();

    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind. This is not possible if the personality of the
    // function allows to catch asynchronous exceptions.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      auto *CB = dyn_cast<CallBase>(DeadEndI);
      if (!CB)
        continue;
      bool IsKnownNoReturn;
      bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL,
          IsKnownNoReturn);
      if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
        continue;

      if (auto *II = dyn_cast<InvokeInst>(DeadEndI))
        A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      else
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      HasChanged = ChangeStatus::CHANGED;
    }

    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        HasChanged = ChangeStatus::CHANGED;
      }

    return HasChanged;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  bool isEdgeDead(const BasicBlock *From, const BasicBlock *To) const override {
    assert(From->getParent() == getAnchorScope() &&
           To->getParent() == getAnchorScope() &&
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

  /// Liveness queries for the whole function are answered conservatively.
  bool isAssumedDead() const override { return false; }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return false; }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == getAnchorScope() &&
           "BB must be in the same anchor scope function.");
    if (!getAssumed())
      return false;
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    if (!getAssumed())
      return false;

    // If it is not in AssumedLiveBlocks it is for sure dead. Otherwise, it
    // can still be after a noreturn call in a live block.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;

    // If it is not after a liveness barrier it is live.
    const Instruction *PrevI = I->getPrevNode();
    while (PrevI) {
      if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
        return true;
      PrevI = PrevI->getPrevNode();
    }
    return false;
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }

  /// Assume \p BB is (partially) live now and indicate to the Attributor \p A
  /// that internal functions called from \p BB should now be looked at.
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      return false;

    // We assume that all of BB is (probably) live now and if there are calls
    // to internal functions we will assume that those are now live as well.
    for (const Instruction &I : BB)
      if (const auto *CB = dyn_cast<CallBase>(&I))
        if (const Function *F = CB->getCalledFunction())
          if (F->hasLocalLinkage())
            A.markLiveInternalFunction(*F);
    return true;
  }

  /// Collection of instructions that need to be explored again, e.g., we
  /// did assume they do not transfer control to (one of their) successors.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  /// Collection of instructions that are known to not transfer control.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  /// Collection of all assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  /// Collection of all assumed live basic blocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};
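/// Identify the alive successor instructions of a call, invoke, branch, or
/// switch, one overload per instruction kind. Each overload returns true if
/// assumed (not yet known) information was used, i.e., the instruction must
/// be explored again in the next update.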
static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  const IRPosition &IPos = IRPosition::callsite_function(CB);
  bool IsKnownNoReturn;
  if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoReturn))
    return !IsKnownNoReturn;
  if (CB.isTerminator())
    AliveSuccessors.push_back(&CB.getSuccessor(0)->front());
  else
    AliveSuccessors.push_back(CB.getNextNode());
  return false;
}

static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      identifyAliveSuccessors(A, cast<CallBase>(II), AA, AliveSuccessors);

  // First, determine if we can change an invoke to a call assuming the callee
  // is nounwind. This is not possible if the personality of the function
  // allows to catch asynchronous exceptions.
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
  } else {
    const IRPosition &IPos = IRPosition::callsite_function(II);
    bool IsKnownNoUnwind;
    if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      UsedAssumedInformation |= !IsKnownNoUnwind;
    else
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
  }
  return UsedAssumedInformation;
}

static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  if (BI.getNumSuccessors() == 1) {
    AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
  } else {
    std::optional<Constant *> C =
        A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
    if (!C || isa_and_nonnull<UndefValue>(*C)) {
      // No value yet, assume both edges are dead.
    } else if (isa_and_nonnull<ConstantInt>(*C)) {
      const BasicBlock *SuccBB =
          BI.getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
      AliveSuccessors.push_back(&SuccBB->front());
    } else {
      AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
      AliveSuccessors.push_back(&BI.getSuccessor(1)->front());
      UsedAssumedInformation = false;
    }
  }
  return UsedAssumedInformation;
}

static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  SmallVector<AA::ValueAndContext> Values;
  if (!A.getAssumedSimplifiedValues(IRPosition::value(*SI.getCondition()), &AA,
                                    Values, AA::AnyScope,
                                    UsedAssumedInformation)) {
    // Something went wrong, assume all successors are live.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return false;
  }

  if (Values.empty() ||
      (Values.size() == 1 &&
       isa_and_nonnull<UndefValue>(Values.front().getValue()))) {
    // No valid value yet, assume all edges are dead.
    return UsedAssumedInformation;
  }

  Type &Ty = *SI.getCondition()->getType();
  SmallPtrSet<ConstantInt *, 8> Constants;
  auto CheckForConstantInt = [&](Value *V) {
    if (auto *CI = dyn_cast_if_present<ConstantInt>(AA::getWithType(*V, Ty))) {
      Constants.insert(CI);
      return true;
    }
    return false;
  };

  if (!all_of(Values, [&](AA::ValueAndContext &VAC) {
        return CheckForConstantInt(VAC.getValue());
      })) {
    // The condition did not simplify to a set of constants, be conservative.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return UsedAssumedInformation;
  }

  unsigned MatchedCases = 0;
  for (const auto &CaseIt : SI.cases()) {
    if (Constants.count(CaseIt.getCaseValue())) {
      ++MatchedCases;
      AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
    }
  }

  // If not all potential values were matched, the default destination can be
  // taken as well.
  if (MatchedCases < Constants.size())
    AliveSuccessors.push_back(&SI.getDefaultDest()->front());
  return UsedAssumedInformation;
}
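/// Fixpoint exploration for function liveness: pop instructions off the
/// worklist, fast-forward to the next call or terminator, collect the alive
/// successors, and re-queue positions whose successors were determined with
/// assumed information.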
ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) {
  ChangeStatus Change = ChangeStatus::UNCHANGED;

  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      return ChangeStatus::UNCHANGED;

    Function *F = getAnchorScope();
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
    Change = ChangeStatus::CHANGED;
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");

  // Copy and clear the list of instructions we need to explore from. It is
  // refilled with instructions the next update has to look at.
  SmallVector<const Instruction *, 8> Worklist(ToBeExploredFrom.begin(),
                                               ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;

  SmallVector<const Instruction *, 8> AliveSuccessors;
  while (!Worklist.empty()) {
    const Instruction *I = Worklist.pop_back_val();

    // Fast forward for uninteresting instructions.
    while (!I->isTerminator() && !isa<CallBase>(I))
      I = I->getNextNode();

    AliveSuccessors.clear();

    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    default:
      assert(I->isTerminator() &&
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      break;
    case Instruction::Call:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<CallInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Invoke:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<InvokeInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Br:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<BranchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Switch:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<SwitchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    }

    if (UsedAssumedInformation) {
      NewToBeExploredFrom.insert(I);
    } else if (AliveSuccessors.empty() ||
               (I->isTerminator() &&
                AliveSuccessors.size() < I->getNumSuccessors())) {
      if (KnownDeadEnds.insert(I))
        Change = ChangeStatus::CHANGED;
    }

    LLVM_DEBUG(dbgs() << "[AAIsDead] #AliveSuccessors: "
                      << AliveSuccessors.size() << " UsedAssumedInformation: "
                      << UsedAssumedInformation << "\n");

    for (const Instruction *AliveSuccessor : AliveSuccessors) {
      if (!I->isTerminator()) {
        assert(AliveSuccessors.size() == 1 &&
               "Non-terminator expected to have a single successor!");
        Worklist.push_back(AliveSuccessor);
      } else {
        // Record the assumed live edge.
        auto Edge = std::make_pair(I->getParent(), AliveSuccessor->getParent());
        if (AssumedLiveEdges.insert(Edge).second)
          Change = ChangeStatus::CHANGED;
        if (assumeLive(A, *AliveSuccessor->getParent()))
          Worklist.push_back(AliveSuccessor);
      }
    }
  }

  // Check if the content of ToBeExploredFrom changed, ignore the order.
  if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
      llvm::any_of(NewToBeExploredFrom, [&](const Instruction *I) {
        return !ToBeExploredFrom.count(I);
      })) {
    Change = ChangeStatus::CHANGED;
    ToBeExploredFrom = std::move(NewToBeExploredFrom);
  }

  // If everything is live and all dead ends are function-terminating
  // terminators, the state can never change again; answer future queries
  // without lookups by indicating a (pessimistic) fixpoint.
  if (ToBeExploredFrom.empty() &&
      getAnchorScope()->size() == AssumedLiveBlocks.size() &&
      llvm::all_of(KnownDeadEnds, [](const Instruction *DeadEndI) {
        return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
      }))
    return indicatePessimisticFixpoint();
  return Change;
}
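/// Liveness deduction for a call site: not supported yet, requests are
/// expected to be redirected to the callee function.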
struct AAIsDeadCallSite final : AAIsDeadFunction {
  AAIsDeadCallSite(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFunction(IRP, A) {}

  void initialize(Attributor &A) override {
    llvm_unreachable("Abstract attributes for liveness are not "
                     "supported for call sites yet!");
  }

  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {}
};
struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(),
               {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
               Attrs, /* IgnoreSubsumingPositions */ false);
    for (const Attribute &Attr : Attrs)
      takeKnownDerefBytesMaximum(Attr.getValueAsInt());

    // Ensure we initialize the non-null AA (if necessary).
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);

    bool CanBeNull, CanBeFreed;
    takeKnownDerefBytesMaximum(V.getPointerDereferenceableBytes(
        A.getDataLayout(), CanBeNull, CanBeFreed));

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See AbstractAttribute::getState().
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }

  /// Helper function for collecting accessed bytes in must-be-executed-context.
  void addAccessedBytesForUse(Attributor &A, const Use *U, const Instruction *I,
                              DerefState &State) {
    const Value *UseV = U->get();
    if (!UseV->getType()->isPointerTy())
      return;

    std::optional<MemoryLocation> Loc = MemoryLocation::getOrNone(I);
    if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() || I->isVolatile())
      return;

    int64_t Offset;
    const Value *Base = GetPointerBaseWithConstantOffset(
        Loc->Ptr, Offset, A.getDataLayout(), /* AllowNonInbounds */ true);
    if (Base && Base == &getAssociatedValue())
      State.addAccessedBytes(Offset, Loc->Size.getValue());
  }