#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsNVPTX.h"

#define DEBUG_TYPE "attributor"
static cl::opt<bool> ManifestInternal(
    "attributor-manifest-internal", cl::Hidden,
    cl::desc("Manifest Attributor internal string attributes."),
    cl::init(false));

static cl::opt<unsigned, true> MaxPotentialValues(
    "attributor-max-potential-values", cl::Hidden,
    cl::desc("Maximum number of potential values to be "
             "tracked for each position."),
    cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues),
    cl::init(7));

static cl::opt<int> MaxPotentialValuesIterations(
    "attributor-max-potential-values-iterations", cl::Hidden,
    cl::desc(
        "Maximum number of iterations we keep dismantling potential values."),
    cl::init(64));
STATISTIC(NumAAs, "Number of abstract attributes created");
STATISTIC(NumIndirectCallsPromoted, "Number of indirect calls promoted");
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {                                                                            \
    STATS_DECL(NAME, TYPE, MSG)                                                \
    STATS_TRACK(NAME, TYPE)                                                    \
  }
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
#define PIPE_OPERATOR(CLASS)                                                   \
  raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) {                  \
    return OS << static_cast<const AbstractAttribute &>(AA);                   \
  }
/// Helper to test whether \p I is inside a cycle; with \p HeaderOnly set, only
/// the cycle header counts. If \p CPtr is given, the found cycle is returned.
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I,
                         bool HeaderOnly, Cycle **CPtr = nullptr) {
  if (!CI)
    return true;
  auto *BB = I->getParent();
  auto *C = CI->getCycle(BB);
  if (!C)
    return false;
  if (CPtr)
    *CPtr = C;
  return !HeaderOnly || BB == C->getHeader();
}
/// Checks if a type could have padding bytes.
static bool isDenselyPacked(Type *Ty, const DataLayout &DL) {
  // If the alloc size is not equal to the storage size, there are padding
  // bytes.
  if (DL.getTypeSizeInBits(Ty) != DL.getTypeAllocSizeInBits(Ty))
    return false;
  if (VectorType *SeqTy = dyn_cast<VectorType>(Ty))
    return isDenselyPacked(SeqTy->getElementType(), DL);
  if (ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
    return isDenselyPacked(SeqTy->getElementType(), DL);
  if (!isa<StructType>(Ty))
    return true;
  // Check for padding within and between elements of a struct.
  StructType *StructTy = cast<StructType>(Ty);
  const StructLayout *Layout = DL.getStructLayout(StructTy);
  uint64_t StartPos = 0;
  for (unsigned I = 0, E = StructTy->getNumElements(); I != E; ++I) {
    Type *ElTy = StructTy->getElementType(I);
    if (!isDenselyPacked(ElTy, DL))
      return false;
    if (StartPos != Layout->getElementOffsetInBits(I))
      return false;
    StartPos += DL.getTypeAllocSizeInBits(ElTy);
  }
  return true;
}
/// Get the pointer operand of a memory accessing instruction. If \p I is not a
/// memory accessing instruction, return nullptr. If \p AllowVolatile is false
/// and the instruction is volatile, return nullptr as well.
static const Value *getPointerOperand(const Instruction *I,
                                      bool AllowVolatile) {
  if (!AllowVolatile && I->isVolatile())
    return nullptr;

  if (auto *LI = dyn_cast<LoadInst>(I)) {
    return LI->getPointerOperand();
  }

  if (auto *SI = dyn_cast<StoreInst>(I)) {
    return SI->getPointerOperand();
  }

  if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(I)) {
    return CXI->getPointerOperand();
  }

  if (auto *RMWI = dyn_cast<AtomicRMWInst>(I)) {
    return RMWI->getPointerOperand();
  }

  return nullptr;
}
/// Strip casts and GEPs while accumulating their constant offsets, optionally
/// using the Attributor's assumed constant-range information.
static const Value *
stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA,
                          const Value *Val, const DataLayout &DL, APInt &Offset,
                          bool GetMinOffset, bool AllowNonInbounds,
                          bool UseAssumed = false) {
  auto AttributorAnalysis = [&](Value &V, APInt &ROffset) -> bool {
    const IRPosition &Pos = IRPosition::value(V);
    // Only track dependence if we are going to use the assumed info.
    const AAValueConstantRange *ValueConstantRangeAA =
        A.getAAFor<AAValueConstantRange>(QueryingAA, Pos,
                                         UseAssumed ? DepClassTy::OPTIONAL
                                                    : DepClassTy::NONE);
    if (!ValueConstantRangeAA)
      return false;
    ConstantRange Range = UseAssumed ? ValueConstantRangeAA->getAssumed()
                                     : ValueConstantRangeAA->getKnown();
    if (Range.isFullSet())
      return false;
    // Only the signed extrema of the range are usable as offsets.
    ROffset = GetMinOffset ? Range.getSignedMin() : Range.getSignedMax();
    return true;
  };

  return Val->stripAndAccumulateConstantOffsets(DL, Offset, AllowNonInbounds,
                                                /* AllowInvariant */ true,
                                                AttributorAnalysis);
}

static const Value *
getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA,
                        const Value *Ptr, int64_t &BytesOffset,
                        const DataLayout &DL, bool AllowNonInbounds = false) {
  APInt OffsetAPInt(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
  const Value *Base = stripAndAccumulateOffsets(
      A, QueryingAA, Ptr, DL, OffsetAPInt, /* GetMinOffset */ true,
      AllowNonInbounds);
  BytesOffset = OffsetAPInt.getSExtValue();
  return Base;
}
/// Clamp the information known for all returned values of a function
/// (identified by \p QueryingAA) into \p S.
template <typename AAType, typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind,
          bool RecurseForSelectAndPHI = true>
static void clampReturnedValueStates(
    Attributor &A, const AAType &QueryingAA, StateType &S,
    const IRPosition::CallBaseContext *CBContext = nullptr) {
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
                    << QueryingAA << " into " << S << "\n");

  assert((QueryingAA.getIRPosition().getPositionKind() ==
              IRPosition::IRP_RETURNED ||
          QueryingAA.getIRPosition().getPositionKind() ==
              IRPosition::IRP_CALL_SITE_RETURNED) &&
         "Can only clamp returned value states for a function returned or call "
         "site returned position!");

  // Use an optional state as there might not be any return values and we want
  // to join (IntegerState::operator&) the state of all there are.
  std::optional<StateType> T;

  // Callback for each possibly returned value.
  auto CheckReturnValue = [&](Value &RV) -> bool {
    const IRPosition &RVPos = IRPosition::value(RV, CBContext);
    // If possible, use the hasAssumedIRAttr interface.
    if (Attribute::isEnumAttrKind(IRAttributeKind)) {
      bool IsKnown;
      return AA::hasAssumedIRAttr<IRAttributeKind>(
          A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
    }

    const AAType *AA =
        A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
    if (!AA)
      return false;
    LLVM_DEBUG(dbgs() << "[Attributor] RV: " << RV
                      << " AA: " << AA->getAsStr(&A) << " @ " << RVPos << "\n");
    const StateType &AAS = AA->getState();
    if (!T)
      T = StateType::getBestState(AAS);
    *T &= AAS;
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
                      << "\n");
    return T->isValidState();
  };

  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
                                   AA::ValueScope::Intraprocedural,
                                   RecurseForSelectAndPHI))
    S.indicatePessimisticFixpoint();
  else if (T)
    S ^= *T;
}
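// Note (for exposition): the clamp above is a meet over all intra-procedural
// return values. Starting from the best state T, every returned value's state
// is combined via `*T &= AAS`, so the result is only as strong as the weakest
// returned value; if any value invalidates T, the position falls back to a
// pessimistic fixpoint.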
/// Helper class for generic deduction: return value -> returned position.
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool PropagateCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind,
          bool RecurseForSelectAndPHI = true>
struct AAReturnedFromReturnedValues : public BaseType {
  AAReturnedFromReturnedValues(const IRPosition &IRP, Attributor &A)
      : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    StateType S(StateType::getBestState(this->getState()));
    clampReturnedValueStates<AAType, StateType, IRAttributeKind,
                             RecurseForSelectAndPHI>(
        A, *this, S,
        PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
    // TODO: If we know we visited all returned values, thus none are assumed
    // dead, we can take the known information from the state T.
    return clampStateAndIndicateChange<StateType>(this->getState(), S);
  }
};
/// Clamp the information known at all call sites for a given argument
/// (identified by \p QueryingAA) into \p S.
template <typename AAType, typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA,
                                        StateType &S) {
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
                    << QueryingAA << " into " << S << "\n");

  assert(QueryingAA.getIRPosition().getPositionKind() ==
             IRPosition::IRP_ARGUMENT &&
         "Can only clamp call site argument states for an argument position!");

  // Use an optional state as there might not be any call sites and we want to
  // join (IntegerState::operator&) the state of all there are.
  std::optional<StateType> T;

  // The argument number which is also the call site argument number.
  unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();

  auto CallSiteCheck = [&](AbstractCallSite ACS) {
    const IRPosition &ACSArgPos = IRPosition::callsite_argument(ACS, ArgNo);
    // Check if a corresponding argument was found or if it is not associated
    // (which can happen for callback calls).
    if (ACSArgPos.getPositionKind() == IRPosition::IRP_INVALID)
      return false;

    // If possible, use the hasAssumedIRAttr interface.
    if (Attribute::isEnumAttrKind(IRAttributeKind)) {
      bool IsKnown;
      return AA::hasAssumedIRAttr<IRAttributeKind>(
          A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
    }

    const AAType *AA =
        A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
    if (!AA)
      return false;
    LLVM_DEBUG(dbgs() << "[Attributor] ACS: " << *ACS.getInstruction()
                      << " AA: " << AA->getAsStr(&A) << " @" << ACSArgPos
                      << "\n");
    const StateType &AAS = AA->getState();
    if (!T)
      T = StateType::getBestState(AAS);
    *T &= AAS;
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
                      << "\n");
    return T->isValidState();
  };

  bool UsedAssumedInformation = false;
  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true,
                              UsedAssumedInformation))
    S.indicatePessimisticFixpoint();
  else if (T)
    S ^= *T;
}
/// This function is the bridge between argument position and the call base
/// context.
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
bool getArgumentStateFromCallBaseContext(Attributor &A,
                                         BaseType &QueryingAttribute,
                                         IRPosition &Pos, StateType &State) {
  assert((Pos.getPositionKind() == IRPosition::IRP_ARGUMENT) &&
         "Expected an 'argument' position !");
  const CallBase *CBContext = Pos.getCallBaseContext();
  if (!CBContext)
    return false;

  int ArgNo = Pos.getCallSiteArgNo();
  assert(ArgNo >= 0 && "Invalid Arg No!");
  const IRPosition CBArgPos = IRPosition::callsite_argument(*CBContext, ArgNo);

  // If possible, use the hasAssumedIRAttr interface.
  if (Attribute::isEnumAttrKind(IRAttributeKind)) {
    bool IsKnown;
    return AA::hasAssumedIRAttr<IRAttributeKind>(
        A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
  }

  const auto *AA =
      A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
  if (!AA)
    return false;
  const StateType &CBArgumentState =
      static_cast<const StateType &>(AA->getState());

  LLVM_DEBUG(dbgs() << "[Attributor] Bridging call site context to argument"
                    << " Position: " << Pos
                    << " CB Arg state: " << CBArgumentState << "\n");

  // NOTE: If we want to do call site grouping it should happen here.
  State ^= CBArgumentState;
  return true;
}
/// Helper class for generic deduction: call site argument -> argument position.
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          bool BridgeCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
struct AAArgumentFromCallSiteArguments : public BaseType {
  AAArgumentFromCallSiteArguments(const IRPosition &IRP, Attributor &A)
      : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    StateType S = StateType::getBestState(this->getState());

    if (BridgeCallBaseContext) {
      bool Success =
          getArgumentStateFromCallBaseContext<AAType, BaseType, StateType,
                                              IRAttributeKind>(
              A, *this, this->getIRPosition(), S);
      if (Success)
        return clampStateAndIndicateChange<StateType>(this->getState(), S);
    }
    clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(A, *this,
                                                                    S);

    // TODO: If we know we visited all incoming values, thus none are assumed
    // dead, we can take the known information from the state T.
    return clampStateAndIndicateChange<StateType>(this->getState(), S);
  }
};
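// Note (for exposition): AAReturnedFromReturnedValues and
// AAArgumentFromCallSiteArguments are the two directions of the same generic
// deduction scheme. The former pulls information from return values into the
// returned position; the latter pulls information from all call site
// arguments into the corresponding argument position.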
/// Helper class for generic replication: function returned -> cs returned.
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool IntroduceCallBaseContext = false,
          Attribute::AttrKind IRAttributeKind = AAType::IRAttributeKind>
struct AACalleeToCallSite : public BaseType {
  AACalleeToCallSite(const IRPosition &IRP, Attributor &A) : BaseType(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto IRPKind = this->getIRPosition().getPositionKind();
    assert((IRPKind == IRPosition::IRP_CALL_SITE_RETURNED ||
            IRPKind == IRPosition::IRP_CALL_SITE) &&
           "Can only wrap function returned positions for call site "
           "returned positions!");
    auto &S = this->getState();

    CallBase &CB = cast<CallBase>(this->getAnchorValue());
    if (IntroduceCallBaseContext)
      LLVM_DEBUG(dbgs() << "[Attributor] Introducing call base context:" << CB
                        << "\n");

    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    auto CalleePred = [&](ArrayRef<const Function *> Callees) {
      for (const Function *Callee : Callees) {
        IRPosition FnPos =
            IRPKind == llvm::IRPosition::IRP_CALL_SITE_RETURNED
                ? IRPosition::returned(*Callee,
                                       IntroduceCallBaseContext ? &CB : nullptr)
                : IRPosition::function(
                      *Callee, IntroduceCallBaseContext ? &CB : nullptr);
        // If possible, use the hasAssumedIRAttr interface.
        if (Attribute::isEnumAttrKind(IRAttributeKind)) {
          bool IsKnown;
          if (!AA::hasAssumedIRAttr<IRAttributeKind>(
                  A, this, FnPos, DepClassTy::REQUIRED, IsKnown))
            return false;
          continue;
        }

        const AAType *AA =
            A.getAAFor<AAType>(*this, FnPos, DepClassTy::REQUIRED);
        if (!AA)
          return false;
        Changed |= clampStateAndIndicateChange(S, AA->getState());
        if (S.isAtFixpoint())
          return S.isValidState();
      }
      return true;
    };
    if (!A.checkForAllCallees(CalleePred, *this, CB))
      return S.indicatePessimisticFixpoint();
    return Changed;
  }
};
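// For illustration: most call-site AAs later in this file are thin
// instantiations of this wrapper, e.g.
//   struct AANoUnwindCallSite final
//       : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> { ... };
// which simply re-uses the callee's deduction at the call site.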
/// Helper function to accumulate uses.
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInContext(AAType &AA, Attributor &A,
                                MustBeExecutedContextExplorer &Explorer,
                                const Instruction *CtxI,
                                SetVector<const Use *> &Uses,
                                StateType &State) {
  auto EIt = Explorer.begin(CtxI), EEnd = Explorer.end(CtxI);
  for (unsigned u = 0; u < Uses.size(); ++u) {
    const Use *U = Uses[u];
    if (const Instruction *UserI = dyn_cast<Instruction>(U->getUser())) {
      bool Found = Explorer.findInContextOf(UserI, EIt, EEnd);
      if (Found && AA.followUseInMBEC(A, U, UserI, State))
        for (const Use &Us : UserI->uses())
          Uses.insert(&Us);
    }
  }
}
/// Use the must-be-executed-context around \p CtxI to add information into
/// \p S. The AAType class is required to have a `followUseInMBEC` method with
/// the following signature and behaviour:
///
/// bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I)
/// U - Underlying use.
/// I - The user of the \p U.
/// Returns true if the value should be tracked transitively.
template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInMBEC(AAType &AA, Attributor &A, StateType &S,
                             Instruction &CtxI) {
  MustBeExecutedContextExplorer *Explorer =
      A.getInfoCache().getMustBeExecutedContextExplorer();
  if (!Explorer)
    return;

  // Container for (transitive) uses of the associated value.
  SetVector<const Use *> Uses;
  for (const Use &U : AA.getIRPosition().getAssociatedValue().uses())
    Uses.insert(&U);

  followUsesInContext<AAType>(AA, A, *Explorer, &CtxI, Uses, S);

  if (S.isAtFixpoint())
    return;

  SmallVector<const BranchInst *, 4> BrInsts;
  auto Pred = [&](const Instruction *I) {
    if (const BranchInst *Br = dyn_cast<BranchInst>(I))
      if (Br->isConditional())
        BrInsts.push_back(Br);
    return true;
  };
  Explorer->checkForAllContext(&CtxI, Pred);

  // For each conditional branch in the context, gather the state on every
  // successor and combine the children into the parent state: what is known
  // on all paths after the branch is known after the branch.
  for (const BranchInst *Br : BrInsts) {
    StateType ParentState;

    // The known state of the parent is a conjunction of the children's known
    // states, so it is initialized with the best (optimistic) state.
    ParentState.indicateOptimisticFixpoint();

    for (const BasicBlock *BB : Br->successors()) {
      StateType ChildState;

      size_t BeforeSize = Uses.size();
      followUsesInContext(AA, A, *Explorer, &BB->front(), Uses, ChildState);

      // Erase uses which only appear in the child.
      for (auto It = Uses.begin() + BeforeSize; It != Uses.end();)
        It = Uses.erase(It);

      ParentState &= ChildState;
    }

    // Use only known state.
    S += ParentState;
  }
}
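// For illustration: AANonNullImpl below uses this machinery from its
// initialize(), via
//   followUsesInMBEC(*this, A, getState(), *CtxI);
// together with a followUseInMBEC overload that inspects each use for
// nonnull/dereferenceable evidence.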
namespace PointerInfo {

// (AA::PointerInfo::State.)

  /// Return the worst possible representable state.
  static State getWorstState(const State &SIS) {
    State R;
    R.indicatePessimisticFixpoint();
    return R;
  }

  /// Call \p CB on all accesses that might interfere with \p Range and return
  /// true if all such accesses were known and the callback returned true for
  /// all of them, false otherwise.
  bool forallInterferingAccesses(
      AA::RangeTy Range,
      function_ref<bool(const AAPointerInfo::Access &, bool)> CB) const {
    if (!isValidState())
      return false;
    for (const auto &It : OffsetBins) {
      AA::RangeTy ItRange = It.getFirst();
      if (!Range.mayOverlap(ItRange))
        continue;
      bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
      for (auto Index : It.getSecond()) {
        auto &Access = AccessList[Index];
        if (!CB(Access, IsExact))
          return false;
      }
    }
    return true;
  }

  /// Narrow \p Range to the ranges of all accesses of \p I, then defer to the
  /// range-based version above.
  bool forallInterferingAccesses(
      Instruction &I, function_ref<bool(const AAPointerInfo::Access &, bool)> CB,
      AA::RangeTy &Range) const {
    auto LocalList = RemoteIMap.find(&I);
    if (LocalList == RemoteIMap.end())
      return true;
    for (unsigned Index : LocalList->getSecond()) {
      for (auto &R : AccessList[Index]) {
        Range &= R;
        if (Range.offsetAndSizeAreUnknown())
          break;
      }
    }
    return forallInterferingAccesses(Range, CB);
  }

  /// Add a new access; if one already exists for the same local and remote
  /// instruction the two are combined and the offset bins updated.
  ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges,
                         Instruction &I, std::optional<Value *> Content,
                         AAPointerInfo::AccessKind Kind, Type *Ty,
                         Instruction *RemoteI = nullptr) {
    RemoteI = RemoteI ? RemoteI : &I;

    // Check if we have an access for this instruction, if not, simply add it.
    auto &LocalList = RemoteIMap[RemoteI];
    bool AccExists = false;
    unsigned AccIndex = AccessList.size();
    for (auto Index : LocalList) {
      auto &A = AccessList[Index];
      if (A.getLocalInst() == &I) {
        AccExists = true;
        AccIndex = Index;
        break;
      }
    }

    auto AddToBins = [&](const AAPointerInfo::RangeList &ToAdd) {
      LLVM_DEBUG(if (ToAdd.size()) dbgs()
                     << "[AAPointerInfo] Inserting access in new offset bins\n";);
      for (auto Key : ToAdd)
        OffsetBins[Key].insert(AccIndex);
    };

    if (!AccExists) {
      AccessList.emplace_back(&I, RemoteI, Ranges, Content, Kind, Ty);
      assert((AccessList.size() == AccIndex + 1) &&
             "New Access should have been at AccIndex");
      LocalList.push_back(AccIndex);
      AddToBins(AccessList[AccIndex].getRanges());
      return ChangeStatus::CHANGED;
    }

    // Combine the new access with the existing one and update the offset bins.
    AAPointerInfo::Access Acc(&I, RemoteI, Ranges, Content, Kind, Ty);
    auto &Current = AccessList[AccIndex];
    auto Before = Current;
    Current &= Acc;
    if (Current == Before)
      return ChangeStatus::UNCHANGED;

    auto &ExistingRanges = Before.getRanges();
    auto &NewRanges = Current.getRanges();

    // Ranges that are in the old access but not the new one need to be removed
    // from the offset bins.
    AAPointerInfo::RangeList ToRemove;
    AAPointerInfo::RangeList::set_difference(ExistingRanges, NewRanges,
                                             ToRemove);
    LLVM_DEBUG(if (ToRemove.size()) dbgs()
                   << "[AAPointerInfo] Removing access from old offset bins\n";);
    for (auto Key : ToRemove) {
      auto &Bin = OffsetBins[Key];
      assert(Bin.count(AccIndex) &&
             "Expected bin to actually contain the Access.");
      Bin.erase(AccIndex);
    }

    // Ranges that are in the new access but not the old one need to be added.
    AAPointerInfo::RangeList ToAdd;
    AAPointerInfo::RangeList::set_difference(NewRanges, ExistingRanges, ToAdd);
    AddToBins(ToAdd);
    return ChangeStatus::CHANGED;
  }
struct AAPointerInfoImpl
    : public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
  using BaseTy = StateWrapper<AA::PointerInfo::State, AAPointerInfo>;
  AAPointerInfoImpl(const IRPosition &IRP, Attributor &A) : BaseTy(IRP) {}

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return std::string("PointerInfo ") +
           (isValidState() ? (std::string("#") +
                              std::to_string(OffsetBins.size()) + " bins")
                           : "<invalid>") +
           (reachesReturn()
                ? (" (returned:" +
                   join(map_range(ReturnedOffsets,
                                  [](int64_t O) { return std::to_string(O); }),
                        ", ") +
                   ")")
                : "");
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    return AAPointerInfo::manifest(A);
  }

  virtual const_bin_iterator begin() const override { return State::begin(); }
  virtual const_bin_iterator end() const override { return State::end(); }
  virtual int64_t numOffsetBins() const override {
    return State::numOffsetBins();
  }
  virtual bool reachesReturn() const override {
    return !ReturnedOffsets.isUnassigned();
  }
  virtual void addReturnedOffsetsTo(OffsetInfo &OI) const override {
    if (ReturnedOffsets.isUnknown()) {
      OI.setUnknown();
      return;
    }

    OffsetInfo MergedOI;
    for (auto Offset : ReturnedOffsets) {
      OffsetInfo TmpOI = OI;
      TmpOI.addToAll(Offset);
      MergedOI.merge(TmpOI);
    }
    OI = std::move(MergedOI);
  }

  ChangeStatus setReachesReturn(const OffsetInfo &ReachedReturnedOffsets) {
    if (ReturnedOffsets.isUnknown())
      return ChangeStatus::UNCHANGED;
    if (ReachedReturnedOffsets.isUnknown()) {
      ReturnedOffsets.setUnknown();
      return ChangeStatus::CHANGED;
    }
    if (ReturnedOffsets.merge(ReachedReturnedOffsets))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }
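  // Note (for exposition): ReturnedOffsets forms a small lattice:
  // unassigned -> set of concrete offsets -> unknown. setReachesReturn only
  // reports CHANGED when the merge actually grew the set or collapsed it to
  // unknown, which keeps the fixpoint iteration monotonic.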
  /// Call \p CB on all accesses that might interfere with \p Range and return
  /// true if all such accesses were known and the callback returned true for
  /// all of them, false otherwise.
  bool forallInterferingAccesses(
      AA::RangeTy Range,
      function_ref<bool(const AAPointerInfo::Access &, bool)> CB) const {
    return State::forallInterferingAccesses(Range, CB);
  }

  bool forallInterferingAccesses(
      Attributor &A, const AbstractAttribute &QueryingAA, Instruction &I,
      bool FindInterferingWrites, bool FindInterferingReads,
      function_ref<bool(const Access &, bool)> UserCB, bool &HasBeenWrittenTo,
      AA::RangeTy &Range,
      function_ref<bool(const Access &)> SkipCB) {
    HasBeenWrittenTo = false;

    SmallPtrSet<const Access *, 8> DominatingWrites;
    SmallVector<std::pair<const Access *, bool>, 8> InterferingAccesses;

    Function &Scope = *I.getFunction();
    bool IsKnownNoSync;
    bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
        A, &QueryingAA, IRPosition::function(Scope), DepClassTy::OPTIONAL,
        IsKnownNoSync);
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(Scope), &QueryingAA, DepClassTy::NONE);
    bool AllInSameNoSyncFn = IsAssumedNoSync;
    bool InstIsExecutedByInitialThreadOnly =
        ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(I);

    // If the function is not ending in aligned barriers, we need the stores to
    // be in aligned barriers. The load being in one is not sufficient since the
    // store might be executed by a thread that disappears after.
    bool InstIsExecutedInAlignedRegion =
        FindInterferingReads && ExecDomainAA &&
        ExecDomainAA->isExecutedInAlignedRegion(A, I);

    if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
      A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    InformationCache &InfoCache = A.getInfoCache();
    bool IsThreadLocalObj =
        AA::isAssumedThreadLocalObject(A, getAssociatedValue(), *this);

    // Helper to determine if we need to consider threading, which we cannot
    // right now. However, if the function is (assumed) nosync or the thread
    // executing all instructions is the main thread only, we can ignore
    // threading.
    auto CanIgnoreThreadingForInst = [&](const Instruction &I) -> bool {
      if (IsThreadLocalObj || AllInSameNoSyncFn)
        return true;
      const auto *FnExecDomainAA =
          I.getFunction() == &Scope
              ? ExecDomainAA
              : A.lookupAAFor<AAExecutionDomain>(
                    IRPosition::function(*I.getFunction()), &QueryingAA,
                    DepClassTy::NONE);
      if (!FnExecDomainAA)
        return false;
      if (InstIsExecutedInAlignedRegion ||
          (FindInterferingWrites &&
           FnExecDomainAA->isExecutedInAlignedRegion(A, I))) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
        return true;
      }
      if (InstIsExecutedByInitialThreadOnly &&
          FnExecDomainAA->isExecutedByInitialThreadOnly(I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
        return true;
      }
      return false;
    };

    // Helper to determine if the access is executed by the same thread as the
    // given instruction; for now it is sufficient to avoid any potential
    // threading effects as we cannot deal with them anyway.
    auto CanIgnoreThreading = [&](const Access &Acc) -> bool {
      return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
             (Acc.getRemoteInst() != Acc.getLocalInst() &&
              CanIgnoreThreadingForInst(*Acc.getLocalInst()));
    };
    // TODO: Use inter-procedural reachability and dominance.
    bool IsKnownNoRecurse;
    AA::hasAssumedIRAttr<Attribute::NoRecurse>(
        A, this, IRPosition::function(Scope), DepClassTy::OPTIONAL,
        IsKnownNoRecurse);

    // TODO: Use reaching kernels from AAKernelInfo (or move it to
    // AAExecutionDomain) such that we allow scopes other than kernels as long
    // as the reaching kernels are disjoint.
    bool InstInKernel = Scope.hasFnAttribute("kernel");
    bool ObjHasKernelLifetime = false;
    const bool UseDominanceReasoning =
        FindInterferingWrites && IsKnownNoRecurse;
    const DominatorTree *DT =
        InfoCache.getAnalysisResultForFunction<DominatorTreeAnalysis>(Scope);

    // Helper to check if a value has "kernel lifetime", that is it will not
    // outlive a GPU kernel. This is true for shared, constant, and local
    // globals on AMD and NVIDIA GPUs.
    auto HasKernelLifetime = [&](Value *V, Module &M) {
      if (!AA::isGPU(M))
        return false;
      switch (AA::GPUAddressSpace(V->getType()->getPointerAddressSpace())) {
      case AA::GPUAddressSpace::Shared:
      case AA::GPUAddressSpace::Constant:
      case AA::GPUAddressSpace::Local:
        return true;
      default:
        return false;
      };
    };

    // The IsLiveInCalleeCB will be used by the AA::isPotentiallyReachable query
    // to determine if we should look at reachability from the callee. For
    // certain pointers we know the lifetime and we do not have to step into the
    // callee to determine reachability as the pointer would be dead in the
    // callee. See the conditional initialization below.
    std::function<bool(const Function &)> IsLiveInCalleeCB;

    if (auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
      // If the alloca containing function is not recursive the alloca
      // must be dead in the callee.
      const Function *AIFn = AI->getFunction();
      ObjHasKernelLifetime = AIFn->hasFnAttribute("kernel");
      bool IsKnownNoRecurse;
      if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
              A, this, IRPosition::function(*AIFn), DepClassTy::OPTIONAL,
              IsKnownNoRecurse)) {
        IsLiveInCalleeCB = [AIFn](const Function &Fn) { return AIFn != &Fn; };
      }
    } else if (auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
      // If the global has kernel lifetime we can stop if we reach a kernel
      // as it is "dead" in the (unknown) callees.
      ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
      if (ObjHasKernelLifetime)
        IsLiveInCalleeCB = [](const Function &Fn) {
          return !Fn.hasFnAttribute("kernel");
        };
    }

    // Set of accesses/instructions that will overwrite the result and are
    // therefore blockers in the reachability traversal.
    AA::InstExclusionSetTy ExclusionSet;

    auto AccessCB = [&](const Access &Acc, bool Exact) {
      Function *AccScope = Acc.getRemoteInst()->getFunction();
      bool AccInSameScope = AccScope == &Scope;

      // If the object has kernel lifetime we can ignore accesses only
      // reachable by other kernels. For now we only skip accesses *in* other
      // kernels.
      if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
          AccScope->hasFnAttribute("kernel"))
        return true;

      if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &I) {
        if (Acc.isWrite() || (isa<LoadInst>(I) && Acc.isWriteOrAssumption()))
          ExclusionSet.insert(Acc.getRemoteInst());
      }

      if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
          (!FindInterferingReads || !Acc.isRead()))
        return true;

      bool Dominates = FindInterferingWrites && DT && Exact &&
                       Acc.isMustAccess() && AccInSameScope &&
                       DT->dominates(Acc.getRemoteInst(), &I);
      if (Dominates)
        DominatingWrites.insert(&Acc);

      // Track if all interesting accesses are in the same `nosync` function as
      // the given instruction.
      AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &Scope;

      InterferingAccesses.push_back({&Acc, Exact});
      return true;
    };
    if (!State::forallInterferingAccesses(I, AccessCB, Range))
      return false;

    HasBeenWrittenTo = !DominatingWrites.empty();
    // Dominating writes form a chain, find the least/lowest member.
    Instruction *LeastDominatingWriteInst = nullptr;
    for (const Access *Acc : DominatingWrites) {
      if (!LeastDominatingWriteInst) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      } else if (DT->dominates(LeastDominatingWriteInst,
                               Acc->getRemoteInst())) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      }
    }

    // Helper to determine if we can skip a specific write access.
    auto CanSkipAccess = [&](const Access &Acc, bool Exact) {
      if (SkipCB && SkipCB(Acc))
        return true;
      if (!CanIgnoreThreading(Acc))
        return false;

      // Check read (RAW) dependences and write (WAR) dependences as necessary.
      // If we are visiting a write access we need to make sure no reads are
      // reachable from the last write; if we are visiting a read we need to
      // make sure the last write is not reachable from the read.
      bool ReadChecked = !FindInterferingReads;
      bool WriteChecked = !FindInterferingWrites;

      if (!ReadChecked &&
          !AA::isPotentiallyReachable(A, I, *Acc.getRemoteInst(), QueryingAA,
                                      &ExclusionSet, IsLiveInCalleeCB))
        ReadChecked = true;
      if (!WriteChecked) {
        if (!AA::isPotentiallyReachable(A, *Acc.getRemoteInst(), I, QueryingAA,
                                        &ExclusionSet, IsLiveInCalleeCB))
          WriteChecked = true;
      }

      // If we still might be affected by the write of the access but there are
      // dominating writes in the function of the instruction
      // (HasBeenWrittenTo), we can try to reason that the access is overwritten
      // by one of the dominating writes.
      if (!WriteChecked && HasBeenWrittenTo &&
          Acc.getRemoteInst()->getFunction() != &Scope) {
        const auto *FnReachabilityAA = A.getAAFor<AAInterFnReachability>(
            QueryingAA, IRPosition::function(Scope), DepClassTy::OPTIONAL);
        if (FnReachabilityAA) {
          // Without going backwards in the call tree, can we reach the access
          // from the least dominating write?
          if (!FnReachabilityAA->instructionCanReach(
                  A, *LeastDominatingWriteInst,
                  *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
            WriteChecked = true;
        }
      }

      if (ReadChecked && WriteChecked)
        return true;

      if (!DT || !UseDominanceReasoning)
        return false;
      if (!DominatingWrites.count(&Acc))
        return false;
      return LeastDominatingWriteInst != Acc.getRemoteInst();
    };

    // Run the user callback on all accesses we cannot skip and return if
    // that succeeded for all or not.
    for (auto &It : InterferingAccesses) {
      if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
          !CanSkipAccess(*It.first, It.second)) {
        if (!UserCB(*It.first, It.second))
          return false;
      }
    }
    return true;
  }
  ChangeStatus translateAndAddStateFromCallee(Attributor &A,
                                              const AAPointerInfo &OtherAA,
                                              CallBase &CB) {
    using namespace AA::PointerInfo;
    if (!OtherAA.getState().isValidState() || !isValidState())
      return indicatePessimisticFixpoint();

    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);
    bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
    Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);

    // Combine the accesses bin by bin.
    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (IsByval && !RAcc.isRead())
          continue;
        bool UsedAssumedInformation = false;
        AccessKind AK = RAcc.getKind();
        auto Content = A.translateArgumentToCallSiteContent(
            RAcc.getContent(), CB, *this, UsedAssumedInformation);
        AK = AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
        AK = AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));

        Changed |= addAccess(A, RAcc.getRanges(), CB, Content, AK,
                             RAcc.getType(), RAcc.getRemoteInst());
      }
    }
    return Changed;
  }
  ChangeStatus translateAndAddState(Attributor &A, const AAPointerInfo &OtherAA,
                                    const OffsetInfo &Offsets, CallBase &CB,
                                    bool IsMustAcc) {
    using namespace AA::PointerInfo;
    if (!OtherAA.getState().isValidState() || !isValidState())
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);

    // Combine the accesses bin by bin.
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (!IsMustAcc && RAcc.isAssumption())
          continue;
        for (auto Offset : Offsets) {
          auto NewRanges = Offset == AA::RangeTy::Unknown
                               ? AA::RangeTy::getUnknown()
                               : RAcc.getRanges();
          if (!NewRanges.isUnknown()) {
            NewRanges.addToAllOffsets(Offset);
          }
          AccessKind AK = RAcc.getKind();
          if (!IsMustAcc)
            AK = AccessKind((AK & ~AK_MUST) | AK_MAY);
          Changed |= addAccess(A, NewRanges, CB, RAcc.getContent(), AK,
                               RAcc.getType(), RAcc.getRemoteInst());
        }
      }
    }
    return Changed;
  }
  /// Statistic tracking for all AAPointerInfo implementations.
  /// See AbstractAttribute::trackStatistics().
  void trackPointerInfoStatistics(const IRPosition &IRP) const {}

  /// Dump the state into \p O.
  void dumpState(raw_ostream &O) {
    for (auto &It : OffsetBins) {
      O << "[" << It.first.Offset << "-" << It.first.Offset + It.first.Size
        << "] : " << It.getSecond().size() << "\n";
      for (auto AccIndex : It.getSecond()) {
        auto &Acc = AccessList[AccIndex];
        O << "     - " << Acc.getKind() << " - " << *Acc.getLocalInst() << "\n";
        if (Acc.getLocalInst() != Acc.getRemoteInst())
          O << "     -->                         " << *Acc.getRemoteInst()
            << "\n";
        if (!Acc.isWrittenValueYetUndetermined()) {
          if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
            O << "       - c: func " << Acc.getWrittenValue()->getName()
              << "\n";
          else if (Acc.getWrittenValue())
            O << "       - c: " << *Acc.getWrittenValue() << "\n";
          else
            O << "       - c: <unknown>\n";
        }
      }
    }
  }
};
struct AAPointerInfoFloating : public AAPointerInfoImpl {
  using AccessKind = AAPointerInfo::AccessKind;
  AAPointerInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  /// Deal with an access and signal if it was handled successfully.
  bool handleAccess(Attributor &A, Instruction &I,
                    std::optional<Value *> Content, AccessKind Kind,
                    SmallVectorImpl<int64_t> &Offsets, ChangeStatus &Changed,
                    Type &Ty) {
    using namespace AA::PointerInfo;
    const DataLayout &DL = A.getDataLayout();
    TypeSize AccessSize = DL.getTypeStoreSize(&Ty);

    // Handle vector stores with constant content element-wise, if the vector
    // type is fixed and the content is a constant we can decompose.
    auto *VT = dyn_cast<FixedVectorType>(&Ty);
    if (!VT || VT->getElementCount().isScalable() ||
        !Content.value_or(nullptr) || !isa<Constant>(*Content) ||
        (*Content)->getType() != VT ||
        DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
      Changed =
          Changed | addAccess(A, {Offsets, AccessSize}, I, Content, Kind, &Ty);
    } else {
      Type *ElementType = VT->getElementType();
      int64_t ElementSize = DL.getTypeStoreSize(ElementType).getFixedValue();
      auto *ConstContent = cast<Constant>(*Content);
      Type *Int32Ty = Type::getInt32Ty(ElementType->getContext());
      SmallVector<int64_t> ElementOffsets(Offsets.begin(), Offsets.end());

      for (int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
        Value *ElementContent = ConstantExpr::getExtractElement(
            ConstContent, ConstantInt::get(Int32Ty, i));

        // Add the element access.
        Changed = Changed | addAccess(A, {ElementOffsets, ElementSize}, I,
                                      ElementContent, Kind, ElementType);

        // Advance the offsets for the next element.
        for (auto &ElementOffset : ElementOffsets)
          ElementOffset += ElementSize;
      }
    }
    return true;
  };

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// If the indices to \p GEP can be traced to constants, incorporate all
  /// of these into \p UsrOI.
  ///
  /// \return true iff \p UsrOI is updated.
  bool collectConstantsForGEP(Attributor &A, const DataLayout &DL,
                              OffsetInfo &UsrOI, const OffsetInfo &PtrOI,
                              const GEPOperator *GEP);

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};
bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &A,
                                                   const DataLayout &DL,
                                                   OffsetInfo &UsrOI,
                                                   const OffsetInfo &PtrOI,
                                                   const GEPOperator *GEP) {
  unsigned BitWidth = DL.getIndexTypeSizeInBits(GEP->getType());
  SmallMapVector<Value *, APInt, 4> VariableOffsets;
  APInt ConstantOffset(BitWidth, 0);

  assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
         "Don't look for constant values if the offset has already been "
         "determined to be unknown.");

  if (!GEP->collectOffset(DL, BitWidth, VariableOffsets, ConstantOffset)) {
    UsrOI.setUnknown();
    return true;
  }

  LLVM_DEBUG(dbgs() << "[AAPointerInfo] GEP offset is "
                    << (VariableOffsets.empty() ? "" : "not") << " constant "
                    << *GEP << "\n");

  auto Union = PtrOI;
  Union.addToAll(ConstantOffset.getSExtValue());

  // Each VI in VariableOffsets has a set of potential constant values. Every
  // combination of elements, picked one each from these sets, is separately
  // added to the original set of offsets, thus resulting in more offsets.
  for (const auto &VI : VariableOffsets) {
    auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
        *this, IRPosition::value(*VI.first), DepClassTy::OPTIONAL);
    if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
      UsrOI.setUnknown();
      return true;
    }

    // UndefValue is treated as a zero, which leaves Union as is.
    if (PotentialConstantsAA->undefIsContained())
      continue;

    // We need at least one constant in every set to compute an actual offset.
    // Otherwise, we end up pessimizing AAPointerInfo by respecting offsets
    // that don't actually exist. In other words, the absence of constant
    // values implies that the operation can be assumed dead for now.
    auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
    if (AssumedSet.empty())
      return false;

    OffsetInfo Product;
    for (const auto &ConstOffset : AssumedSet) {
      auto CopyPerOffset = Union;
      CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
                             VI.second.getZExtValue());
      Product.merge(CopyPerOffset);
    }
    Union = Product;
  }

  UsrOI = std::move(Union);
  return true;
}
ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &A) {
  using namespace AA::PointerInfo;
  ChangeStatus Changed = ChangeStatus::UNCHANGED;
  const DataLayout &DL = A.getDataLayout();
  Value &AssociatedValue = getAssociatedValue();

  DenseMap<Value *, OffsetInfo> OffsetInfoMap;
  OffsetInfoMap[&AssociatedValue].insert(0);

  auto HandlePassthroughUser = [&](Value *Usr, Value *CurPtr, bool &Follow) {
    // One does not simply walk into a map and assign a reference to a possibly
    // new location. That can cause an invalidation before the assignment
    // happens, like so:
    //
    //   OffsetInfoMap[Usr] = OffsetInfoMap[CurPtr]; /* bad idea! */
    //
    // The RHS is a reference that may be invalidated by an insertion caused by
    // the LHS. So we ensure that the side-effect of the LHS happens first.
    assert(OffsetInfoMap.contains(CurPtr) &&
           "CurPtr does not exist in the map!");

    auto &UsrOI = OffsetInfoMap[Usr];
    auto &PtrOI = OffsetInfoMap[CurPtr];
    assert(!PtrOI.isUnassigned() &&
           "Cannot pass through if the input Ptr was not visited!");
    UsrOI.merge(PtrOI);
    Follow = true;
    return true;
  };

  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    Value *CurPtr = U.get();
    User *Usr = U.getUser();
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Analyze " << *CurPtr << " in " << *Usr
                      << "\n");
    assert(OffsetInfoMap.count(CurPtr) &&
           "The current pointer offset should have been seeded!");
    assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
           "Current pointer should be assigned");
    if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Usr)) {
      if (CE->isCast())
        return HandlePassthroughUser(Usr, CurPtr, Follow);
      if (!isa<GEPOperator>(CE)) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled constant user " << *CE
                          << "\n");
        return false;
      }
    }
    if (auto *GEP = dyn_cast<GEPOperator>(Usr)) {
      // Note the order here, the Usr access might change the map, CurPtr is
      // already in it though.
      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (UsrOI.isUnknown())
        return true;

      if (PtrOI.isUnknown()) {
        Follow = true;
        UsrOI.setUnknown();
        return true;
      }

      Follow = collectConstantsForGEP(A, DL, UsrOI, PtrOI, GEP);
      return true;
    }
    if (isa<PtrToIntInst>(Usr))
      return false;
    if (isa<CastInst>(Usr) || isa<SelectInst>(Usr))
      return HandlePassthroughUser(Usr, CurPtr, Follow);

    // Returns are allowed if they are in the associated function. We remember
    // the offsets the pointer may reach in the return.
    if (auto *RI = dyn_cast<ReturnInst>(Usr)) {
      if (RI->getFunction() == getAssociatedFunction()) {
        auto &PtrOI = OffsetInfoMap[CurPtr];
        Changed |= setReachesReturn(PtrOI);
        return true;
      }
      return false;
    }
    // For PHIs we need to take care of the recurrence explicitly as the value
    // might change while we iterate through a loop. For now, we give up if
    // the PHI is not invariant.
    if (auto *PHI = dyn_cast<PHINode>(Usr)) {
      // Note the order here, the Usr access might change the map, CurPtr is
      // already in it though.
      bool IsFirstPHIUser = !OffsetInfoMap.count(PHI);
      auto &UsrOI = OffsetInfoMap[PHI];
      auto &PtrOI = OffsetInfoMap[CurPtr];

      // Check if the PHI operand has already an unknown offset as we can't
      // improve on that anymore.
      if (PtrOI.isUnknown()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand offset unknown "
                          << *CurPtr << " in " << *PHI << "\n");
        Follow = !UsrOI.isUnknown();
        UsrOI.setUnknown();
        return true;
      }

      // Check if the PHI is invariant (so far).
      if (UsrOI == PtrOI) {
        assert(!PtrOI.isUnassigned() &&
               "Cannot assign if the current Ptr was not visited!");
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant (so far)");
        return true;
      }

      // Check if the PHI operand can be traced back to AssociatedValue.
      APInt Offset(
          DL.getIndexSizeInBits(CurPtr->getType()->getPointerAddressSpace()),
          0);
      Value *CurPtrBase = CurPtr->stripAndAccumulateConstantOffsets(
          DL, Offset, /* AllowNonInbounds */ true);
      auto It = OffsetInfoMap.find(CurPtrBase);
      if (It == OffsetInfoMap.end()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand is too complex "
                          << *CurPtr << " in " << *PHI
                          << " (base: " << *CurPtrBase << ")\n");
        UsrOI.setUnknown();
        Follow = true;
        return true;
      }

      // Check if the PHI operand is not dependent on the PHI itself. Every
      // recurrence is a cyclic net of PHIs in the data flow, and has an
      // equivalent Cycle in the control flow. One of those PHIs must be in
      // the header of that control flow Cycle. It is sufficient to check the
      // PHIs in every Cycle header; if such a node is marked unknown, this
      // will eventually propagate through the whole net of PHIs.
      const auto *CI =
          A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
              *PHI->getFunction());
      if (mayBeInCycle(CI, cast<Instruction>(Usr), /* HeaderOnly */ true)) {
        auto BaseOI = It->getSecond();
        BaseOI.addToAll(Offset.getZExtValue());
        if (IsFirstPHIUser || BaseOI == UsrOI) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant " << *CurPtr
                            << " in " << *Usr << "\n");
          return HandlePassthroughUser(Usr, CurPtr, Follow);
        }

        LLVM_DEBUG(
            dbgs() << "[AAPointerInfo] PHI operand pointer offset mismatch "
                   << *CurPtr << " in " << *PHI << "\n");
        UsrOI.setUnknown();
        Follow = true;
        return true;
      }

      UsrOI.merge(PtrOI);
      Follow = true;
      return true;
    }
    if (auto *LoadI = dyn_cast<LoadInst>(Usr)) {
      // If the access is to a pointer that may or may not be the associated
      // value, e.g. due to a PHI, we cannot assume it will be read.
      AccessKind AK = AccessKind::AK_R;
      if (getUnderlyingObject(CurPtr) == &AssociatedValue)
        AK = AccessKind(AK | AccessKind::AK_MUST);
      else
        AK = AccessKind(AK | AccessKind::AK_MAY);
      if (!handleAccess(A, *LoadI, /* Content */ nullptr, AK,
                        OffsetInfoMap[CurPtr].Offsets, Changed,
                        *LoadI->getType()))
        return false;

      auto IsAssumption = [](Instruction &I) {
        if (auto *II = dyn_cast<IntrinsicInst>(&I))
          return II->isAssumeLikeIntrinsic();
        return false;
      };

      auto IsImpactedInRange = [&](Instruction *FromI, Instruction *ToI) {
        // Check if the assumption and the load are executed together without
        // memory modification.
        do {
          if (FromI->mayWriteToMemory() && !IsAssumption(*FromI))
            return true;
          FromI = FromI->getNextNonDebugInstruction();
        } while (FromI && FromI != ToI);
        return false;
      };

      BasicBlock *BB = LoadI->getParent();
      auto IsValidAssume = [&](IntrinsicInst &IntrI) {
        if (IntrI.getIntrinsicID() != Intrinsic::assume)
          return false;
        BasicBlock *IntrBB = IntrI.getParent();
        if (IntrI.getParent() == BB) {
          if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
            return false;
        } else {
          auto PredIt = pred_begin(IntrBB);
          if (PredIt == pred_end(IntrBB))
            return false;
          if ((*PredIt) != BB)
            return false;
          if (++PredIt != pred_end(IntrBB))
            return false;
          for (auto *SuccBB : successors(BB)) {
            if (SuccBB == IntrBB)
              continue;
            if (isa<UnreachableInst>(SuccBB->getTerminator()))
              continue;
            return false;
          }
          if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
                                BB->getTerminator()))
            return false;
          if (IsImpactedInRange(&IntrBB->front(), &IntrI))
            return false;
        }
        return true;
      };

      std::pair<Value *, IntrinsicInst *> Assumption;
      for (const Use &LoadU : LoadI->uses()) {
        if (auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
          if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
            continue;
          for (const Use &CmpU : CmpI->uses()) {
            if (auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
              if (!IsValidAssume(*IntrI))
                continue;
              int Idx = CmpI->getOperandUse(0) == LoadU;
              Assumption = {CmpI->getOperand(Idx), IntrI};
              break;
            }
          }
        }
        if (Assumption.first)
          break;
      }

      // Check if we found an assumption associated with this load.
      if (!Assumption.first || !Assumption.second)
        return true;

      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Assumption found "
                        << *Assumption.second << ": " << *LoadI
                        << " == " << *Assumption.first << "\n");
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (Assumption.first)
        Content = A.getAssumedSimplified(*Assumption.first, *this,
                                         UsedAssumedInformation,
                                         AA::Interprocedural);
      return handleAccess(
          A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
          OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
    }
    auto HandleStoreLike = [&](Instruction &I, Value *ValueOp, Type &ValueTy,
                               ArrayRef<Value *> OtherOps, AccessKind AK) {
      for (auto *OtherOp : OtherOps) {
        if (OtherOp == CurPtr) {
          LLVM_DEBUG(
              dbgs()
              << "[AAPointerInfo] Escaping use in store like instruction " << I
              << "\n");
          return false;
        }
      }

      // If the access is to a pointer that may or may not be the associated
      // value, e.g. due to a PHI, we cannot assume it will be written.
      if (getUnderlyingObject(CurPtr) == &AssociatedValue)
        AK = AccessKind(AK | AccessKind::AK_MUST);
      else
        AK = AccessKind(AK | AccessKind::AK_MAY);
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (ValueOp)
        Content = A.getAssumedSimplified(
            *ValueOp, *this, UsedAssumedInformation, AA::Interprocedural);
      return handleAccess(A, I, Content, AK, OffsetInfoMap[CurPtr].Offsets,
                          Changed, ValueTy);
    };

    if (auto *StoreI = dyn_cast<StoreInst>(Usr))
      return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
                             *StoreI->getValueOperand()->getType(),
                             {StoreI->getValueOperand()}, AccessKind::AK_W);
    if (auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
      return HandleStoreLike(*RMWI, nullptr, *RMWI->getValOperand()->getType(),
                             {RMWI->getValOperand()}, AccessKind::AK_RW);
    if (auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
      return HandleStoreLike(
          *CXI, nullptr, *CXI->getNewValOperand()->getType(),
          {CXI->getCompareOperand(), CXI->getNewValOperand()},
          AccessKind::AK_RW);
    if (auto *CB = dyn_cast<CallBase>(Usr)) {
      if (CB->isLifetimeStartOrEnd())
        return true;
      const auto *TLI =
          A.getInfoCache().getTargetLibraryInfoForFunction(*CB->getFunction());
      if (getFreedOperand(CB, TLI) == U)
        return true;
      if (CB->isArgOperand(&U)) {
        unsigned ArgNo = CB->getArgOperandNo(&U);
        const auto *CSArgPI = A.getAAFor<AAPointerInfo>(
            *this, IRPosition::callsite_argument(*CB, ArgNo),
            DepClassTy::REQUIRED);
        if (!CSArgPI)
          return false;
        bool IsArgMustAcc = (getUnderlyingObject(CurPtr) == &AssociatedValue);
        Changed = translateAndAddState(A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
                                       IsArgMustAcc) |
                  Changed;
        if (!CSArgPI->reachesReturn())
          return isValidState();

        Function *Callee = CB->getCalledFunction();
        if (!Callee || Callee->arg_size() <= ArgNo)
          return false;
        bool UsedAssumedInformation = false;
        auto ReturnedValue = A.getAssumedSimplified(
            IRPosition::returned(*Callee), *this, UsedAssumedInformation,
            AA::ValueScope::Intraprocedural);
        auto *ReturnedArg =
            dyn_cast_or_null<Argument>(ReturnedValue.value_or(nullptr));
        auto *Arg = Callee->getArg(ArgNo);
        if (ReturnedArg && Arg != ReturnedArg)
          return true;
        bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
        const auto *CSRetPI = A.getAAFor<AAPointerInfo>(
            *this, IRPosition::callsite_returned(*CB), DepClassTy::REQUIRED);
        if (!CSRetPI)
          return false;
        OffsetInfo OI = OffsetInfoMap[CurPtr];
        CSArgPI->addReturnedOffsetsTo(OI);
        Changed =
            translateAndAddState(A, *CSRetPI, OI, *CB, IsRetMustAcc) | Changed;
        return isValidState();
      }
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Call user not handled " << *CB
                        << "\n");
      // TODO: Allow some call uses.
      return false;
    }

    LLVM_DEBUG(dbgs() << "[AAPointerInfo] User not handled " << *Usr << "\n");
    return false;
  };
  auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
    assert(OffsetInfoMap.count(OldU) && "Old use should be known already!");
    assert(!OffsetInfoMap[OldU].isUnassigned() && "Old use should be assigned");
    if (OffsetInfoMap.count(NewU)) {
      LLVM_DEBUG({
        if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
          dbgs() << "[AAPointerInfo] Equivalent use callback failed: "
                 << OffsetInfoMap[NewU] << " vs " << OffsetInfoMap[OldU]
                 << "\n";
        }
      });
      return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
    }
    bool Unused;
    return HandlePassthroughUser(NewU.get(), OldU.get(), Unused);
  };
  if (!A.checkForAllUses(UsePred, *this, AssociatedValue,
                         /* CheckBBLivenessOnly */ true, DepClassTy::OPTIONAL,
                         /* IgnoreDroppableUses */ true, EquivalentUseCB)) {
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Check for all uses failed, abort!\n");
    return indicatePessimisticFixpoint();
  }

  LLVM_DEBUG({
    dbgs() << "Accesses by bin after update:\n";
    dumpState(dbgs());
  });

  return Changed;
}
struct AAPointerInfoReturned final : AAPointerInfoImpl {
  AAPointerInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoArgument final : AAPointerInfoFloating {
  AAPointerInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};

struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
  AAPointerInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}
  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    using namespace AA::PointerInfo;
    // We handle memory intrinsics explicitly, at least the first (=
    // destination) and second (= source) arguments as we know how they are
    // accessed.
    if (auto *MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
      ConstantInt *Length = dyn_cast<ConstantInt>(MI->getLength());
      int64_t LengthVal = AA::RangeTy::Unknown;
      if (Length)
        LengthVal = Length->getSExtValue();
      unsigned ArgNo = getIRPosition().getCallSiteArgNo();
      ChangeStatus Changed = ChangeStatus::UNCHANGED;
      if (ArgNo > 1) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled memory intrinsic "
                          << *MI << "\n");
        return indicatePessimisticFixpoint();
      }
      AccessKind Kind =
          ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
      Changed =
          Changed | addAccess(A, {0, LengthVal}, *MI, nullptr, Kind, nullptr);
      LLVM_DEBUG({
        dbgs() << "Accesses by bin after update:\n";
        dumpState(dbgs());
      });

      return Changed;
    }

    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information and then it makes
    //       sense to specialize attributes for call sites arguments instead of
    //       redirecting requests to the callee argument.
    Argument *Arg = getAssociatedArgument();
    if (Arg) {
      const IRPosition &ArgPos = IRPosition::argument(*Arg);
      auto *ArgAA =
          A.getAAFor<AAPointerInfo>(*this, ArgPos, DepClassTy::REQUIRED);
      if (ArgAA && ArgAA->getState().isValidState())
        return translateAndAddStateFromCallee(A, *ArgAA,
                                              *cast<CallBase>(getCtxI()));
      if (!Arg->getParent()->isDeclaration())
        return indicatePessimisticFixpoint();
    }

    bool IsKnownNoCapture;
    if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
      return indicatePessimisticFixpoint();

    bool IsKnown = false;
    if (AA::isAssumedReadNone(A, getIRPosition(), *this, IsKnown))
      return ChangeStatus::UNCHANGED;
    bool ReadOnly = AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown);
    auto Kind =
        ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
    return addAccess(A, AA::RangeTy::getUnknown(), *getCtxI(), nullptr, Kind,
                     nullptr);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};
struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
  AAPointerInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
  }
};
/// -----------------------NoUnwind Function Attribute--------------------------

struct AANoUnwindImpl : AANoUnwind {
  AANoUnwindImpl(const IRPosition &IRP, Attributor &A) : AANoUnwind(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nounwind" : "may-unwind";
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto Opcodes = {
        (unsigned)Instruction::Invoke,      (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call,        (unsigned)Instruction::CleanupRet,
        (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};

    auto CheckForNoUnwind = [&](Instruction &I) {
      if (!I.mayThrow(/* IncludePhaseOneUnwind */ true))
        return true;

      if (const auto *CB = dyn_cast<CallBase>(&I)) {
        bool IsKnownNoUnwind;
        return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, IRPosition::callsite_function(*CB), DepClassTy::REQUIRED,
            IsKnownNoUnwind);
      }
      return false;
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes,
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
};
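// Note (for exposition): the opcode list above restricts the scan to the only
// instructions that can transfer control out of a function by unwinding;
// every other instruction is trivially "nounwind", so checkForAllInstructions
// never has to visit it.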
struct AANoUnwindFunction final : public AANoUnwindImpl {
  AANoUnwindFunction(const IRPosition &IRP, Attributor &A)
      : AANoUnwindImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(nounwind) }
};

/// NoUnwind attribute deduction for a call sites.
struct AANoUnwindCallSite final
    : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
  AANoUnwindCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(nounwind); }
};
/// ------------------------ NoSync Function Attribute -------------------------

bool AANoSync::isAlignedBarrier(const CallBase &CB, bool ExecutedAligned) {
  switch (CB.getIntrinsicID()) {
  case Intrinsic::nvvm_barrier0:
  case Intrinsic::nvvm_barrier0_and:
  case Intrinsic::nvvm_barrier0_or:
  case Intrinsic::nvvm_barrier0_popc:
    return true;
  case Intrinsic::amdgcn_s_barrier:
    if (ExecutedAligned)
      return true;
    break;
  default:
    break;
  }
  return hasAssumption(CB, KnownAssumptionString("ompx_aligned_barrier"));
}

bool AANoSync::isNonRelaxedAtomic(const Instruction *I) {
  if (!I->isAtomic())
    return false;

  if (auto *FI = dyn_cast<FenceInst>(I))
    // All legal orderings for fence are stronger than monotonic.
    return FI->getSyncScopeID() != SyncScope::SingleThread;
  if (auto *AI = dyn_cast<AtomicCmpXchgInst>(I)) {
    // Unordered is not a legal ordering for cmpxchg.
    return (AI->getSuccessOrdering() != AtomicOrdering::Monotonic ||
            AI->getFailureOrdering() != AtomicOrdering::Monotonic);
  }

  AtomicOrdering Ordering;
  switch (I->getOpcode()) {
  case Instruction::AtomicRMW:
    Ordering = cast<AtomicRMWInst>(I)->getOrdering();
    break;
  case Instruction::Store:
    Ordering = cast<StoreInst>(I)->getOrdering();
    break;
  case Instruction::Load:
    Ordering = cast<LoadInst>(I)->getOrdering();
    break;
  default:
    llvm_unreachable(
        "New atomic operations need to be known in the attributor.");
  }

  return (Ordering != AtomicOrdering::Unordered &&
          Ordering != AtomicOrdering::Monotonic);
}

/// Return true if this intrinsic is nosync. This is only used for intrinsics
/// which would be nosync except that they have a volatile flag. All other
/// intrinsics are simply annotated with the nosync attribute in Intrinsics.td.
bool AANoSync::isNoSyncIntrinsic(const Instruction *I) {
  if (auto *MI = dyn_cast<MemIntrinsic>(I))
    return !MI->isVolatile();
  return false;
}
struct AANoSyncImpl : AANoSync {
  AANoSyncImpl(const IRPosition &IRP, Attributor &A) : AANoSync(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(A, nullptr, getIRPosition(),
                                                    DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nosync" : "may-sync";
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckRWInstForNoSync = [&](Instruction &I) {
      return AA::isNoSyncInst(A, I, *this);
    };

    auto CheckForNoSync = [&](Instruction &I) {
      // At this point we handled all read/write effects and they are all
      // nosync, so they can be skipped.
      if (I.mayReadOrWriteMemory())
        return true;

      bool IsKnown;
      CallBase &CB = cast<CallBase>(I);
      if (AA::hasAssumedIRAttr<Attribute::NoSync>(
              A, this, IRPosition::callsite_function(CB), DepClassTy::OPTIONAL,
              IsKnown))
        return true;

      // non-convergent and readnone imply nosync.
      return !CB.isConvergent();
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *this,
                                            UsedAssumedInformation) ||
        !A.checkForAllCallLikeInstructions(CheckForNoSync, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
};

struct AANoSyncFunction final : public AANoSyncImpl {
  AANoSyncFunction(const IRPosition &IRP, Attributor &A)
      : AANoSyncImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(nosync) }
};

/// NoSync attribute deduction for a call sites.
struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
  AANoSyncCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(nosync); }
};
/// ------------------------ No-Free Attributes ----------------------------

struct AANoFreeImpl : public AANoFree {
  AANoFreeImpl(const IRPosition &IRP, Attributor &A) : AANoFree(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(A, nullptr, getIRPosition(),
                                                    DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckForNoFree = [&](Instruction &I) {
      bool IsKnown;
      return AA::hasAssumedIRAttr<Attribute::NoFree>(
          A, this, IRPosition::callsite_function(cast<CallBase>(I)),
          DepClassTy::REQUIRED, IsKnown);
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForNoFree, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nofree" : "may-free";
  }
};

struct AANoFreeFunction final : public AANoFreeImpl {
  AANoFreeFunction(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(nofree) }
};

/// NoFree attribute deduction for a call sites.
struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
  AANoFreeCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(nofree); }
};
struct AANoFreeFloating : AANoFreeImpl {
  AANoFreeFloating(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(nofree)
  }

  /// See Abstract Attribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();

    bool IsKnown;
    if (AA::hasAssumedIRAttr<Attribute::NoFree>(
            A, this, IRPosition::function_scope(IRP), DepClassTy::OPTIONAL,
            IsKnown))
      return ChangeStatus::UNCHANGED;

    Value &AssociatedValue = getIRPosition().getAssociatedValue();
    auto Pred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());
      if (auto *CB = dyn_cast<CallBase>(UserI)) {
        if (CB->isBundleOperand(&U))
          return false;
        if (!CB->isArgOperand(&U))
          return true;
        unsigned ArgNo = CB->getArgOperandNo(&U);

        bool IsKnown;
        return AA::hasAssumedIRAttr<Attribute::NoFree>(
            A, this, IRPosition::callsite_argument(*CB, ArgNo),
            DepClassTy::REQUIRED, IsKnown);
      }

      if (isa<GetElementPtrInst>(UserI) || isa<PHINode>(UserI) ||
          isa<SelectInst>(UserI)) {
        Follow = true;
        return true;
      }
      if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI))
        return true;

      if (isa<ReturnInst>(UserI) && getIRPosition().isArgumentPosition())
        return true;

      // Unknown user.
      return false;
    };
    if (!A.checkForAllUses(Pred, *this, AssociatedValue))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
};

/// NoFree attribute for a function argument.
struct AANoFreeArgument final : AANoFreeFloating {
  AANoFreeArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(nofree) }
};

/// NoFree attribute for call site arguments.
struct AANoFreeCallSiteArgument final : AANoFreeFloating {
  AANoFreeCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information and then it makes
    //       sense to specialize attributes for call sites arguments instead of
    //       redirecting requests to the callee argument.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    bool IsKnown;
    if (AA::hasAssumedIRAttr<Attribute::NoFree>(A, this, ArgPos,
                                                DepClassTy::REQUIRED, IsKnown))
      return ChangeStatus::UNCHANGED;
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(nofree) }
};

/// NoFree attribute for function return value.
struct AANoFreeReturned final : AANoFreeFloating {
  AANoFreeReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {
    llvm_unreachable("NoFree is not applicable to function returns!");
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};

/// NoFree attribute deduction for a call site return value.
struct AANoFreeCallSiteReturned final : AANoFreeFloating {
  AANoFreeCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

  ChangeStatus manifest(Attributor &A) override {
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(nofree) }
};
/// ------------------------ NonNull Argument Attribute ------------------------

bool AANonNull::isImpliedByIR(Attributor &A, const IRPosition &IRP,
                              Attribute::AttrKind ImpliedAttributeKind,
                              bool IgnoreSubsumingPositions) {
  SmallVector<Attribute::AttrKind, 2> AttrKinds;
  AttrKinds.push_back(Attribute::NonNull);
  if (!NullPointerIsDefined(IRP.getAnchorScope(),
                            IRP.getAssociatedType()->getPointerAddressSpace()))
    AttrKinds.push_back(Attribute::Dereferenceable);
  if (A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
    return true;

  DominatorTree *DT = nullptr;
  AssumptionCache *AC = nullptr;
  InformationCache &InfoCache = A.getInfoCache();
  if (const Function *Fn = IRP.getAnchorScope()) {
    if (!Fn->isDeclaration()) {
      DT = InfoCache.getAnalysisResultForFunction<DominatorTreeAnalysis>(*Fn);
      AC = InfoCache.getAnalysisResultForFunction<AssumptionAnalysis>(*Fn);
    }
  }

  SmallVector<AA::ValueAndContext> Worklist;
  if (IRP.getPositionKind() != IRPosition::IRP_RETURNED) {
    Worklist.push_back({IRP.getAssociatedValue(), IRP.getCtxI()});
  } else {
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
            [&](Instruction &I) {
              Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
              return true;
            },
            IRP.getAssociatedFunction(), nullptr, {Instruction::Ret},
            UsedAssumedInformation, false, true))
      return false;
  }

  if (llvm::any_of(Worklist, [&](AA::ValueAndContext VAC) {
        return !isKnownNonZero(
            VAC.getValue(),
            SimplifyQuery(A.getDataLayout(), DT, AC, VAC.getCtxI()));
      }))
    return false;

  A.manifestAttrs(IRP, {Attribute::get(IRP.getAnchorValue().getContext(),
                                       Attribute::NonNull)});
  return true;
}

static int64_t getKnownNonNullAndDerefBytesForUse(
    Attributor &A, const AbstractAttribute &QueryingAA, Value &AssociatedValue,
    const Use *U, const Instruction *I, bool &IsNonNull, bool &TrackUse) {
  TrackUse = false;

  const Value *UseV = U->get();
  if (!UseV->getType()->isPointerTy())
    return 0;

  // We need to follow common pointer manipulation uses to the accesses they
  // feed into.
  if (isa<CastInst>(I)) {
    // Follow all but ptr2int casts.
    TrackUse = !isa<PtrToIntInst>(I);
    return 0;
  }
  if (isa<GetElementPtrInst>(I)) {
    TrackUse = true;
    return 0;
  }

  Type *PtrTy = UseV->getType();
  const Function *F = I->getFunction();
  bool NullPointerIsDefined =
      F ? llvm::NullPointerIsDefined(F, PtrTy->getPointerAddressSpace()) : true;
  const DataLayout &DL = A.getInfoCache().getDL();
  if (const auto *CB = dyn_cast<CallBase>(I)) {
    if (CB->isBundleOperand(U)) {
      if (RetainedKnowledge RK = getKnowledgeFromUse(
              U, {Attribute::NonNull, Attribute::Dereferenceable})) {
        IsNonNull |=
            (RK.AttrKind == Attribute::NonNull || !NullPointerIsDefined);
        return RK.ArgValue;
      }
      return 0;
    }

    if (CB->isCallee(U)) {
      IsNonNull |= !NullPointerIsDefined;
      return 0;
    }

    unsigned ArgNo = CB->getArgOperandNo(U);
    IRPosition IRP = IRPosition::callsite_argument(*CB, ArgNo);
    // As long as we only use known information there is no need to track
    // dependences here.
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(A, &QueryingAA, IRP,
                                             DepClassTy::NONE, IsKnownNonNull);
    IsNonNull |= IsKnownNonNull;
    auto *DerefAA =
        A.getAAFor<AADereferenceable>(QueryingAA, IRP, DepClassTy::NONE);
    return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;
  }

  std::optional<MemoryLocation> Loc = MemoryLocation::getOrNone(I);
  if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
      Loc->Size.isScalable() || I->isVolatile())
    return 0;

  int64_t Offset;
  const Value *Base =
      getMinimalBaseOfPointer(A, QueryingAA, Loc->Ptr, Offset, DL);
  if (Base && Base == &AssociatedValue) {
    int64_t DerefBytes = Loc->Size.getValue() + Offset;
    IsNonNull |= !NullPointerIsDefined;
    return std::max(int64_t(0), DerefBytes);
  }

  /// Corner case when an offset is 0.
  Base = GetPointerBaseWithConstantOffset(Loc->Ptr, Offset, DL,
                                          /*AllowNonInbounds*/ true);
  if (Base && Base == &AssociatedValue && Offset == 0) {
    int64_t DerefBytes = Loc->Size.getValue();
    IsNonNull |= !NullPointerIsDefined;
    return std::max(int64_t(0), DerefBytes);
  }

  return 0;
}
struct AANonNullImpl : AANonNull {
  AANonNullImpl(const IRPosition &IRP, Attributor &A) : AANonNull(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    if (isa<ConstantPointerNull>(V)) {
      indicatePessimisticFixpoint();
      return;
    }

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANonNull::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    getKnownNonNullAndDerefBytesForUse(A, *this, getAssociatedValue(), U, I,
                                       IsNonNull, TrackUse);
    State.setKnown(IsNonNull);
    return TrackUse;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";
  }
};
/// NonNull attribute for a floating value.
struct AANonNullFloating : public AANonNullImpl {
  AANonNullFloating(const IRPosition &IRP, Attributor &A)
      : AANonNullImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckIRP = [&](const IRPosition &IRP) {
      bool IsKnownNonNull;
      return AA::hasAssumedIRAttr<Attribute::NonNull>(
          A, *this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
    };

    bool Stripped;
    bool UsedAssumedInformation = false;
    Value *AssociatedValue = &getAssociatedValue();
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      AA::AnyScope, UsedAssumedInformation))
      Stripped = false;
    else
      Stripped =
          Values.size() != 1 || Values.front().getValue() != AssociatedValue;

    if (!Stripped) {
      bool IsKnown;
      if (auto *PHI = dyn_cast<PHINode>(AssociatedValue))
        if (llvm::all_of(PHI->incoming_values(), [&](Value *Op) {
              return AA::hasAssumedIRAttr<Attribute::NonNull>(
                  A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
                  IsKnown);
            }))
          return ChangeStatus::UNCHANGED;
      if (auto *Select = dyn_cast<SelectInst>(AssociatedValue))
        if (AA::hasAssumedIRAttr<Attribute::NonNull>(
                A, this, IRPosition::value(*Select->getFalseValue()),
                DepClassTy::OPTIONAL, IsKnown) &&
            AA::hasAssumedIRAttr<Attribute::NonNull>(
                A, this, IRPosition::value(*Select->getTrueValue()),
                DepClassTy::OPTIONAL, IsKnown))
          return ChangeStatus::UNCHANGED;

      // If we haven't stripped anything we might still be able to use a
      // different AA, but only if the IRP changes. Effectively when we
      // interpret this not as a call site value but as a floating/argument
      // value.
      const IRPosition AVIRP = IRPosition::value(*AssociatedValue);
      if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
        return indicatePessimisticFixpoint();
      return ChangeStatus::UNCHANGED;
    }

    for (const auto &VAC : Values)
      if (!CheckIRP(IRPosition::value(*VAC.getValue())))
        return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(nonnull) }
};
/// NonNull attribute for function return value.
struct AANonNullReturned final
    : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                   false, AANonNull::IRAttributeKind, false> {
  AANonNullReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                     false, Attribute::NonNull, false>(IRP, A) {
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(nonnull) }
};

/// NonNull attribute for function argument.
struct AANonNullArgument final
    : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
  AANonNullArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(nonnull) }
};

struct AANonNullCallSiteArgument final : AANonNullFloating {
  AANonNullCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANonNullFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(nonnull) }
};

/// NonNull attribute for a call site return position.
struct AANonNullCallSiteReturned final
    : AACalleeToCallSite<AANonNull, AANonNullImpl> {
  AANonNullCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(nonnull) }
};
/// ------------------------ Must-Progress Attributes --------------------------

struct AAMustProgressImpl : public AAMustProgress {
  AAMustProgressImpl(const IRPosition &IRP, Attributor &A)
      : AAMustProgress(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "mustprogress" : "may-not-progress";
  }
};

struct AAMustProgressFunction final : AAMustProgressImpl {
  AAMustProgressFunction(const IRPosition &IRP, Attributor &A)
      : AAMustProgressImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool IsKnown;
    if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
      if (IsKnown)
        return indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;
    }

    auto CheckForMustProgress = [&](AbstractCallSite ACS) {
      IRPosition IPos = IRPosition::callsite_function(*ACS.getInstruction());
      bool IsKnownMustProgress;
      return AA::hasAssumedIRAttr<Attribute::MustProgress>(
          A, this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
          /* IgnoreSubsumingPositions */ true);
    };

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckForMustProgress, *this,
                                /* RequireAllCallSites */ true,
                                AllCallSitesKnown))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(mustprogress)
  }
};

/// MustProgress attribute deduction for a call sites.
struct AAMustProgressCallSite final : AAMustProgressImpl {
  AAMustProgressCallSite(const IRPosition &IRP, Attributor &A)
      : AAMustProgressImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information and then it makes
    //       sense to specialize attributes for call sites arguments instead of
    //       redirecting requests to the callee argument.
    const IRPosition &FnPos = IRPosition::function(*getAnchorScope());
    bool IsKnownMustProgress;
    if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
            A, this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CS_ATTR(mustprogress);
  }
};
/// ------------------------ No-Recurse Attributes ----------------------------

struct AANoRecurseImpl : public AANoRecurse {
  AANoRecurseImpl(const IRPosition &IRP, Attributor &A) : AANoRecurse(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "norecurse" : "may-recurse";
  }
};

struct AANoRecurseFunction final : AANoRecurseImpl {
  AANoRecurseFunction(const IRPosition &IRP, Attributor &A)
      : AANoRecurseImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If all live call sites are known to be no-recurse, we are as well.
    auto CallSitePred = [&](AbstractCallSite ACS) {
      bool IsKnownNoRecurse;
      if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
              A, this,
              IRPosition::function(*ACS.getInstruction()->getFunction()),
              DepClassTy::NONE, IsKnownNoRecurse))
        return false;
      return IsKnownNoRecurse;
    };
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(CallSitePred, *this, true,
                               UsedAssumedInformation)) {
      // If we know all call sites and all are known no-recurse, we are done.
      // If all known call sites, which might not be all that exist, are known
      // to be no-recurse, we are not done but we can continue to assume
      // no-recurse. If one of the call sites we have not visited will become
      // live, another update is triggered.
      if (!UsedAssumedInformation)
        indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;
    }

    const AAInterFnReachability *EdgeReachability =
        A.getAAFor<AAInterFnReachability>(*this, getIRPosition(),
                                          DepClassTy::REQUIRED);
    if (EdgeReachability && EdgeReachability->canReach(A, *getAnchorScope()))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }

  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(norecurse) }
};

/// NoRecurse attribute deduction for a call sites.
struct AANoRecurseCallSite final
    : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
  AANoRecurseCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(norecurse); }
};
/// ------------------------ No-Convergent Attribute ---------------------------

struct AANonConvergentImpl : public AANonConvergent {
  AANonConvergentImpl(const IRPosition &IRP, Attributor &A)
      : AANonConvergent(IRP, A) {}

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "non-convergent" : "may-be-convergent";
  }
};

struct AANonConvergentFunction final : AANonConvergentImpl {
  AANonConvergentFunction(const IRPosition &IRP, Attributor &A)
      : AANonConvergentImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If all function calls are known to not be convergent, we are not
    // convergent.
    auto CalleeIsNotConvergent = [&](Instruction &Inst) {
      CallBase &CB = cast<CallBase>(Inst);
      auto *Callee = dyn_cast_if_present<Function>(CB.getCalledOperand());
      if (!Callee || Callee->isIntrinsic()) {
        return false;
      }
      if (Callee->isDeclaration()) {
        return !Callee->hasFnAttribute(Attribute::Convergent);
      }
      const auto *ConvergentAA = A.getAAFor<AANonConvergent>(
          *this, IRPosition::function(*Callee), DepClassTy::REQUIRED);
      return ConvergentAA && ConvergentAA->isAssumedNotConvergent();
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *this,
                                           UsedAssumedInformation)) {
      return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }

  ChangeStatus manifest(Attributor &A) override {
    if (isKnownNotConvergent() &&
        A.hasAttr(getIRPosition(), Attribute::Convergent)) {
      A.removeAttrs(getIRPosition(), {Attribute::Convergent});
      return ChangeStatus::CHANGED;
    }
    return ChangeStatus::UNCHANGED;
  }

  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(convergent)
  }
};
/// -------------------- Undefined-Behavior Attributes ------------------------

struct AAUndefinedBehaviorImpl : public AAUndefinedBehavior {
  AAUndefinedBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehavior(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const size_t UBPrevSize = KnownUBInsts.size();
    const size_t NoUBPrevSize = AssumedNoUBInsts.size();

    auto InspectMemAccessInstForUB = [&](Instruction &I) {
      // The language reference states a volatile store is not UB, skip them.
      if (I.isVolatile() && I.mayWriteToMemory())
        return true;

      // Skip instructions that are already saved.
      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

      // If we reach here, we know we have an instruction that accesses memory
      // through a pointer operand, for which getPointerOperand() should give
      // it to us.
      Value *PtrOp =
          const_cast<Value *>(getPointerOperand(&I, /* AllowVolatile */ true));
      assert(PtrOp &&
             "Expected pointer operand of memory accessing instruction");

      // Either we stopped and the appropriate action was taken,
      // or we got back a simplified value to continue.
      std::optional<Value *> SimplifiedPtrOp =
          stopOnUndefOrAssumed(A, PtrOp, &I);
      if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
        return true;
      const Value *PtrOpVal = *SimplifiedPtrOp;

      // A memory access through a pointer is considered UB only if the
      // pointer has constant null value.
      // TODO: Expand it to not only check constant values.
      if (!isa<ConstantPointerNull>(PtrOpVal)) {
        AssumedNoUBInsts.insert(&I);
        return true;
      }
      const Type *PtrTy = PtrOpVal->getType();

      // Because we only consider instructions inside functions, assume that a
      // parent function exists.
      const Function *F = I.getFunction();

      // A memory access using constant null pointer is only considered UB if
      // null pointer is _not_ defined for the target platform.
      if (llvm::NullPointerIsDefined(F, PtrTy->getPointerAddressSpace()))
        AssumedNoUBInsts.insert(&I);
      else
        KnownUBInsts.insert(&I);
      return true;
    };

    auto InspectBrInstForUB = [&](Instruction &I) {
      // A conditional branch instruction is considered UB if it has `undef`
      // condition.

      // Skip instructions that are already saved.
      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

      // We know we have a branch instruction.
      auto *BrInst = cast<BranchInst>(&I);

      // Unconditional branches are never considered UB.
      if (BrInst->isUnconditional())
        return true;

      // Either we stopped and the appropriate action was taken,
      // or we got back a simplified value to continue.
      std::optional<Value *> SimplifiedCond =
          stopOnUndefOrAssumed(A, BrInst->getCondition(), BrInst);
      if (!SimplifiedCond || !*SimplifiedCond)
        return true;
      AssumedNoUBInsts.insert(&I);
      return true;
    };

    auto InspectCallSiteForUB = [&](Instruction &I) {
      // Check whether a call site always causes UB or not.

      // Skip instructions that are already saved.
      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))
        return true;

      CallBase &CB = cast<CallBase>(I);
      const Function *Callee = CB.getCalledFunction();
      if (!Callee)
        return true;
      for (unsigned idx = 0; idx < CB.arg_size(); idx++) {
        // If the current argument is known to be simplified to a null pointer
        // and the corresponding argument position is known to have a nonnull
        // attribute, the argument is poison. Furthermore, if the argument is
        // poison and the position is known to have a noundef attribute, this
        // call site is considered UB.
        if (idx >= Callee->arg_size())
          break;
        Value *ArgVal = CB.getArgOperand(idx);
        if (!ArgVal)
          continue;
        // Here, we handle three cases.
        //   (1) Not having a value means it is dead. (we can replace the value
        //       with undef)
        //   (2) Simplified to undef. The argument violates the noundef
        //       attribute.
        //   (3) Simplified to null pointer where known to be nonnull.
        //       The argument is a poison value and violates the noundef
        //       attribute.
        IRPosition CalleeArgumentIRP = IRPosition::callsite_argument(CB, idx);
        bool IsKnownNoUndef;
        AA::hasAssumedIRAttr<Attribute::NoUndef>(
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
        if (!IsKnownNoUndef)
          continue;
        bool UsedAssumedInformation = false;
        std::optional<Value *> SimplifiedVal =
            A.getAssumedSimplified(IRPosition::value(*ArgVal), *this,
                                   UsedAssumedInformation, AA::Interprocedural);
        if (UsedAssumedInformation)
          continue;
        if (SimplifiedVal && !*SimplifiedVal)
          return true;
        if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
          KnownUBInsts.insert(&I);
          continue;
        }
        if (!ArgVal->getType()->isPointerTy() ||
            !isa<ConstantPointerNull>(**SimplifiedVal))
          continue;
        bool IsKnownNonNull;
        AA::hasAssumedIRAttr<Attribute::NonNull>(
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
        if (IsKnownNonNull)
          KnownUBInsts.insert(&I);
      }
      return true;
    };

    auto InspectReturnInstForUB = [&](Instruction &I) {
      auto &RI = cast<ReturnInst>(I);
      // Either we stopped and the appropriate action was taken,
      // or we got back a simplified return value to continue.
      std::optional<Value *> SimplifiedRetValue =
          stopOnUndefOrAssumed(A, RI.getReturnValue(), &I);
      if (!SimplifiedRetValue || !*SimplifiedRetValue)
        return true;

      // We know the return value is not undef when this is called; the only
      // remaining UB source is returning null when the returned position is
      // known nonnull.
      if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
        bool IsKnownNonNull;
        AA::hasAssumedIRAttr<Attribute::NonNull>(
            A, this, IRPosition::returned(*getAnchorScope()), DepClassTy::NONE,
            IsKnownNonNull);
        if (IsKnownNonNull)
          KnownUBInsts.insert(&I);
      }

      return true;
    };

    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(InspectMemAccessInstForUB, *this,
                              {Instruction::Load, Instruction::Store,
                               Instruction::AtomicCmpXchg,
                               Instruction::AtomicRMW},
                              UsedAssumedInformation,
                              /* CheckBBLivenessOnly */ true);
    A.checkForAllInstructions(InspectBrInstForUB, *this, {Instruction::Br},
                              UsedAssumedInformation,
                              /* CheckBBLivenessOnly */ true);
    A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *this,
                                      UsedAssumedInformation);

    // If the returned position of the anchor scope has a noundef attribute,
    // check all return instructions.
    if (!getAnchorScope()->getReturnType()->isVoidTy()) {
      const IRPosition &ReturnIRP = IRPosition::returned(*getAnchorScope());
      if (!A.isAssumedDead(ReturnIRP, this, nullptr, UsedAssumedInformation)) {
        bool IsKnownNoUndef;
        AA::hasAssumedIRAttr<Attribute::NoUndef>(
            A, this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
        if (IsKnownNoUndef)
          A.checkForAllInstructions(InspectReturnInstForUB, *this,
                                    {Instruction::Ret}, UsedAssumedInformation,
                                    /* CheckBBLivenessOnly */ true);
      }
    }

    if (NoUBPrevSize != AssumedNoUBInsts.size() ||
        UBPrevSize != KnownUBInsts.size())
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }
  bool isKnownToCauseUB(Instruction *I) const override {
    return KnownUBInsts.count(I);
  }

  bool isAssumedToCauseUB(Instruction *I) const override {
    // In simple words, if an instruction is not in the assumed-to-_not_-cause-
    // UB set, then it is assumed UB (that includes those in the KnownUBInsts
    // set). The rest is boilerplate to ensure that it is one of the
    // instructions we test for UB.
    switch (I->getOpcode()) {
    case Instruction::Load:
    case Instruction::Store:
    case Instruction::AtomicCmpXchg:
    case Instruction::AtomicRMW:
      return !AssumedNoUBInsts.count(I);
    case Instruction::Br: {
      auto *BrInst = cast<BranchInst>(I);
      if (BrInst->isUnconditional())
        return false;
      return !AssumedNoUBInsts.count(I);
    }
    default:
      return false;
    }
    return false;
  }

  ChangeStatus manifest(Attributor &A) override {
    if (KnownUBInsts.empty())
      return ChangeStatus::UNCHANGED;
    for (Instruction *I : KnownUBInsts)
      A.changeToUnreachableAfterManifest(I);
    return ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "undefined-behavior" : "no-ub";
  }

protected:
  /// A set of all live instructions _known_ to cause UB.
  SmallPtrSet<Instruction *, 8> KnownUBInsts;

private:
  /// A set of all the (live) instructions that are assumed to _not_ cause UB.
  SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;

  // Should be called on updates in which, if we're processing an instruction
  // \p I that depends on a value \p V, one of the following has to happen:
  // - If the value is assumed, then stop.
  // - If the value is known but undef, then consider it UB.
  // - Otherwise, do specific processing with the simplified value.
  // We return std::nullopt in the first case to signify to stop.
  std::optional<Value *> stopOnUndefOrAssumed(Attributor &A, Value *V,
                                              Instruction *I) {
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimplifiedV =
        A.getAssumedSimplified(IRPosition::value(*V), *this,
                               UsedAssumedInformation, AA::Interprocedural);
    if (!UsedAssumedInformation) {
      // Don't depend on assumed values.
      if (!SimplifiedV) {
        // If it is known (which we tested above) but it doesn't have a value,
        // then we can assume `undef` and hence the instruction is UB.
        KnownUBInsts.insert(I);
        return std::nullopt;
      }
      if (!*SimplifiedV)
        return nullptr;
      V = *SimplifiedV;
    }
    if (isa<UndefValue>(V)) {
      KnownUBInsts.insert(I);
      return std::nullopt;
    }
    return V;
  }
};

struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
  AAUndefinedBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehaviorImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECL(UndefinedBehaviorInstruction, Instruction,
               "Number of instructions known to have UB");
    BUILD_STAT_NAME(UndefinedBehaviorInstruction, Instruction) +=
        KnownUBInsts.size();
  }
};
/// ------------------------ Will-Return Attributes ----------------------------

// Helper function that checks whether a function has any cycle which we don't
// know if it is bounded or not. Loops with a maximum trip count are considered
// bounded, any other cycle is not.
static bool mayContainUnboundedCycle(Function &F, Attributor &A) {
  ScalarEvolution *SE =
      A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(F);
  LoopInfo *LI = A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(F);
  // If either SCEV or LoopInfo is not available for the function then we
  // assume any cycle to be unbounded. We use scc_iterator, which uses Tarjan's
  // algorithm to find all the maximal SCCs; to detect a cycle we only need the
  // maximal ones.
  if (!SE || !LI) {
    for (scc_iterator<Function *> SCCI = scc_begin(&F); !SCCI.isAtEnd(); ++SCCI)
      if (SCCI.hasCycle())
        return true;
    return false;
  }

  // If there's irreducible control, the function may contain non-loop cycles.
  if (mayContainIrreducibleControl(F, LI))
    return true;

  // Any loop that does not have a max trip count is considered unbounded.
  for (auto *L : LI->getLoopsInPreorder()) {
    if (!SE->getSmallConstantMaxTripCount(L))
      return true;
  }
  return false;
}

struct AAWillReturnImpl : public AAWillReturn {
  AAWillReturnImpl(const IRPosition &IRP, Attributor &A)
      : AAWillReturn(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// Check for `mustprogress` and `readonly` as they imply `willreturn`.
  bool isImpliedByMustprogressAndReadonly(Attributor &A, bool KnownOnly) {
    if (!A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
      return false;

    bool IsKnown;
    if (AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown))
      return IsKnown || !KnownOnly;
    return false;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    if (isImpliedByMustprogressAndReadonly(A, /* KnownOnly */ false))
      return ChangeStatus::UNCHANGED;

    auto CheckForWillReturn = [&](Instruction &I) {
      IRPosition IPos = IRPosition::callsite_function(cast<CallBase>(I));
      bool IsKnown;
      if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
              A, this, IPos, DepClassTy::REQUIRED, IsKnown)) {
        if (IsKnown)
          return true;
      } else {
        return false;
      }
      bool IsKnownNoRecurse;
      return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
          A, this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForWillReturn, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::getAsStr()
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "willreturn" : "may-noreturn";
  }
};

struct AAWillReturnFunction final : AAWillReturnImpl {
  AAWillReturnFunction(const IRPosition &IRP, Attributor &A)
      : AAWillReturnImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAWillReturnImpl::initialize(A);

    Function *F = getAnchorScope();
    assert(F && "Did expect an anchor function");
    if (F->isDeclaration() || mayContainUnboundedCycle(*F, A))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(willreturn) }
};

/// WillReturn attribute deduction for a call sites.
struct AAWillReturnCallSite final
    : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
  AAWillReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    if (isImpliedByMustprogressAndReadonly(A, /* KnownOnly */ false))
      return ChangeStatus::UNCHANGED;

    return AACalleeToCallSite::updateImpl(A);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(willreturn); }
};
/// -------------------AAIntraFnReachability Attribute--------------------------

/// All information associated with a reachability query. This boilerplate code
/// is used by both AAIntraFnReachability and AAInterFnReachability, with
/// different \p ToTy values.
template <typename ToTy> struct ReachabilityQueryInfo {
  enum class Reachable {
    No,
    Yes,
  };

  /// Start here,
  const Instruction *From = nullptr;
  /// reach this place,
  const ToTy *To = nullptr;
  /// without going through any of these instructions,
  const AA::InstExclusionSetTy *ExclusionSet = nullptr;
  /// and remember if it worked:
  Reachable Result = Reachable::No;

  /// Precomputed hash for this RQI.
  unsigned Hash = 0;

  unsigned computeHashValue() const {
    assert(Hash == 0 && "Computed hash twice!");
    using InstSetDMI = DenseMapInfo<const AA::InstExclusionSetTy *>;
    using PairDMI = DenseMapInfo<std::pair<const Instruction *, const ToTy *>>;
    return const_cast<ReachabilityQueryInfo<ToTy> *>(this)->Hash =
               detail::combineHashValue(PairDMI ::getHashValue({From, To}),
                                        InstSetDMI::getHashValue(ExclusionSet));
  }

  ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
      : From(From), To(To) {}

  /// Constructor replacement to ensure unique and stable sets are used for the
  /// cache.
  ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To,
                        const AA::InstExclusionSetTy *ES, bool MakeUnique)
      : From(&From), To(&To), ExclusionSet(ES) {
    if (!ES || ES->empty()) {
      ExclusionSet = nullptr;
    } else if (MakeUnique) {
      ExclusionSet = A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
    }
  }

  ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
      : From(RQI.From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
};

namespace llvm {
template <typename ToTy> struct DenseMapInfo<ReachabilityQueryInfo<ToTy> *> {
  using InstSetDMI = DenseMapInfo<const AA::InstExclusionSetTy *>;
  using PairDMI = DenseMapInfo<std::pair<const Instruction *, const ToTy *>>;

  static ReachabilityQueryInfo<ToTy> EmptyKey;
  static ReachabilityQueryInfo<ToTy> TombstoneKey;

  static inline ReachabilityQueryInfo<ToTy> *getEmptyKey() { return &EmptyKey; }
  static inline ReachabilityQueryInfo<ToTy> *getTombstoneKey() {
    return &TombstoneKey;
  }
  static unsigned getHashValue(const ReachabilityQueryInfo<ToTy> *RQI) {
    return RQI->Hash ? RQI->Hash : RQI->computeHashValue();
  }
  static bool isEqual(const ReachabilityQueryInfo<ToTy> *LHS,
                      const ReachabilityQueryInfo<ToTy> *RHS) {
    if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
      return false;
    return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);
  }
};

#define DefineKeys(ToTy)                                                       \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey =                  \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getEmptyKey(),                \
              DenseMapInfo<const ToTy *>::getEmptyKey());                      \
  template <>                                                                  \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey =              \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getTombstoneKey(),            \
              DenseMapInfo<const ToTy *>::getTombstoneKey());
template <typename BaseTy, typename ToTy>
struct CachedReachabilityAA : public BaseTy {
  using RQITy = ReachabilityQueryInfo<ToTy>;

  CachedReachabilityAA(const IRPosition &IRP, Attributor &A) : BaseTy(IRP, A) {}

  /// See AbstractAttribute::isQueryAA.
  bool isQueryAA() const override { return true; }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    for (unsigned u = 0, e = QueryVector.size(); u < e; ++u) {
      RQITy *RQI = QueryVector[u];
      if (RQI->Result == RQITy::Reachable::No &&
          isReachableImpl(A, *RQI, /*IsTemporaryRQI=*/false))
        Changed = ChangeStatus::CHANGED;
    }
    return Changed;
  }

  virtual bool isReachableImpl(Attributor &A, RQITy &RQI,
                               bool IsTemporaryRQI) = 0;

  bool rememberResult(Attributor &A, typename RQITy::Reachable Result,
                      RQITy &RQI, bool UsedExclusionSet, bool IsTemporaryRQI) {
    RQI.Result = Result;

    // Remove the temporary RQI from the cache.
    if (IsTemporaryRQI)
      QueryCache.erase(&RQI);

    // Insert a plain RQI (w/o exclusion set) if that makes sense. Two options:
    // 1) If it is reachable, it doesn't matter if we have an exclusion set for
    //    this query. 2) We did not use the exclusion set, potentially because
    //    there is none.
    if (Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
      RQITy PlainRQI(RQI.From, RQI.To);
      if (!QueryCache.count(&PlainRQI)) {
        RQITy *RQIPtr = new (A.Allocator) RQITy(RQI.From, RQI.To);
        RQIPtr->Result = Result;
        QueryVector.push_back(RQIPtr);
        QueryCache.insert(RQIPtr);
      }
    }

    // Check if we need to insert a new permanent RQI with the exclusion set.
    if (IsTemporaryRQI && Result != RQITy::Reachable::Yes && UsedExclusionSet) {
      assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
             "Did not expect empty set!");
      RQITy *RQIPtr = new (A.Allocator)
          RQITy(A, *RQI.From, *RQI.To, RQI.ExclusionSet, true);
      assert(RQIPtr->Result == RQITy::Reachable::No && "Already reachable?");
      RQIPtr->Result = Result;
      assert(!QueryCache.count(RQIPtr));
      QueryVector.push_back(RQIPtr);
      QueryCache.insert(RQIPtr);
    }

    if (Result == RQITy::Reachable::No && IsTemporaryRQI)
      A.registerForUpdate(*this);
    return Result == RQITy::Reachable::Yes;
  }

  const std::string getAsStr(Attributor *A) const override {
    // TODO: Return the number of reachable queries.
    return "#queries(" + std::to_string(QueryVector.size()) + ")";
  }

  bool checkQueryCache(Attributor &A, RQITy &StackRQI,
                       typename RQITy::Reachable &Result) {
    if (!this->getState().isValidState()) {
      Result = RQITy::Reachable::Yes;
      return true;
    }

    // If we have an exclusion set we might be able to find our answer by
    // ignoring it first.
    if (StackRQI.ExclusionSet) {
      RQITy PlainRQI(StackRQI.From, StackRQI.To);
      auto It = QueryCache.find(&PlainRQI);
      if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
        Result = RQITy::Reachable::No;
        return true;
      }
    }

    auto It = QueryCache.find(&StackRQI);
    if (It != QueryCache.end()) {
      Result = (*It)->Result;
      return true;
    }

    // Insert a temporary for recursive queries. We will replace it with a
    // permanent entry later.
    QueryCache.insert(&StackRQI);
    return false;
  }

protected:
  SmallVector<RQITy *> QueryVector;
  DenseSet<RQITy *> QueryCache;
};
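// Note (for exposition): results are cached at two granularities, a plain
// (From, To) entry and, when the exclusion set actually influenced the
// verdict, a separate entry keyed on the unique'd exclusion set. Negative
// results re-register the AA for updates since "not reachable" may later flip
// to "reachable" as liveness assumptions are refined.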
struct AAIntraFnReachabilityFunction final
    : public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
  using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;

  AAIntraFnReachabilityFunction(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {
    DT = A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(
        *IRP.getAssociatedFunction());
  }

  bool isAssumedReachable(
      Attributor &A, const Instruction &From, const Instruction &To,
      const AA::InstExclusionSetTy *ExclusionSet) const override {
    auto *NonConstThis = const_cast<AAIntraFnReachabilityFunction *>(this);
    if (&From == &To)
      return true;

    RQITy StackRQI(A, From, To, ExclusionSet, false);
    typename RQITy::Reachable Result;
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI,
                                           /*IsTemporaryRQI=*/true);
    return Result == RQITy::Reachable::Yes;
  }

  ChangeStatus updateImpl(Attributor &A) override {
    // We only depend on liveness. DeadEdges is all we care about, check if any
    // of them changed.
    auto *LivenessAA =
        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (LivenessAA &&
        llvm::all_of(DeadEdges,
                     [&](const auto &DeadEdge) {
                       return LivenessAA->isEdgeDead(DeadEdge.first,
                                                     DeadEdge.second);
                     }) &&
        llvm::all_of(DeadBlocks, [&](const BasicBlock *BB) {
          return LivenessAA->isAssumedDead(BB);
        })) {
      return ChangeStatus::UNCHANGED;
    }
    DeadEdges.clear();
    DeadBlocks.clear();
    return Base::updateImpl(A);
  }

  bool isReachableImpl(Attributor &A, RQITy &RQI,
                       bool IsTemporaryRQI) override {
    const Instruction *Origin = RQI.From;
    bool UsedExclusionSet = false;

    auto WillReachInBlock = [&](const Instruction &From, const Instruction &To,
                                const AA::InstExclusionSetTy *ExclusionSet) {
      const Instruction *IP = &From;
      while (IP && IP != &To) {
        if (ExclusionSet && IP != Origin && ExclusionSet->count(IP)) {
          UsedExclusionSet = true;
          break;
        }
        IP = IP->getNextNode();
      }
      return IP == &To;
    };

    const BasicBlock *FromBB = RQI.From->getParent();
    const BasicBlock *ToBB = RQI.To->getParent();
    assert(FromBB->getParent() == ToBB->getParent() &&
           "Not an intra-procedural query!");

    // Check intra-block reachability; other reaching paths are still possible.
    if (FromBB == ToBB &&
        WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                            IsTemporaryRQI);

    // Check if reaching the ToBB block is sufficient or if even that would not
    // ensure reaching the target. In the latter case we are done.
    if (!WillReachInBlock(ToBB->front(), *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                            IsTemporaryRQI);

    const Function *Fn = FromBB->getParent();
    SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
    if (RQI.ExclusionSet)
      for (auto *I : *RQI.ExclusionSet)
        if (I->getFunction() == Fn)
          ExclusionBlocks.insert(I->getParent());

    // Check if we make it out of the FromBB block at all.
    if (ExclusionBlocks.count(FromBB) &&
        !WillReachInBlock(*RQI.From, *FromBB->getTerminator(),
                          RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::No, RQI, true, IsTemporaryRQI);

    auto *LivenessAA =
        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
      DeadBlocks.insert(ToBB);
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                            IsTemporaryRQI);
    }

    SmallPtrSet<const BasicBlock *, 16> Visited;
    SmallVector<const BasicBlock *, 16> Worklist;
    Worklist.push_back(FromBB);

    DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
    while (!Worklist.empty()) {
      const BasicBlock *BB = Worklist.pop_back_val();
      if (!Visited.insert(BB).second)
        continue;
      for (const BasicBlock *SuccBB : successors(BB)) {
        if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
          LocalDeadEdges.insert({BB, SuccBB});
          continue;
        }
        // We checked before if we just need to reach the ToBB block.
        if (SuccBB == ToBB)
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                                IsTemporaryRQI);
        if (DT && ExclusionBlocks.empty() && DT->dominates(BB, ToBB))
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                                IsTemporaryRQI);

        if (ExclusionBlocks.count(SuccBB)) {
          UsedExclusionSet = true;
          continue;
        }
        Worklist.push_back(SuccBB);
      }
    }

    DeadEdges.insert(LocalDeadEdges.begin(), LocalDeadEdges.end());
    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                          IsTemporaryRQI);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

private:
  // Set of assumed dead blocks we used in the last query. If any changes we
  // update the state.
  DenseSet<const BasicBlock *> DeadBlocks;

  // Set of assumed dead edges we used in the last query. If any changes we
  // update the state.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;

  /// The dominator tree of the function to short-circuit reasoning.
  const DominatorTree *DT = nullptr;
};
/// ------------------------ NoAlias Argument Attribute ------------------------

bool AANoAlias::isImpliedByIR(Attributor &A, const IRPosition &IRP,
                              Attribute::AttrKind ImpliedAttributeKind,
                              bool IgnoreSubsumingPositions) {
  assert(ImpliedAttributeKind == Attribute::NoAlias &&
         "Unexpected attribute kind");
  Value *Val = &IRP.getAssociatedValue();
  if (isa<AllocaInst>(Val))
    return true;

  IgnoreSubsumingPositions = true;

  if (isa<UndefValue>(Val))
    return true;

  if (isa<ConstantPointerNull>(Val) &&
      !NullPointerIsDefined(IRP.getAnchorScope(),
                            Val->getType()->getPointerAddressSpace()))
    return true;

  if (A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
                IgnoreSubsumingPositions, Attribute::NoAlias))
    return true;

  return false;
}

struct AANoAliasImpl : AANoAlias {
  AANoAliasImpl(const IRPosition &IRP, Attributor &A) : AANoAlias(IRP, A) {
    assert(getAssociatedType()->isPointerTy() &&
           "Noalias is a pointer attribute");
  }

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noalias" : "may-alias";
  }
};
/// NoAlias attribute for a floating value.
struct AANoAliasFloating final : AANoAliasImpl {
  AANoAliasFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Implement this.
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(noalias)
  }
};

/// NoAlias attribute for an argument.
struct AANoAliasArgument final
    : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
  using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
  AANoAliasArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::update(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // We have to make sure no-alias on the argument does not break
    // synchronization when this is a callback argument. If synchronization
    // cannot be affected, we delegate to the base updateImpl function.
    bool IsKnownNoSync;
    if (AA::hasAssumedIRAttr<Attribute::NoSync>(
            A, this, IRPosition::function_scope(getIRPosition()),
            DepClassTy::OPTIONAL, IsKnownNoSync))
      return Base::updateImpl(A);

    // If the argument is read-only, no-alias cannot break synchronization.
    bool IsKnown;
    if (AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown))
      return Base::updateImpl(A);

    // If the argument is never passed through callbacks, no-alias cannot break
    // synchronization.
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(
            [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *this,
            true, UsedAssumedInformation))
      return Base::updateImpl(A);

    // TODO: add no-alias but make sure it doesn't break synchronization by
    // introducing fake uses.

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(noalias) }
};
struct AANoAliasCallSiteArgument final : AANoAliasImpl {
  AANoAliasCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// Determine if the underlying value may alias with the call site argument
  /// \p OtherArgNo of \p CB (= the underlying call site).
  bool mayAliasWithArgument(Attributor &A, AAResults *&AAR,
                            const AAMemoryBehavior &MemBehaviorAA,
                            const CallBase &CB, unsigned OtherArgNo) {
    // We do not need to worry about aliasing with the underlying IRP.
    if (this->getCalleeArgNo() == (int)OtherArgNo)
      return false;

    // If it is not a pointer or pointer vector we do not alias.
    const Value *ArgOp = CB.getArgOperand(OtherArgNo);
    if (!ArgOp->getType()->isPtrOrPtrVectorTy())
      return false;

    auto *CBArgMemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
        *this, IRPosition::callsite_argument(CB, OtherArgNo), DepClassTy::NONE);

    // If the argument is readnone, there is no read-write aliasing.
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // If the argument is readonly and the underlying value is readonly, there
    // is no read-write aliasing.
    bool IsReadOnly = MemBehaviorAA.isAssumedReadOnly();
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
        IsReadOnly) {
      A.recordDependence(MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // We have to utilize actual alias analysis queries so we need the object.
    if (!AAR)
      AAR = A.getInfoCache().getAnalysisResultForFunction<AAManager>(
          *getAnchorScope());

    // Try to rule it out at the call site.
    bool IsAliasing = !AAR || !AAR->isNoAlias(&getAssociatedValue(), ArgOp);
    LLVM_DEBUG(dbgs() << "[NoAliasCSArg] Check alias between "
                         "callsite arguments: "
                      << getAssociatedValue() << " " << *ArgOp << " => "
                      << (IsAliasing ? "" : "no-") << "alias \n");

    return IsAliasing;
  }

  bool isKnownNoAliasDueToNoAliasPreservation(
      Attributor &A, AAResults *&AAR, const AAMemoryBehavior &MemBehaviorAA) {
    // We can deduce "noalias" if the following conditions hold.
    // (i)   Associated value is assumed to be noalias in the definition.
    // (ii)  Associated value is assumed to be no-capture in all the uses
    //       possibly executed before this callsite.
    // (iii) There is no other pointer argument which could alias with the
    //       value.

    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    const Function *ScopeFn = VIRP.getAnchorScope();
    auto IsDereferenceableOrNull = [&](Value *O, const DataLayout &DL) {
      return isDereferenceableOrNullPointer(O, DL);
    };

    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());

      // If UserI is the current instruction and there is a single potential
      // use of the value in UserI we allow the use.
      if (UserI == getCtxI() && UserI->getNumOperands() == 1)
        return true;

      if (ScopeFn) {
        if (auto *CB = dyn_cast<CallBase>(UserI)) {
          if (CB->isArgOperand(&U)) {
            unsigned ArgNo = CB->getArgOperandNo(&U);
            bool IsKnownNoCapture;
            if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
                    A, this, IRPosition::callsite_argument(*CB, ArgNo),
                    DepClassTy::OPTIONAL, IsKnownNoCapture))
              return true;
          }
        }

        if (!AA::isPotentiallyReachable(
                A, *UserI, *getCtxI(), *this, /* ExclusionSet */ nullptr,
                [ScopeFn](const Function &Fn) { return &Fn != ScopeFn; }))
          return true;
      }

      // TODO: We should track the capturing uses in AANoCapture but the
      //       problem is CGSCC runs.
      switch (DetermineUseCaptureKind(U, IsDereferenceableOrNull)) {
      case UseCaptureKind::NO_CAPTURE:
        return true;
      case UseCaptureKind::MAY_CAPTURE:
        LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Unknown user: " << *UserI
                          << "\n");
        return false;
      case UseCaptureKind::PASSTHROUGH:
        Follow = true;
        return true;
      }
      llvm_unreachable("unknown UseCaptureKind");
    };

    bool IsKnownNoCapture;
    const AANoCapture *NoCaptureAA = nullptr;
    bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
        A, this, VIRP, DepClassTy::NONE, IsKnownNoCapture, false, &NoCaptureAA);
    if (!IsAssumedNoCapture &&
        (!NoCaptureAA || !NoCaptureAA->isAssumedNoCaptureMaybeReturned())) {
      if (!A.checkForAllUses(UsePred, *this, getAssociatedValue())) {
        LLVM_DEBUG(
            dbgs() << "[AANoAliasCSArg] " << getAssociatedValue()
                   << " cannot be noalias as it is potentially captured\n");
        return false;
      }
    }
    if (NoCaptureAA)
      A.recordDependence(*NoCaptureAA, *this, DepClassTy::OPTIONAL);

    // Check there is no other pointer argument which could alias with the
    // value passed at this call site.
    // TODO: AbstractCallSite
    const auto &CB = cast<CallBase>(getAnchorValue());
    for (unsigned OtherArgNo = 0; OtherArgNo < CB.arg_size(); OtherArgNo++)
      if (mayAliasWithArgument(A, AAR, MemBehaviorAA, CB, OtherArgNo))
        return false;

    return true;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If the argument is readnone we are done as there are no accesses via the
    // argument.
    auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, getIRPosition(), DepClassTy::NONE);
    if (MemBehaviorAA && MemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;
    }

    bool IsKnownNoAlias;
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
            A, this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AANoAlias] " << getAssociatedValue()
                        << " is not no-alias at the definition\n");
      return indicatePessimisticFixpoint();
    }

    AAResults *AAR = nullptr;
    if (MemBehaviorAA &&
        isKnownNoAliasDueToNoAliasPreservation(A, AAR, *MemBehaviorAA)) {
      LLVM_DEBUG(
          dbgs() << "[AANoAlias] No-Alias deduced via no-alias preservation\n");
      return ChangeStatus::UNCHANGED;
    }

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(noalias) }
};
/// NoAlias attribute for function return value.
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {

    auto CheckReturnValue = [&](Value &RV) -> bool {
      if (Constant *C = dyn_cast<Constant>(&RV))
        if (C->isNullValue() || isa<UndefValue>(C))
          return true;

      /// For now, we can only deduce noalias if we have call sites.
      /// FIXME: add more support.
      if (!isa<CallBase>(&RV))
        return false;

      const IRPosition &RVPos = IRPosition::value(RV);
      bool IsKnownNoAlias;
      if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
              A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
        return false;

      bool IsKnownNoCapture;
      const AANoCapture *NoCaptureAA = nullptr;
      bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
          A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
          &NoCaptureAA);
      return IsAssumedNoCapture ||
             (NoCaptureAA && NoCaptureAA->isAssumedNoCaptureMaybeReturned());
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(noalias) }
};
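// Illustrative IR (not from this file): a malloc-like wrapper whose return
// position can be marked noalias by the logic above:
//   define ptr @wrap(i64 %n) {
//     %p = call noalias ptr @malloc(i64 %n) ; noalias at its definition
//     ret ptr %p                            ; not captured before the return
//   }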
/// NoAlias attribute deduction for a call site return value.
struct AANoAliasCallSiteReturned final
    : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(noalias) }
};
/// -------------------AAIsDead Function Attribute-----------------------

struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }

  /// Check if all uses are assumed dead.
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // Callers might not check the type, void has no uses.
    if (V.getType()->isVoidTy() || V.use_empty())
      return true;

    // If we replace a value with a constant there are no uses left afterwards.
    if (!isa<Constant>(V)) {
      if (auto *I = dyn_cast<Instruction>(&V))
        if (!A.isRunOn(*I->getFunction()))
          return false;
      bool UsedAssumedInformation = false;
      std::optional<Constant *> C =
          A.getAssumedConstant(V, *this, UsedAssumedInformation);
      if (!C || *C)
        return true;
    }

    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    // Explicitly set the dependence class to required because we want a long
    // chain of N dependent instructions to be considered live as soon as one
    // is without going through N update cycles.
    return A.checkForAllUses(UsePred, *this, V, /* CheckBBLivenessOnly */ false,
                             DepClassTy::REQUIRED,
                             /* IgnoreDroppableUses */ false);
  }

  /// Determine if \p I is assumed to be side-effect free.
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    if (!I || wouldInstructionBeTriviallyDead(I))
      return true;

    auto *CB = dyn_cast<CallBase>(I);
    if (!CB || isa<IntrinsicInst>(CB))
      return false;

    const IRPosition &CallIRP = IRPosition::callsite_function(*CB);

    bool IsKnownNoUnwind;
    if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      return false;

    bool IsKnown;
    return AA::isAssumedReadOnly(A, CallIRP, *this, IsKnown);
  }
};
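// Illustrative IR (not from this file): a side-effect-free call whose result
// has no live uses is "assumed dead" by the machinery above:
//   %r = call i32 @pure()  ; callee deduced nounwind + readonly
//   ; no uses of %r, or all uses themselves assumed dead
// areAllUsesAssumedDead() and isAssumedSideEffectFree() together justify
// deleting such a call during manifest.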
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);

    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (!isAssumedSideEffectFree(A, I)) {
      if (!isa_and_nonnull<StoreInst>(I) && !isa_and_nonnull<FenceInst>(I))
        indicatePessimisticFixpoint();
      else
        removeAssumedBits(HAS_NO_EFFECT);
    }
  }

  /// Check if a fence is a no-op in its (assumed) execution domain.
  bool isDeadFence(Attributor &A, FenceInst &FI) {
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(*FI.getFunction()), *this, DepClassTy::NONE);
    if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
      return false;
    A.recordDependence(*ExecDomainAA, *this, DepClassTy::OPTIONAL);
    return true;
  }
  /// Check if a store is assumed dead, i.e., all potential copies of the
  /// stored value are themselves assumed dead (or only used by assumes).
  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // Lang ref now states volatile store is not UB/dead, let's skip them.
    if (SI.isVolatile())
      return false;

    // If we are collecting assumes to be deleted we are in the manifest stage.
    // It's problematic to collect the potential copies again now so we use the
    // cached ones.
    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      if (!AA::getPotentialCopiesOfStoredValue(A, SI, PotentialCopies, *this,
                                               UsedAssumedInformation)) {
        LLVM_DEBUG(
            dbgs()
            << "[AAIsDead] Could not determine potential copies of store!\n");
        return false;
      }
    }
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");

    InformationCache &InfoCache = A.getInfoCache();
    return llvm::all_of(PotentialCopies, [&](Value *V) {
      if (A.isAssumedDead(IRPosition::value(*V), this, nullptr,
                          UsedAssumedInformation))
        return true;
      if (auto *LI = dyn_cast<LoadInst>(V)) {
        if (llvm::all_of(LI->uses(), [&](const Use &U) {
              auto &UserI = cast<Instruction>(*U.getUser());
              if (InfoCache.isOnlyUsedByAssume(UserI)) {
                if (AssumeOnlyInst)
                  AssumeOnlyInst->insert(&UserI);
                return true;
              }
              return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
            }))
          return true;
      }
      LLVM_DEBUG(dbgs() << "[AAIsDead] Potential copy " << *V
                        << " is assumed live!\n");
      return false;
    });
  }
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (isa_and_nonnull<StoreInst>(I))
      if (isValidState())
        return "assumed-dead-store";
    if (isa_and_nonnull<FenceInst>(I))
      if (isValidState())
        return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr(A);
  }
  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (auto *SI = dyn_cast_or_null<StoreInst>(I)) {
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    } else if (auto *FI = dyn_cast_or_null<FenceInst>(I)) {
      if (!isDeadFence(A, *FI))
        return indicatePessimisticFixpoint();
    } else {
      if (!isAssumedSideEffectFree(A, I))
        return indicatePessimisticFixpoint();
      if (!areAllUsesAssumedDead(A, getAssociatedValue()))
        return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }
  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      // If we get here we basically know the users are all dead. We check if
      // isAssumedSideEffectFree returns true here again because it might not
      // be the case and only the users are dead but the instruction (=call)
      // is still needed.
      if (auto *SI = dyn_cast<StoreInst>(I)) {
        SmallSetVector<Instruction *, 8> AssumeOnlyInst;
        bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
        (void)IsDead;
        assert(IsDead && "Store was assumed to be dead!");
        A.deleteAfterManifest(*I);
        for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
          Instruction *AOI = AssumeOnlyInst[i];
          for (auto *Usr : AOI->users())
            AssumeOnlyInst.insert(cast<Instruction>(Usr));
          A.deleteAfterManifest(*AOI);
        }
        return ChangeStatus::CHANGED;
      }
      if (auto *FI = dyn_cast<FenceInst>(I)) {
        assert(isDeadFence(A, *FI));
        A.deleteAfterManifest(*FI);
        return ChangeStatus::CHANGED;
      }
      if (isAssumedSideEffectFree(A, I) && !isa<InvokeInst>(I)) {
        A.deleteAfterManifest(*I);
        return ChangeStatus::CHANGED;
      }
    }
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(IsDead)
  }

private:
  // The potential copies of a dead store, used for deletion during manifest.
  SmallSetVector<Value *, 4> PotentialCopies;
};
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, /* ReplacementTypes */ {}))
      if (A.registerFunctionSignatureRewrite(
              Arg, /* ReplacementTypes */ {},
              Attributor::ArgumentReplacementInfo::CalleeRepairCBTy{},
              Attributor::ArgumentReplacementInfo::ACSRepairCBTy{}))
        return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(IsDead) }
};
struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information and then it makes sense to
    //       specialize attributes for call sites arguments instead of
    //       redirecting requests to the callee argument.
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    CallBase &CB = cast<CallBase>(getAnchorValue());
    Use &U = CB.getArgOperandUse(getCallSiteArgNo());
    assert(!isa<UndefValue>(U.get()) &&
           "Expected undef values to be filtered out!");
    UndefValue &UV = *UndefValue::get(U->getType());
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(IsDead) }
};
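// Illustrative effect (not from this file): if the callee argument is dead,
//   call void @callee(ptr %p, i32 %costly)
// has the dead operand use rewritten to undef at manifest time, which can in
// turn make the computation of %costly itself dead.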
struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (isa<UndefValue>(getAssociatedValue())) {
      indicatePessimisticFixpoint();
      return;
    }

    // We track this separately as a secondary state.
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      STATS_DECLTRACK_CSRET_ATTR(IsDead)
    else
      STATS_DECLTRACK_CSRET_ATTR(UnusedResult)
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};
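// Illustrative IR (not from this file): a call whose result is unused but
// whose side effects keep the call itself alive:
//   %r = call i32 @printf(ptr @fmt)  ; %r has no uses
// Here only the "users are dead" part holds (IsAssumedSideEffectFree becomes
// false), which is exactly the "assumed-dead-users" state printed above.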
struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {

    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        return false;
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // TODO: Rewrite the signature to return void?
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    auto RetInstPred = [&](Instruction &I) {
      ReturnInst &RI = cast<ReturnInst>(I);
      if (!isa<UndefValue>(RI.getReturnValue()))
        AnyChange |= A.changeUseAfterManifest(RI.getOperandUse(0), UV);
      return true;
    };
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(IsDead) }
};
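// Illustrative effect (not from this file): when no call site uses the return
// value of
//   define i32 @f() { ...; ret i32 %expensive }
// each `ret` operand is rewritten to undef at manifest time, which can make
// %expensive itself dead; the signature is not (yet) rewritten to void.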
/// Liveness information for a whole function.
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function *F = getAnchorScope();
    assert(F && "Did expect an anchor function");
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  }

  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      return false;
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; }, *this,
                                  true, UsedAssumedInformation);
  }

  /// Returns true if the personality of the function may catch asynchronous
  /// exceptions, keeping invoke unwind destinations alive.
  static bool mayCatchAsynchronousExceptions(const Function &F) {
    return F.hasPersonalityFn() && !canSimplifyInvokeNoUnwind(&F);
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }
  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function &F = *getAnchorScope();

    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind. This is not possible if the personality of the
    // function allows to catch asynchronous exceptions.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      auto *CB = dyn_cast<CallBase>(DeadEndI);
      if (!CB)
        continue;
      bool IsKnownNoReturn;
      bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL,
          IsKnownNoReturn);
      if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
        continue;

      if (auto *II = dyn_cast<InvokeInst>(DeadEndI))
        A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      else
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      HasChanged = ChangeStatus::CHANGED;
    }

    STATS_DECL(AAIsDead, BasicBlock, "Number of dead basic blocks deleted.");
    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        ++BUILD_STAT_NAME(AAIsDead, BasicBlock);
        HasChanged = ChangeStatus::CHANGED;
      }

    return HasChanged;
  }

  /// See AAIsDead::isEdgeDead(...).
  bool isEdgeDead(const BasicBlock *From,
                  const BasicBlock *To) const override {
    assert(From->getParent() == getAnchorScope() &&
           To->getParent() == getAnchorScope() &&
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
  /// Returns true if the function is assumed dead.
  bool isAssumedDead() const override { return false; }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return false; }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == getAnchorScope() &&
           "BB must be in the same anchor scope function.");
    if (!getAssumed())
      return false;
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    if (!getAssumed())
      return false;

    // If it is not in AssumedLiveBlocks then it is for sure dead. Otherwise,
    // it can still be after a noreturn call in a live block.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;

    // If it is not after a liveness barrier it is live.
    const Instruction *PrevI = I->getPrevNode();
    while (PrevI) {
      if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
        return true;
      PrevI = PrevI->getPrevNode();
    }
    return false;
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// Assume \p BB is (partially) live now and indicate to the Attributor \p A
  /// that internal functions called from \p BB should now be looked at.
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      return false;

    // We assume that all of BB is (probably) live now and if there are calls
    // to internal functions we will assume that those are now live as well.
    // This is a performance optimization for blocks with calls to a lot of
    // internal functions. It can however cause dead functions to be treated
    // as live.
    for (const Instruction &I : BB)
      if (const auto *CB = dyn_cast<CallBase>(&I))
        if (auto *F = dyn_cast_if_present<Function>(CB->getCalledOperand()))
          if (F->hasLocalLinkage())
            A.markLiveInternalFunction(*F);
    return true;
  }

  /// Collection of instructions that need to be explored again, e.g., we did
  /// assume they do not transfer control to (one of their) successors.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  /// Collection of instructions that are known to not transfer control.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  /// Collection of all assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  /// Collection of all assumed live BasicBlocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};
static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  const IRPosition &IPos = IRPosition::callsite_function(CB);

  bool IsKnownNoReturn;
  if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
          A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoReturn))
    return !IsKnownNoReturn;
  if (CB.isTerminator())
    AliveSuccessors.push_back(&CB.getSuccessor(0)->front());
  else
    AliveSuccessors.push_back(CB.getNextNode());
  return false;
}

static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      identifyAliveSuccessors(A, cast<CallBase>(II), AA, AliveSuccessors);

  // First, determine if we can change an invoke to a call assuming the callee
  // is nounwind. This is not possible if the personality of the function
  // allows to catch asynchronous exceptions.
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
  } else {
    const IRPosition &IPos = IRPosition::callsite_function(II);

    bool IsKnownNoUnwind;
    if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, &AA, IPos, DepClassTy::OPTIONAL, IsKnownNoUnwind)) {
      UsedAssumedInformation |= !IsKnownNoUnwind;
    } else {
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
    }
  }
  return UsedAssumedInformation;
}
static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  if (BI.getNumSuccessors() == 1) {
    AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
  } else {
    std::optional<Constant *> C =
        A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
    if (!C || isa_and_nonnull<UndefValue>(*C)) {
      // No value yet, assume both edges are dead.
    } else if (isa_and_nonnull<ConstantInt>(*C)) {
      const BasicBlock *SuccBB =
          BI.getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
      AliveSuccessors.push_back(&SuccBB->front());
    } else {
      AliveSuccessors.push_back(&BI.getSuccessor(0)->front());
      AliveSuccessors.push_back(&BI.getSuccessor(1)->front());
      UsedAssumedInformation = false;
    }
  }
  return UsedAssumedInformation;
}
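// Illustrative IR (not from this file): with %c simplified to `i1 true` by
// other AAs,
//   br i1 %c, label %then, label %else
// only %then is enqueued as an alive successor; because assumed information
// was used, the branch is revisited (via NewToBeExploredFrom) until the fact
// becomes known.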
static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  SmallVector<AA::ValueAndContext> Values;
  if (!A.getAssumedSimplifiedValues(IRPosition::value(*SI.getCondition()), &AA,
                                    Values, AA::AnyScope,
                                    UsedAssumedInformation)) {
    // Something went wrong, assume all successors are live.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return false;
  }

  if (Values.empty() ||
      (Values.size() == 1 &&
       isa_and_nonnull<UndefValue>(Values.front().getValue()))) {
    // No valid value yet, assume all edges are dead.
    return UsedAssumedInformation;
  }

  Type &Ty = *SI.getCondition()->getType();
  SmallPtrSet<ConstantInt *, 8> Constants;
  auto CheckForConstantInt = [&](Value *V) {
    if (auto *CI = dyn_cast_if_present<ConstantInt>(AA::getWithType(*V, Ty))) {
      Constants.insert(CI);
      return true;
    }
    return false;
  };

  if (!all_of(Values, [&](AA::ValueAndContext &VAC) {
        return CheckForConstantInt(VAC.getValue());
      })) {
    // Again, something went wrong, assume all successors are live.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
    return UsedAssumedInformation;
  }

  unsigned MatchedCases = 0;
  for (const auto &CaseIt : SI.cases()) {
    if (Constants.count(CaseIt.getCaseValue())) {
      ++MatchedCases;
      AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
    }
  }

  // If all potential values have been matched, we will not visit the default
  // case.
  if (MatchedCases < Constants.size())
    AliveSuccessors.push_back(&SI.getDefaultDest()->front());
  return UsedAssumedInformation;
}
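// Illustrative IR (not from this file): if %x is simplified to the constant 2,
//   switch i32 %x, label %default [ i32 1, label %a
//                                   i32 2, label %b ]
// only %b is considered alive; the default destination is skipped because all
// potential values matched a case.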
ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) {
  ChangeStatus Change = ChangeStatus::UNCHANGED;

  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      return ChangeStatus::UNCHANGED;

    Function *F = getAnchorScope();
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
    Change = ChangeStatus::CHANGED;
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");

  // Copy and clear the list of instructions we need to explore from. It is
  // refilled with instructions the next update has to look at.
  SmallVector<const Instruction *, 8> Worklist(ToBeExploredFrom.begin(),
                                               ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;

  SmallVector<const Instruction *, 8> AliveSuccessors;
  while (!Worklist.empty()) {
    const Instruction *I = Worklist.pop_back_val();
    LLVM_DEBUG(dbgs() << "[AAIsDead] Exploration inst: " << *I << "\n");

    // Fast forward for uninteresting instructions. We could look for UB here
    // though.
    while (!I->isTerminator() && !isa<CallBase>(I))
      I = I->getNextNode();

    AliveSuccessors.clear();

    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    // TODO: look for (assumed) UB to backwards propagate "deadness".
    default:
      assert(I->isTerminator() &&
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      break;
    case Instruction::Call:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<CallInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Invoke:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<InvokeInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Br:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<BranchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Switch:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<SwitchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    }

    if (UsedAssumedInformation) {
      NewToBeExploredFrom.insert(I);
    } else if (AliveSuccessors.empty() ||
               (I->isTerminator() &&
                AliveSuccessors.size() < I->getNumSuccessors())) {
      if (KnownDeadEnds.insert(I))
        Change = ChangeStatus::CHANGED;
    }

    LLVM_DEBUG(dbgs() << "[AAIsDead] #AliveSuccessors: "
                      << AliveSuccessors.size() << " UsedAssumedInformation: "
                      << UsedAssumedInformation << "\n");

    for (const Instruction *AliveSuccessor : AliveSuccessors) {
      if (!I->isTerminator()) {
        assert(AliveSuccessors.size() == 1 &&
               "Non-terminator expected to have a single successor!");
        Worklist.push_back(AliveSuccessor);
      } else {
        // Record the alive edge.
        auto Edge = std::make_pair(I->getParent(), AliveSuccessor->getParent());
        if (AssumedLiveEdges.insert(Edge).second)
          Change = ChangeStatus::CHANGED;
        if (assumeLive(A, *AliveSuccessor->getParent()))
          Worklist.push_back(AliveSuccessor);
      }
    }
  }

  // Check if the content of ToBeExploredFrom changed, ignore the order.
  if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
      llvm::any_of(NewToBeExploredFrom, [&](const Instruction *I) {
        return !ToBeExploredFrom.count(I);
      })) {
    Change = ChangeStatus::CHANGED;
    ToBeExploredFrom = std::move(NewToBeExploredFrom);
  }

  // If we know everything is live there is no need to query for liveness.
  // Instead, indicating a pessimistic fixpoint will cause the state to be
  // "invalid" and all queries to be answered conservatively without lookups.
  // To be in this state we have to fully explore the function.
  if (ToBeExploredFrom.empty() &&
      getAnchorScope()->size() == AssumedLiveBlocks.size() &&
      llvm::all_of(KnownDeadEnds, [](const Instruction *DeadEndI) {
        return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
      }))
    return indicatePessimisticFixpoint();
  return Change;
}
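// Note on the final check (sketch): once exploration finishes with every
// block assumed live and only genuine dead ends (successor-less terminators),
// the AA gives up its state on purpose; an "invalid" state answers all
// liveness queries conservatively (nothing is dead) without further lookups,
// which is cheaper than maintaining a fully-live state.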
/// Liveness information for a call sites.
struct AAIsDeadCallSite final : AAIsDeadFunction {
  AAIsDeadCallSite(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFunction(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // TODO: Once we have call site specific value information we can provide
    //       call site specific liveness information and then it makes sense
    //       to specialize attributes for call sites instead of redirecting
    //       requests to the callee.
    llvm_unreachable("Abstract attributes for liveness are not "
                     "supported for call sites yet!");
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};
/// -------------------- Dereferenceable Argument Attribute --------------------

struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(),
               {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
               Attrs, /* IgnoreSubsumingPositions */ false);
    for (const Attribute &Attr : Attrs)
      takeKnownDerefBytesMaximum(Attr.getValueAsInt());

    // Ensure we initialize the non-null AA (if necessary).
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);

    bool CanBeNull, CanBeFreed;
    takeKnownDerefBytesMaximum(V.getPointerDereferenceableBytes(
        A.getDataLayout(), CanBeNull, CanBeFreed));

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See AbstractAttribute::getState()
  /// {
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }
  /// }

  /// Helper function for collecting accessed bytes in must-be-executed-context
  void addAccessedBytesForUse(Attributor &A, const Use *U, const Instruction *I,
                              DerefState &State) {
    const Value *UseV = U->get();