55#include "llvm/IR/IntrinsicsAMDGPU.h"
56#include "llvm/IR/IntrinsicsNVPTX.h"
81#define DEBUG_TYPE "attributor"
85 cl::desc(
"Manifest Attributor internal string attributes."),
98 cl::desc(
"Maximum number of potential values to be "
99 "tracked for each position."),
104 "attributor-max-potential-values-iterations",
cl::Hidden,
106 "Maximum number of iterations we keep dismantling potential values."),
109STATISTIC(NumAAs,
"Number of abstract attributes created");
124#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
125 ("Number of " #TYPE " marked '" #NAME "'")
126#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
127#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
128#define STATS_DECL(NAME, TYPE, MSG) \
129 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
130#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
131#define STATS_DECLTRACK(NAME, TYPE, MSG) \
133 STATS_DECL(NAME, TYPE, MSG) \
134 STATS_TRACK(NAME, TYPE) \
136#define STATS_DECLTRACK_ARG_ATTR(NAME) \
137 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
138#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
139 STATS_DECLTRACK(NAME, CSArguments, \
140 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
141#define STATS_DECLTRACK_FN_ATTR(NAME) \
142 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
143#define STATS_DECLTRACK_CS_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
145#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
146 STATS_DECLTRACK(NAME, FunctionReturn, \
147 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
148#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
149 STATS_DECLTRACK(NAME, CSReturn, \
150 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
151#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
152 STATS_DECLTRACK(NAME, Floating, \
153 ("Number of floating values known to be '" #NAME "'"))
158#define PIPE_OPERATOR(CLASS) \
159 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
160 return OS << static_cast<const AbstractAttribute &>(AA); \
215 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
218 auto *BB =
I->getParent();
224 return !HeaderOnly || BB ==
C->getHeader();
235 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
240 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
244 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
247 if (!isa<StructType>(Ty))
260 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
270 bool AllowVolatile) {
271 if (!AllowVolatile &&
I->isVolatile())
274 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
275 return LI->getPointerOperand();
278 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
279 return SI->getPointerOperand();
282 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
283 return CXI->getPointerOperand();
286 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
287 return RMWI->getPointerOperand();
309 bool GetMinOffset,
bool AllowNonInbounds,
310 bool UseAssumed =
false) {
312 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
317 UseAssumed ? DepClassTy::OPTIONAL
319 if (!ValueConstantRangeAA)
323 if (Range.isFullSet())
329 ROffset = Range.getSignedMin();
331 ROffset = Range.getSignedMax();
342 const Value *
Ptr, int64_t &BytesOffset,
344 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
347 true, AllowNonInbounds);
355template <
typename AAType,
typename StateType =
typename AAType::StateType,
357 bool RecurseForSelectAndPHI =
true>
359 Attributor &
A,
const AAType &QueryingAA, StateType &S,
361 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
362 << QueryingAA <<
" into " << S <<
"\n");
364 assert((QueryingAA.getIRPosition().getPositionKind() ==
366 QueryingAA.getIRPosition().getPositionKind() ==
368 "Can only clamp returned value states for a function returned or call "
369 "site returned position!");
373 std::optional<StateType>
T;
376 auto CheckReturnValue = [&](
Value &RV) ->
bool {
381 return AA::hasAssumedIRAttr<IRAttributeKind>(
382 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
386 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
390 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
391 const StateType &AAS = AA->getState();
393 T = StateType::getBestState(AAS);
395 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
397 return T->isValidState();
400 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
401 AA::ValueScope::Intraprocedural,
402 RecurseForSelectAndPHI))
403 S.indicatePessimisticFixpoint();
410template <
typename AAType,
typename BaseType,
411 typename StateType =
typename BaseType::StateType,
412 bool PropagateCallBaseContext =
false,
414 bool RecurseForSelectAndPHI =
true>
415struct AAReturnedFromReturnedValues :
public BaseType {
421 StateType S(StateType::getBestState(this->getState()));
422 clampReturnedValueStates<AAType, StateType, IRAttributeKind, RecurseForSelectAndPHI>(
424 PropagateCallBaseContext ? this->getCallBaseContext() :
nullptr);
427 return clampStateAndIndicateChange<StateType>(this->getState(), S);
433template <
typename AAType,
typename StateType =
typename AAType::StateType,
435static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
437 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
438 << QueryingAA <<
" into " << S <<
"\n");
440 assert(QueryingAA.getIRPosition().getPositionKind() ==
442 "Can only clamp call site argument states for an argument position!");
446 std::optional<StateType>
T;
449 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
461 return AA::hasAssumedIRAttr<IRAttributeKind>(
462 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
466 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
469 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
470 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
472 const StateType &AAS = AA->getState();
474 T = StateType::getBestState(AAS);
476 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
478 return T->isValidState();
481 bool UsedAssumedInformation =
false;
482 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
483 UsedAssumedInformation))
484 S.indicatePessimisticFixpoint();
491template <
typename AAType,
typename BaseType,
492 typename StateType =
typename AAType::StateType,
494bool getArgumentStateFromCallBaseContext(
Attributor &
A,
498 "Expected an 'argument' position !");
504 assert(ArgNo >= 0 &&
"Invalid Arg No!");
510 return AA::hasAssumedIRAttr<IRAttributeKind>(
511 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
515 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
518 const StateType &CBArgumentState =
519 static_cast<const StateType &
>(AA->getState());
521 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
522 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
526 State ^= CBArgumentState;
531template <
typename AAType,
typename BaseType,
532 typename StateType =
typename AAType::StateType,
533 bool BridgeCallBaseContext =
false,
535struct AAArgumentFromCallSiteArguments :
public BaseType {
541 StateType S = StateType::getBestState(this->getState());
543 if (BridgeCallBaseContext) {
545 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
547 A, *
this, this->getIRPosition(), S);
549 return clampStateAndIndicateChange<StateType>(this->getState(), S);
551 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
556 return clampStateAndIndicateChange<StateType>(this->getState(), S);
561template <
typename AAType,
typename BaseType,
562 typename StateType =
typename BaseType::StateType,
563 bool IntroduceCallBaseContext =
false,
565struct AACalleeToCallSite :
public BaseType {
570 auto IRPKind = this->getIRPosition().getPositionKind();
573 "Can only wrap function returned positions for call site "
574 "returned positions!");
575 auto &S = this->getState();
577 CallBase &CB = cast<CallBase>(this->getAnchorValue());
578 if (IntroduceCallBaseContext)
579 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
584 for (
const Function *Callee : Callees) {
588 IntroduceCallBaseContext ? &CB :
nullptr)
590 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
594 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
595 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
601 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
605 if (S.isAtFixpoint())
606 return S.isValidState();
610 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
611 return S.indicatePessimisticFixpoint();
617template <
class AAType,
typename StateType =
typename AAType::StateType>
618static void followUsesInContext(AAType &AA,
Attributor &
A,
623 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
624 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
626 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
628 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
629 for (
const Use &Us : UserI->
uses())
644template <
class AAType,
typename StateType =
typename AAType::StateType>
645static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
648 A.getInfoCache().getMustBeExecutedContextExplorer();
654 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
657 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
659 if (S.isAtFixpoint())
664 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
665 if (Br->isConditional())
704 StateType ParentState;
708 ParentState.indicateOptimisticFixpoint();
710 for (
const BasicBlock *BB : Br->successors()) {
711 StateType ChildState;
713 size_t BeforeSize =
Uses.size();
714 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
717 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
720 ParentState &= ChildState;
733namespace PointerInfo {
794 R.indicatePessimisticFixpoint();
888 if (!Range.mayOverlap(ItRange))
890 bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
891 for (
auto Index : It.getSecond()) {
893 if (!CB(Access, IsExact))
913 for (
unsigned Index : LocalList->getSecond()) {
916 if (Range.offsetAndSizeAreUnknown())
932 RemoteI = RemoteI ? RemoteI : &
I;
936 bool AccExists =
false;
938 for (
auto Index : LocalList) {
940 if (
A.getLocalInst() == &
I) {
949 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
951 for (
auto Key : ToAdd) {
960 "New Access should have been at AccIndex");
961 LocalList.push_back(AccIndex);
975 auto &ExistingRanges =
Before.getRanges();
976 auto &NewRanges = Current.getRanges();
983 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
990 "Expected bin to actually contain the Access.");
1009 using const_iterator = VecTy::const_iterator;
1012 const_iterator begin()
const {
return Offsets.begin(); }
1013 const_iterator end()
const {
return Offsets.end(); }
1016 return Offsets ==
RHS.Offsets;
1022 bool isUnassigned()
const {
return Offsets.size() == 0; }
1024 bool isUnknown()
const {
1037 void addToAll(int64_t Inc) {
1038 for (
auto &
Offset : Offsets) {
1047 void merge(
const OffsetInfo &R) {
Offsets.append(
R.Offsets); }
1062struct AAPointerInfoImpl
1063 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1068 const std::string getAsStr(
Attributor *
A)
const override {
1069 return std::string(
"PointerInfo ") +
1070 (isValidState() ? (std::string(
"#") +
1071 std::to_string(OffsetBins.
size()) +
" bins")
1077 return AAPointerInfo::manifest(
A);
1080 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
1081 virtual const_bin_iterator
end()
const override {
return State::end(); }
1082 virtual int64_t numOffsetBins()
const override {
1083 return State::numOffsetBins();
1086 bool forallInterferingAccesses(
1090 return State::forallInterferingAccesses(Range, CB);
1093 bool forallInterferingAccesses(
1095 bool FindInterferingWrites,
bool FindInterferingReads,
1096 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1098 function_ref<
bool(
const Access &)> SkipCB)
const override {
1099 HasBeenWrittenTo =
false;
1106 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1111 bool AllInSameNoSyncFn = IsAssumedNoSync;
1112 bool InstIsExecutedByInitialThreadOnly =
1113 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1120 bool InstIsExecutedInAlignedRegion =
1121 FindInterferingReads && ExecDomainAA &&
1122 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1124 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1125 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1128 bool IsThreadLocalObj =
1137 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1138 if (IsThreadLocalObj || AllInSameNoSyncFn)
1140 const auto *FnExecDomainAA =
1141 I.getFunction() == &
Scope
1146 if (!FnExecDomainAA)
1148 if (InstIsExecutedInAlignedRegion ||
1149 (FindInterferingWrites &&
1150 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1151 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1154 if (InstIsExecutedByInitialThreadOnly &&
1155 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1156 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1165 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1166 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1167 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1168 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1172 bool IsKnownNoRecurse;
1173 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1180 bool InstInKernel =
Scope.hasFnAttribute(
"kernel");
1181 bool ObjHasKernelLifetime =
false;
1182 const bool UseDominanceReasoning =
1183 FindInterferingWrites && IsKnownNoRecurse;
1194 case AA::GPUAddressSpace::Shared:
1195 case AA::GPUAddressSpace::Constant:
1196 case AA::GPUAddressSpace::Local:
1208 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1210 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1215 bool IsKnownNoRecurse;
1216 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1218 IsKnownNoRecurse)) {
1219 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1221 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1224 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1225 if (ObjHasKernelLifetime)
1226 IsLiveInCalleeCB = [](
const Function &Fn) {
1227 return !Fn.hasFnAttribute(
"kernel");
1235 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1237 bool AccInSameScope = AccScope == &
Scope;
1241 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1245 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1246 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1247 ExclusionSet.
insert(Acc.getRemoteInst());
1250 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1251 (!FindInterferingReads || !Acc.isRead()))
1254 bool Dominates = FindInterferingWrites && DT && Exact &&
1255 Acc.isMustAccess() && AccInSameScope &&
1258 DominatingWrites.
insert(&Acc);
1262 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1264 InterferingAccesses.
push_back({&Acc, Exact});
1267 if (!State::forallInterferingAccesses(
I, AccessCB, Range))
1270 HasBeenWrittenTo = !DominatingWrites.
empty();
1274 for (
const Access *Acc : DominatingWrites) {
1275 if (!LeastDominatingWriteInst) {
1276 LeastDominatingWriteInst = Acc->getRemoteInst();
1277 }
else if (DT->
dominates(LeastDominatingWriteInst,
1278 Acc->getRemoteInst())) {
1279 LeastDominatingWriteInst = Acc->getRemoteInst();
1284 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1285 if (SkipCB && SkipCB(Acc))
1287 if (!CanIgnoreThreading(Acc))
1293 bool ReadChecked = !FindInterferingReads;
1294 bool WriteChecked = !FindInterferingWrites;
1300 &ExclusionSet, IsLiveInCalleeCB))
1305 if (!WriteChecked) {
1307 &ExclusionSet, IsLiveInCalleeCB))
1308 WriteChecked =
true;
1322 if (!WriteChecked && HasBeenWrittenTo &&
1323 Acc.getRemoteInst()->getFunction() != &
Scope) {
1333 if (!FnReachabilityAA ||
1334 !FnReachabilityAA->instructionCanReach(
1335 A, *LeastDominatingWriteInst,
1336 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1337 WriteChecked =
true;
1343 if (ReadChecked && WriteChecked)
1346 if (!DT || !UseDominanceReasoning)
1348 if (!DominatingWrites.count(&Acc))
1350 return LeastDominatingWriteInst != Acc.getRemoteInst();
1355 for (
auto &It : InterferingAccesses) {
1356 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1357 !CanSkipAccess(*It.first, It.second)) {
1358 if (!UserCB(*It.first, It.second))
1368 using namespace AA::PointerInfo;
1370 return indicatePessimisticFixpoint();
1372 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1373 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1377 const auto &State = OtherAAImpl.getState();
1378 for (
const auto &It : State) {
1379 for (
auto Index : It.getSecond()) {
1380 const auto &RAcc = State.getAccess(
Index);
1381 if (IsByval && !RAcc.isRead())
1383 bool UsedAssumedInformation =
false;
1385 auto Content =
A.translateArgumentToCallSiteContent(
1386 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1387 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1388 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1390 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1391 RAcc.getType(), RAcc.getRemoteInst());
1398 const OffsetInfo &Offsets,
CallBase &CB) {
1399 using namespace AA::PointerInfo;
1401 return indicatePessimisticFixpoint();
1403 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1407 const auto &State = OtherAAImpl.getState();
1408 for (
const auto &It : State) {
1409 for (
auto Index : It.getSecond()) {
1410 const auto &RAcc = State.getAccess(
Index);
1411 for (
auto Offset : Offsets) {
1415 if (!NewRanges.isUnknown()) {
1416 NewRanges.addToAllOffsets(
Offset);
1419 addAccess(
A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
1420 RAcc.getType(), RAcc.getRemoteInst());
1429 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1433 for (
auto &It : OffsetBins) {
1434 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1435 <<
"] : " << It.getSecond().size() <<
"\n";
1436 for (
auto AccIndex : It.getSecond()) {
1437 auto &Acc = AccessList[AccIndex];
1438 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1439 if (Acc.getLocalInst() != Acc.getRemoteInst())
1440 O <<
" --> " << *Acc.getRemoteInst()
1442 if (!Acc.isWrittenValueYetUndetermined()) {
1443 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1444 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1446 else if (Acc.getWrittenValue())
1447 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1449 O <<
" - c: <unknown>\n";
1456struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1459 : AAPointerInfoImpl(IRP,
A) {}
1466 using namespace AA::PointerInfo;
1469 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1479 if (!VT || VT->getElementCount().isScalable() ||
1481 (*Content)->getType() != VT ||
1482 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1492 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1493 auto *ConstContent = cast<Constant>(*
Content);
1497 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1499 ConstContent, ConstantInt::get(
Int32Ty, i));
1502 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1506 for (
auto &ElementOffset : ElementOffsets)
1507 ElementOffset += ElementSize;
1521 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1525 void trackStatistics()
const override {
1526 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1530bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1533 const OffsetInfo &PtrOI,
1535 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1539 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1540 "Don't look for constant values if the offset has already been "
1541 "determined to be unknown.");
1543 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1549 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1553 Union.addToAll(ConstantOffset.getSExtValue());
1558 for (
const auto &VI : VariableOffsets) {
1561 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1567 if (PotentialConstantsAA->undefIsContained())
1575 if (AssumedSet.empty())
1579 for (
const auto &ConstOffset : AssumedSet) {
1580 auto CopyPerOffset =
Union;
1581 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1582 VI.second.getZExtValue());
1583 Product.merge(CopyPerOffset);
1588 UsrOI = std::move(Union);
1593 using namespace AA::PointerInfo;
1596 Value &AssociatedValue = getAssociatedValue();
1599 OffsetInfoMap[&AssociatedValue].
insert(0);
1601 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1610 auto &UsrOI = OffsetInfoMap[Usr];
1611 auto &PtrOI = OffsetInfoMap[CurPtr];
1612 assert(!PtrOI.isUnassigned() &&
1613 "Cannot pass through if the input Ptr was not visited!");
1619 const auto *
F = getAnchorScope();
1624 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
1626 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1628 User *Usr =
U.getUser();
1629 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1632 "The current pointer offset should have been seeded!");
1636 return HandlePassthroughUser(Usr, CurPtr, Follow);
1637 if (
CE->isCompare())
1639 if (!isa<GEPOperator>(CE)) {
1640 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1645 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1648 auto &UsrOI = OffsetInfoMap[Usr];
1649 auto &PtrOI = OffsetInfoMap[CurPtr];
1651 if (UsrOI.isUnknown())
1654 if (PtrOI.isUnknown()) {
1660 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1663 if (isa<PtrToIntInst>(Usr))
1665 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
1666 return HandlePassthroughUser(Usr, CurPtr, Follow);
1671 if (isa<PHINode>(Usr)) {
1674 bool IsFirstPHIUser = !OffsetInfoMap.
count(Usr);
1675 auto &UsrOI = OffsetInfoMap[Usr];
1676 auto &PtrOI = OffsetInfoMap[CurPtr];
1680 if (PtrOI.isUnknown()) {
1681 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1682 << *CurPtr <<
" in " << *Usr <<
"\n");
1683 Follow = !UsrOI.isUnknown();
1689 if (UsrOI == PtrOI) {
1690 assert(!PtrOI.isUnassigned() &&
1691 "Cannot assign if the current Ptr was not visited!");
1692 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1702 auto It = OffsetInfoMap.
find(CurPtrBase);
1703 if (It == OffsetInfoMap.
end()) {
1704 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1705 << *CurPtr <<
" in " << *Usr <<
"\n");
1719 auto BaseOI = It->getSecond();
1720 BaseOI.addToAll(
Offset.getZExtValue());
1721 if (IsFirstPHIUser || BaseOI == UsrOI) {
1722 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1723 <<
" in " << *Usr <<
"\n");
1724 return HandlePassthroughUser(Usr, CurPtr, Follow);
1728 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1729 << *CurPtr <<
" in " << *Usr <<
"\n");
1740 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1748 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1749 OffsetInfoMap[CurPtr].Offsets, Changed,
1754 if (
auto *II = dyn_cast<IntrinsicInst>(&
I))
1755 return II->isAssumeLikeIntrinsic();
1766 }
while (FromI && FromI != ToI);
1772 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1775 if (IntrI.getParent() == BB) {
1776 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1782 if ((*PredIt) != BB)
1787 if (SuccBB == IntrBB)
1789 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1793 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1796 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1802 std::pair<Value *, IntrinsicInst *> Assumption;
1803 for (
const Use &LoadU : LoadI->
uses()) {
1804 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1805 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1807 for (
const Use &CmpU : CmpI->
uses()) {
1808 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1809 if (!IsValidAssume(*IntrI))
1811 int Idx = CmpI->getOperandUse(0) == LoadU;
1812 Assumption = {CmpI->getOperand(
Idx), IntrI};
1817 if (Assumption.first)
1822 if (!Assumption.first || !Assumption.second)
1826 << *Assumption.second <<
": " << *LoadI
1827 <<
" == " << *Assumption.first <<
"\n");
1828 bool UsedAssumedInformation =
false;
1829 std::optional<Value *>
Content =
nullptr;
1830 if (Assumption.first)
1832 A.getAssumedSimplified(*Assumption.first, *
this,
1834 return handleAccess(
1835 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1836 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1841 for (
auto *OtherOp : OtherOps) {
1842 if (OtherOp == CurPtr) {
1845 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1857 bool UsedAssumedInformation =
false;
1858 std::optional<Value *>
Content =
nullptr;
1862 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1866 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1867 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1868 *StoreI->getValueOperand()->getType(),
1869 {StoreI->getValueOperand()}, AccessKind::AK_W);
1870 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1871 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1872 {RMWI->getValOperand()}, AccessKind::AK_RW);
1873 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1874 return HandleStoreLike(
1875 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1876 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1879 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1892 translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB) |
1894 return isValidState();
1896 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1902 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1905 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1906 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1907 if (OffsetInfoMap.
count(NewU)) {
1909 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1910 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1911 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1915 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1917 OffsetInfoMap[NewU] = OffsetInfoMap[OldU];
1920 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1922 true, EquivalentUseCB)) {
1923 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1924 return indicatePessimisticFixpoint();
1928 dbgs() <<
"Accesses by bin after update:\n";
1935struct AAPointerInfoReturned final : AAPointerInfoImpl {
1937 : AAPointerInfoImpl(IRP,
A) {}
1941 return indicatePessimisticFixpoint();
1945 void trackStatistics()
const override {
1946 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1950struct AAPointerInfoArgument final : AAPointerInfoFloating {
1952 : AAPointerInfoFloating(IRP,
A) {}
1955 void trackStatistics()
const override {
1956 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1960struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1962 : AAPointerInfoFloating(IRP,
A) {}
1966 using namespace AA::PointerInfo;
1970 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
1974 LengthVal =
Length->getSExtValue();
1975 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
1978 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
1980 return indicatePessimisticFixpoint();
1983 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
1985 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
1988 dbgs() <<
"Accesses by bin after update:\n";
1999 Argument *Arg = getAssociatedArgument();
2004 if (ArgAA && ArgAA->getState().isValidState())
2005 return translateAndAddStateFromCallee(
A, *ArgAA,
2006 *cast<CallBase>(getCtxI()));
2008 return indicatePessimisticFixpoint();
2011 bool IsKnownNoCapture;
2012 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
2013 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2014 return indicatePessimisticFixpoint();
2016 bool IsKnown =
false;
2018 return ChangeStatus::UNCHANGED;
2021 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2027 void trackStatistics()
const override {
2028 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2032struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2034 : AAPointerInfoFloating(IRP,
A) {}
2037 void trackStatistics()
const override {
2038 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2052 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2053 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2057 const std::string getAsStr(
Attributor *
A)
const override {
2058 return getAssumed() ?
"nounwind" :
"may-unwind";
2064 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2065 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2066 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2069 if (!
I.mayThrow(
true))
2072 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2073 bool IsKnownNoUnwind;
2074 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2081 bool UsedAssumedInformation =
false;
2082 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2083 UsedAssumedInformation))
2084 return indicatePessimisticFixpoint();
2086 return ChangeStatus::UNCHANGED;
2090struct AANoUnwindFunction final :
public AANoUnwindImpl {
2092 : AANoUnwindImpl(IRP,
A) {}
2099struct AANoUnwindCallSite final
2100 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2102 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2113 case Intrinsic::nvvm_barrier0:
2114 case Intrinsic::nvvm_barrier0_and:
2115 case Intrinsic::nvvm_barrier0_or:
2116 case Intrinsic::nvvm_barrier0_popc:
2118 case Intrinsic::amdgcn_s_barrier:
2119 if (ExecutedAligned)
2132 if (
auto *FI = dyn_cast<FenceInst>(
I))
2135 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2142 switch (
I->getOpcode()) {
2143 case Instruction::AtomicRMW:
2144 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2146 case Instruction::Store:
2147 Ordering = cast<StoreInst>(
I)->getOrdering();
2149 case Instruction::Load:
2150 Ordering = cast<LoadInst>(
I)->getOrdering();
2154 "New atomic operations need to be known in the attributor.");
2165 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2166 return !
MI->isVolatile();
2177 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2178 DepClassTy::NONE, IsKnown));
2182 const std::string getAsStr(
Attributor *
A)
const override {
2183 return getAssumed() ?
"nosync" :
"may-sync";
2199 if (
I.mayReadOrWriteMemory())
2204 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2213 bool UsedAssumedInformation =
false;
2214 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2215 UsedAssumedInformation) ||
2216 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2217 UsedAssumedInformation))
2218 return indicatePessimisticFixpoint();
2223struct AANoSyncFunction final :
public AANoSyncImpl {
2225 : AANoSyncImpl(IRP,
A) {}
2232struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2234 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2244struct AANoFreeImpl :
public AANoFree {
2250 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2251 DepClassTy::NONE, IsKnown));
2259 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2261 DepClassTy::REQUIRED, IsKnown);
2264 bool UsedAssumedInformation =
false;
2265 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2266 UsedAssumedInformation))
2267 return indicatePessimisticFixpoint();
2268 return ChangeStatus::UNCHANGED;
2272 const std::string getAsStr(
Attributor *
A)
const override {
2273 return getAssumed() ?
"nofree" :
"may-free";
2277struct AANoFreeFunction final :
public AANoFreeImpl {
2279 : AANoFreeImpl(IRP,
A) {}
2286struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2288 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2295struct AANoFreeFloating : AANoFreeImpl {
2297 : AANoFreeImpl(IRP,
A) {}
2307 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2309 DepClassTy::OPTIONAL, IsKnown))
2310 return ChangeStatus::UNCHANGED;
2312 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2313 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2315 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2323 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2325 DepClassTy::REQUIRED, IsKnown);
2328 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
2329 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
2333 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
2334 isa<ReturnInst>(UserI))
2340 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2341 return indicatePessimisticFixpoint();
2343 return ChangeStatus::UNCHANGED;
2348struct AANoFreeArgument final : AANoFreeFloating {
2350 : AANoFreeFloating(IRP,
A) {}
2357struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2359 : AANoFreeFloating(IRP,
A) {}
2367 Argument *Arg = getAssociatedArgument();
2369 return indicatePessimisticFixpoint();
2372 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2373 DepClassTy::REQUIRED, IsKnown))
2374 return ChangeStatus::UNCHANGED;
2375 return indicatePessimisticFixpoint();
2383struct AANoFreeReturned final : AANoFreeFloating {
2385 : AANoFreeFloating(IRP,
A) {
2400 void trackStatistics()
const override {}
2404struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2406 : AANoFreeFloating(IRP,
A) {}
2409 return ChangeStatus::UNCHANGED;
2420 bool IgnoreSubsumingPositions) {
2422 AttrKinds.
push_back(Attribute::NonNull);
2425 AttrKinds.
push_back(Attribute::Dereferenceable);
2426 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2433 if (!Fn->isDeclaration()) {
2443 bool UsedAssumedInformation =
false;
2444 if (!
A.checkForAllInstructions(
2446 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2450 UsedAssumedInformation))
2461 Attribute::NonNull)});
2466static int64_t getKnownNonNullAndDerefBytesForUse(
2468 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2471 const Value *UseV =
U->get();
2478 if (isa<CastInst>(
I)) {
2483 if (isa<GetElementPtrInst>(
I)) {
2493 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2496 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2513 bool IsKnownNonNull;
2514 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2516 IsNonNull |= IsKnownNonNull;
2523 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2524 Loc->Size.isScalable() ||
I->isVolatile())
2530 if (
Base &&
Base == &AssociatedValue) {
2531 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2533 return std::max(int64_t(0), DerefBytes);
2540 int64_t DerefBytes = Loc->Size.getValue();
2542 return std::max(int64_t(0), DerefBytes);
2553 Value &
V = *getAssociatedValue().stripPointerCasts();
2554 if (isa<ConstantPointerNull>(V)) {
2555 indicatePessimisticFixpoint();
2560 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2566 bool IsNonNull =
false;
2567 bool TrackUse =
false;
2568 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2569 IsNonNull, TrackUse);
2570 State.setKnown(IsNonNull);
2575 const std::string getAsStr(
Attributor *
A)
const override {
2576 return getAssumed() ?
"nonnull" :
"may-null";
2581struct AANonNullFloating :
public AANonNullImpl {
2583 : AANonNullImpl(IRP,
A) {}
2588 bool IsKnownNonNull;
2589 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2590 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2594 bool UsedAssumedInformation =
false;
2595 Value *AssociatedValue = &getAssociatedValue();
2597 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2602 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2606 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2608 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2609 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2612 return ChangeStatus::UNCHANGED;
2613 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2614 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2616 DepClassTy::OPTIONAL, IsKnown) &&
2617 AA::hasAssumedIRAttr<Attribute::NonNull>(
2619 DepClassTy::OPTIONAL, IsKnown))
2620 return ChangeStatus::UNCHANGED;
2627 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2628 return indicatePessimisticFixpoint();
2629 return ChangeStatus::UNCHANGED;
2632 for (
const auto &VAC : Values)
2634 return indicatePessimisticFixpoint();
2636 return ChangeStatus::UNCHANGED;
2644struct AANonNullReturned final
2645 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2646 false, AANonNull::IRAttributeKind, false> {
2653 const std::string getAsStr(
Attributor *
A)
const override {
2654 return getAssumed() ?
"nonnull" :
"may-null";
2662struct AANonNullArgument final
2663 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2665 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2671struct AANonNullCallSiteArgument final : AANonNullFloating {
2673 : AANonNullFloating(IRP,
A) {}
2680struct AANonNullCallSiteReturned final
2681 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2683 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2699 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2700 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2705 const std::string getAsStr(
Attributor *
A)
const override {
2706 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2710struct AAMustProgressFunction final : AAMustProgressImpl {
2712 : AAMustProgressImpl(IRP,
A) {}
2717 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2718 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2720 return indicateOptimisticFixpoint();
2721 return ChangeStatus::UNCHANGED;
2726 bool IsKnownMustProgress;
2727 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2728 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2732 bool AllCallSitesKnown =
true;
2733 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2736 return indicatePessimisticFixpoint();
2738 return ChangeStatus::UNCHANGED;
2742 void trackStatistics()
const override {
2748struct AAMustProgressCallSite final : AAMustProgressImpl {
2750 : AAMustProgressImpl(IRP,
A) {}
2759 bool IsKnownMustProgress;
2760 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2761 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2762 return indicatePessimisticFixpoint();
2763 return ChangeStatus::UNCHANGED;
2767 void trackStatistics()
const override {
2782 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2783 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2788 const std::string getAsStr(
Attributor *
A)
const override {
2789 return getAssumed() ?
"norecurse" :
"may-recurse";
2793struct AANoRecurseFunction final : AANoRecurseImpl {
2795 : AANoRecurseImpl(IRP,
A) {}
2802 bool IsKnownNoRecurse;
2803 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2806 DepClassTy::NONE, IsKnownNoRecurse))
2808 return IsKnownNoRecurse;
2810 bool UsedAssumedInformation =
false;
2811 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2812 UsedAssumedInformation)) {
2818 if (!UsedAssumedInformation)
2819 indicateOptimisticFixpoint();
2820 return ChangeStatus::UNCHANGED;
2825 DepClassTy::REQUIRED);
2826 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2827 return indicatePessimisticFixpoint();
2828 return ChangeStatus::UNCHANGED;
2835struct AANoRecurseCallSite final
2836 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2838 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2853 const std::string getAsStr(
Attributor *
A)
const override {
2854 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2858struct AANonConvergentFunction final : AANonConvergentImpl {
2860 : AANonConvergentImpl(IRP,
A) {}
2866 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2867 CallBase &CB = cast<CallBase>(Inst);
2869 if (!Callee ||
Callee->isIntrinsic()) {
2872 if (
Callee->isDeclaration()) {
2873 return !
Callee->hasFnAttribute(Attribute::Convergent);
2880 bool UsedAssumedInformation =
false;
2881 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2882 UsedAssumedInformation)) {
2883 return indicatePessimisticFixpoint();
2885 return ChangeStatus::UNCHANGED;
2889 if (isKnownNotConvergent() &&
2890 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2891 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2892 return ChangeStatus::CHANGED;
2894 return ChangeStatus::UNCHANGED;
2911 const size_t UBPrevSize = KnownUBInsts.size();
2912 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2916 if (
I.isVolatile() &&
I.mayWriteToMemory())
2920 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2929 "Expected pointer operand of memory accessing instruction");
2933 std::optional<Value *> SimplifiedPtrOp =
2934 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2935 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2937 const Value *PtrOpVal = *SimplifiedPtrOp;
2942 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2943 AssumedNoUBInsts.insert(&
I);
2955 AssumedNoUBInsts.insert(&
I);
2957 KnownUBInsts.insert(&
I);
2966 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2970 auto *BrInst = cast<BranchInst>(&
I);
2973 if (BrInst->isUnconditional())
2978 std::optional<Value *> SimplifiedCond =
2979 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
2980 if (!SimplifiedCond || !*SimplifiedCond)
2982 AssumedNoUBInsts.insert(&
I);
2990 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2999 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3005 if (idx >=
Callee->arg_size())
3017 bool IsKnownNoUndef;
3018 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3019 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3020 if (!IsKnownNoUndef)
3022 bool UsedAssumedInformation =
false;
3023 std::optional<Value *> SimplifiedVal =
3026 if (UsedAssumedInformation)
3028 if (SimplifiedVal && !*SimplifiedVal)
3030 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3031 KnownUBInsts.insert(&
I);
3035 !isa<ConstantPointerNull>(**SimplifiedVal))
3037 bool IsKnownNonNull;
3038 AA::hasAssumedIRAttr<Attribute::NonNull>(
3039 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3041 KnownUBInsts.insert(&
I);
3047 auto &RI = cast<ReturnInst>(
I);
3050 std::optional<Value *> SimplifiedRetValue =
3051 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3052 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3069 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3070 bool IsKnownNonNull;
3071 AA::hasAssumedIRAttr<Attribute::NonNull>(
3075 KnownUBInsts.insert(&
I);
3081 bool UsedAssumedInformation =
false;
3082 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3083 {Instruction::Load, Instruction::Store,
3084 Instruction::AtomicCmpXchg,
3085 Instruction::AtomicRMW},
3086 UsedAssumedInformation,
3088 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3089 UsedAssumedInformation,
3091 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3092 UsedAssumedInformation);
3096 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3098 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3099 bool IsKnownNoUndef;
3100 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3101 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3103 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3104 {Instruction::Ret}, UsedAssumedInformation,
3109 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3110 UBPrevSize != KnownUBInsts.size())
3111 return ChangeStatus::CHANGED;
3112 return ChangeStatus::UNCHANGED;
3116 return KnownUBInsts.count(
I);
3119 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3126 switch (
I->getOpcode()) {
3127 case Instruction::Load:
3128 case Instruction::Store:
3129 case Instruction::AtomicCmpXchg:
3130 case Instruction::AtomicRMW:
3131 return !AssumedNoUBInsts.count(
I);
3132 case Instruction::Br: {
3133 auto *BrInst = cast<BranchInst>(
I);
3134 if (BrInst->isUnconditional())
3136 return !AssumedNoUBInsts.count(
I);
3145 if (KnownUBInsts.empty())
3146 return ChangeStatus::UNCHANGED;
3148 A.changeToUnreachableAfterManifest(
I);
3149 return ChangeStatus::CHANGED;
3153 const std::string getAsStr(
Attributor *
A)
const override {
3154 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3199 bool UsedAssumedInformation =
false;
3200 std::optional<Value *> SimplifiedV =
3203 if (!UsedAssumedInformation) {
3208 KnownUBInsts.insert(
I);
3209 return std::nullopt;
3215 if (isa<UndefValue>(V)) {
3216 KnownUBInsts.insert(
I);
3217 return std::nullopt;
3223struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3225 : AAUndefinedBehaviorImpl(IRP,
A) {}
3228 void trackStatistics()
const override {
3230 "Number of instructions known to have UB");
3232 KnownUBInsts.size();
3253 if (SCCI.hasCycle())
3263 for (
auto *L : LI->getLoopsInPreorder()) {
3277 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3278 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3283 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3284 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3289 return IsKnown || !KnownOnly;
3295 if (isImpliedByMustprogressAndReadonly(
A,
false))
3296 return ChangeStatus::UNCHANGED;
3301 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3302 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3308 bool IsKnownNoRecurse;
3309 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3310 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3313 bool UsedAssumedInformation =
false;
3314 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3315 UsedAssumedInformation))
3316 return indicatePessimisticFixpoint();
3318 return ChangeStatus::UNCHANGED;
3322 const std::string getAsStr(
Attributor *
A)
const override {
3323 return getAssumed() ?
"willreturn" :
"may-noreturn";
3327struct AAWillReturnFunction final : AAWillReturnImpl {
3329 : AAWillReturnImpl(IRP,
A) {}
3333 AAWillReturnImpl::initialize(
A);
3336 assert(
F &&
"Did expect an anchor function");
3337 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3338 indicatePessimisticFixpoint();
3346struct AAWillReturnCallSite final
3347 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3349 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3353 if (isImpliedByMustprogressAndReadonly(
A,
false))
3354 return ChangeStatus::UNCHANGED;
3356 return AACalleeToCallSite::updateImpl(
A);
3378 const ToTy *To =
nullptr;
3388 assert(Hash == 0 &&
"Computed hash twice!");
3393 InstSetDMI::getHashValue(ExclusionSet));
3403 :
From(&
From), To(&To), ExclusionSet(ES) {
3405 if (!ES || ES->
empty()) {
3406 ExclusionSet =
nullptr;
3407 }
else if (MakeUnique) {
3408 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3413 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3426 return &TombstoneKey;
3433 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3435 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3439#define DefineKeys(ToTy) \
3441 ReachabilityQueryInfo<ToTy> \
3442 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3443 ReachabilityQueryInfo<ToTy>( \
3444 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3445 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3447 ReachabilityQueryInfo<ToTy> \
3448 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3449 ReachabilityQueryInfo<ToTy>( \
3450 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3451 DenseMapInfo<const ToTy *>::getTombstoneKey());
3460template <
typename BaseTy,
typename ToTy>
3461struct CachedReachabilityAA :
public BaseTy {
3467 bool isQueryAA()
const override {
return true; }
3472 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3473 RQITy *RQI = QueryVector[
u];
3474 if (RQI->Result == RQITy::Reachable::No &&
3475 isReachableImpl(
A, *RQI,
false))
3476 Changed = ChangeStatus::CHANGED;
3481 virtual bool isReachableImpl(
Attributor &
A, RQITy &RQI,
3482 bool IsTemporaryRQI) = 0;
3485 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3490 QueryCache.erase(&RQI);
3496 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3497 RQITy PlainRQI(RQI.From, RQI.To);
3498 if (!QueryCache.count(&PlainRQI)) {
3499 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3501 QueryVector.push_back(RQIPtr);
3502 QueryCache.insert(RQIPtr);
3507 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3508 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3509 "Did not expect empty set!");
3510 RQITy *RQIPtr =
new (
A.Allocator)
3511 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3512 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3514 assert(!QueryCache.count(RQIPtr));
3515 QueryVector.push_back(RQIPtr);
3516 QueryCache.insert(RQIPtr);
3519 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3520 A.registerForUpdate(*
this);
3521 return Result == RQITy::Reachable::Yes;
3524 const std::string getAsStr(
Attributor *
A)
const override {
3526 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3529 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3530 typename RQITy::Reachable &
Result) {
3531 if (!this->getState().isValidState()) {
3532 Result = RQITy::Reachable::Yes;
3538 if (StackRQI.ExclusionSet) {
3539 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3540 auto It = QueryCache.find(&PlainRQI);
3541 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3542 Result = RQITy::Reachable::No;
3547 auto It = QueryCache.find(&StackRQI);
3548 if (It != QueryCache.end()) {
3555 QueryCache.insert(&StackRQI);
3564struct AAIntraFnReachabilityFunction final
3565 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3566 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3573 bool isAssumedReachable(
3576 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3580 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3581 typename RQITy::Reachable
Result;
3582 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3583 return NonConstThis->isReachableImpl(
A, StackRQI,
3585 return Result == RQITy::Reachable::Yes;
3592 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3595 [&](
const auto &DeadEdge) {
3596 return LivenessAA->isEdgeDead(DeadEdge.first,
3600 return LivenessAA->isAssumedDead(BB);
3602 return ChangeStatus::UNCHANGED;
3606 return Base::updateImpl(
A);
3610 bool IsTemporaryRQI)
override {
3612 bool UsedExclusionSet =
false;
3617 while (IP && IP != &To) {
3618 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3619 UsedExclusionSet =
true;
3630 "Not an intra-procedural query!");
3634 if (FromBB == ToBB &&
3635 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3636 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3641 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3642 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3647 if (RQI.ExclusionSet)
3648 for (
auto *
I : *RQI.ExclusionSet)
3649 if (
I->getFunction() == Fn)
3650 ExclusionBlocks.
insert(
I->getParent());
3653 if (ExclusionBlocks.
count(FromBB) &&
3656 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3659 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3660 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3661 DeadBlocks.insert(ToBB);
3662 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3671 while (!Worklist.
empty()) {
3673 if (!Visited.
insert(BB).second)
3676 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3677 LocalDeadEdges.
insert({BB, SuccBB});
3682 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3685 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3688 if (ExclusionBlocks.
count(SuccBB)) {
3689 UsedExclusionSet =
true;
3696 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3697 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3702 void trackStatistics()
const override {}
3722 bool IgnoreSubsumingPositions) {
3723 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3724 "Unexpected attribute kind");
3727 if (isa<AllocaInst>(Val))
3730 IgnoreSubsumingPositions =
true;
3733 if (isa<UndefValue>(Val))
3736 if (isa<ConstantPointerNull>(Val) &&
3741 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3742 IgnoreSubsumingPositions, Attribute::NoAlias))
3752 "Noalias is a pointer attribute");
3755 const std::string getAsStr(
Attributor *
A)
const override {
3756 return getAssumed() ?
"noalias" :
"may-alias";
3761struct AANoAliasFloating final : AANoAliasImpl {
3763 : AANoAliasImpl(IRP,
A) {}
3768 return indicatePessimisticFixpoint();
3772 void trackStatistics()
const override {
3778struct AANoAliasArgument final
3779 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3780 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3792 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3794 DepClassTy::OPTIONAL, IsKnownNoSycn))
3795 return Base::updateImpl(
A);
3800 return Base::updateImpl(
A);
3804 bool UsedAssumedInformation =
false;
3805 if (
A.checkForAllCallSites(
3807 true, UsedAssumedInformation))
3808 return Base::updateImpl(
A);
3816 return indicatePessimisticFixpoint();
3823struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3825 : AANoAliasImpl(IRP,
A) {}
3831 const CallBase &CB,
unsigned OtherArgNo) {
3833 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3845 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3846 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3853 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3855 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3856 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3862 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3866 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3868 "callsite arguments: "
3869 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3870 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3875 bool isKnownNoAliasDueToNoAliasPreservation(
3895 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3906 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3911 bool IsKnownNoCapture;
3912 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
3914 DepClassTy::OPTIONAL, IsKnownNoCapture))
3920 A, *UserI, *getCtxI(), *
this,
nullptr,
3921 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3929 case UseCaptureKind::NO_CAPTURE:
3931 case UseCaptureKind::MAY_CAPTURE:
3935 case UseCaptureKind::PASSTHROUGH:
3942 bool IsKnownNoCapture;
3944 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
3945 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3946 if (!IsAssumedNoCapture &&
3948 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3950 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3951 <<
" cannot be noalias as it is potentially captured\n");
3956 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3961 const auto &CB = cast<CallBase>(getAnchorValue());
3962 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3963 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
3973 auto *MemBehaviorAA =
3976 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3977 return ChangeStatus::UNCHANGED;
3980 bool IsKnownNoAlias;
3982 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
3983 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
3985 <<
" is not no-alias at the definition\n");
3986 return indicatePessimisticFixpoint();
3990 if (MemBehaviorAA &&
3991 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
3993 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
3994 return ChangeStatus::UNCHANGED;
3997 return indicatePessimisticFixpoint();
4005struct AANoAliasReturned final : AANoAliasImpl {
4007 : AANoAliasImpl(IRP,
A) {}
4012 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4013 if (
Constant *
C = dyn_cast<Constant>(&RV))
4014 if (
C->isNullValue() || isa<UndefValue>(
C))
4019 if (!isa<CallBase>(&RV))
4023 bool IsKnownNoAlias;
4024 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4025 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4028 bool IsKnownNoCapture;
4030 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
4031 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4033 return IsAssumedNoCapture ||
4037 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4038 return indicatePessimisticFixpoint();
4040 return ChangeStatus::UNCHANGED;
4048struct AANoAliasCallSiteReturned final
4049 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4051 : AACalleeToCallSite<
AANoAlias, AANoAliasImpl>(IRP,
A) {}
4061struct AAIsDeadValueImpl :
public AAIsDead {
4065 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4068 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4071 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4074 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4077 bool isAssumedDead(
const Instruction *
I)
const override {
4078 return I == getCtxI() && isAssumedDead();
4082 bool isKnownDead(
const Instruction *
I)
const override {
4083 return isAssumedDead(
I) && isKnownDead();
4087 const std::string getAsStr(
Attributor *
A)
const override {
4088 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4094 if (
V.getType()->isVoidTy() ||
V.use_empty())
4098 if (!isa<Constant>(V)) {
4099 if (
auto *
I = dyn_cast<Instruction>(&V))
4100 if (!
A.isRunOn(*
I->getFunction()))
4102 bool UsedAssumedInformation =
false;
4103 std::optional<Constant *>
C =
4104 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4109 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4114 return A.checkForAllUses(UsePred, *
this, V,
false,
4115 DepClassTy::REQUIRED,
4124 auto *CB = dyn_cast<CallBase>(
I);
4125 if (!CB || isa<IntrinsicInst>(CB))
4130 bool IsKnownNoUnwind;
4131 if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4132 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4140struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4142 : AAIsDeadValueImpl(IRP,
A) {}
4146 AAIsDeadValueImpl::initialize(
A);
4148 if (isa<UndefValue>(getAssociatedValue())) {
4149 indicatePessimisticFixpoint();
4153 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4154 if (!isAssumedSideEffectFree(
A,
I)) {
4155 if (!isa_and_nonnull<StoreInst>(
I) && !isa_and_nonnull<FenceInst>(
I))
4156 indicatePessimisticFixpoint();
4158 removeAssumedBits(HAS_NO_EFFECT);
4165 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4167 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4174 if (
SI.isVolatile())
4180 bool UsedAssumedInformation =
false;
4181 if (!AssumeOnlyInst) {
4182 PotentialCopies.clear();
4184 UsedAssumedInformation)) {
4187 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4191 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4192 <<
" potential copies.\n");
4197 UsedAssumedInformation))
4199 if (
auto *LI = dyn_cast<LoadInst>(V)) {
4201 auto &UserI = cast<Instruction>(*U.getUser());
4202 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4204 AssumeOnlyInst->insert(&UserI);
4207 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4213 <<
" is assumed live!\n");
4219 const std::string getAsStr(
Attributor *
A)
const override {
4220 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4221 if (isa_and_nonnull<StoreInst>(
I))
4223 return "assumed-dead-store";
4224 if (isa_and_nonnull<FenceInst>(
I))
4226 return "assumed-dead-fence";
4227 return AAIsDeadValueImpl::getAsStr(
A);
4232 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4233 if (
auto *SI = dyn_cast_or_null<StoreInst>(
I)) {
4234 if (!isDeadStore(
A, *SI))
4235 return indicatePessimisticFixpoint();
4236 }
else if (
auto *FI = dyn_cast_or_null<FenceInst>(
I)) {
4237 if (!isDeadFence(
A, *FI))
4238 return indicatePessimisticFixpoint();
4240 if (!isAssumedSideEffectFree(
A,
I))
4241 return indicatePessimisticFixpoint();
4242 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4243 return indicatePessimisticFixpoint();
4248 bool isRemovableStore()
const override {
4249 return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
4254 Value &
V = getAssociatedValue();
4255 if (
auto *
I = dyn_cast<Instruction>(&V)) {
4260 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
4262 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4265 A.deleteAfterManifest(*
I);
4266 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4268 for (
auto *Usr : AOI->
users())
4269 AssumeOnlyInst.
insert(cast<Instruction>(Usr));
4270 A.deleteAfterManifest(*AOI);
4274 if (
auto *FI = dyn_cast<FenceInst>(
I)) {
4276 A.deleteAfterManifest(*FI);
4279 if (isAssumedSideEffectFree(
A,
I) && !isa<InvokeInst>(
I)) {
4280 A.deleteAfterManifest(*
I);
4288 void trackStatistics()
const override {
4297struct AAIsDeadArgument :
public AAIsDeadFloating {
4299 : AAIsDeadFloating(IRP,
A) {}
4303 Argument &Arg = *getAssociatedArgument();
4304 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4305 if (
A.registerFunctionSignatureRewrite(
4309 return ChangeStatus::CHANGED;
4311 return ChangeStatus::UNCHANGED;
4318struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4320 : AAIsDeadValueImpl(IRP,
A) {}
4324 AAIsDeadValueImpl::initialize(
A);
4325 if (isa<UndefValue>(getAssociatedValue()))
4326 indicatePessimisticFixpoint();
4335 Argument *Arg = getAssociatedArgument();
4337 return indicatePessimisticFixpoint();
4339 auto *ArgAA =
A.getAAFor<
AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4341 return indicatePessimisticFixpoint();
4347 CallBase &CB = cast<CallBase>(getAnchorValue());
4349 assert(!isa<UndefValue>(
U.get()) &&
4350 "Expected undef values to be filtered out!");
4352 if (
A.changeUseAfterManifest(U, UV))
4353 return ChangeStatus::CHANGED;
4354 return ChangeStatus::UNCHANGED;
4361struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4363 : AAIsDeadFloating(IRP,
A) {}
4366 bool isAssumedDead()
const override {
4367 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4372 AAIsDeadFloating::initialize(
A);
4373 if (isa<UndefValue>(getAssociatedValue())) {
4374 indicatePessimisticFixpoint();
4379 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4385 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4386 IsAssumedSideEffectFree =
false;
4387 Changed = ChangeStatus::CHANGED;
4389 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4390 return indicatePessimisticFixpoint();
4395 void trackStatistics()
const override {
4396 if (IsAssumedSideEffectFree)
4403 const std::string getAsStr(
Attributor *
A)
const override {
4404 return isAssumedDead()
4406 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4410 bool IsAssumedSideEffectFree =
true;
4413struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4415 : AAIsDeadValueImpl(IRP,
A) {}
4420 bool UsedAssumedInformation =
false;
4421 A.checkForAllInstructions([](
Instruction &) {
return true; }, *
this,
4422 {Instruction::Ret}, UsedAssumedInformation);
4425 if (ACS.isCallbackCall() || !ACS.getInstruction())
4427 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4430 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4431 UsedAssumedInformation))
4432 return indicatePessimisticFixpoint();
4434 return ChangeStatus::UNCHANGED;
4440 bool AnyChange =
false;
4448 bool UsedAssumedInformation =
false;
4449 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4450 UsedAssumedInformation);
4451 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4458struct AAIsDeadFunction :
public AAIsDead {
4464 assert(
F &&
"Did expect an anchor function");
4465 if (!isAssumedDeadInternalFunction(
A)) {
4466 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4467 assumeLive(
A,
F->getEntryBlock());
4471 bool isAssumedDeadInternalFunction(
Attributor &
A) {
4472 if (!getAnchorScope()->hasLocalLinkage())
4474 bool UsedAssumedInformation =
false;
4476 true, UsedAssumedInformation);
4480 const std::string getAsStr(
Attributor *
A)
const override {
4481 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4482 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4483 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4484 std::to_string(KnownDeadEnds.size()) +
"]";
4489 assert(getState().isValidState() &&
4490 "Attempted to manifest an invalid state!");
4495 if (AssumedLiveBlocks.empty()) {
4496 A.deleteAfterManifest(
F);
4497 return ChangeStatus::CHANGED;
4503 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4505 KnownDeadEnds.set_union(ToBeExploredFrom);
4506 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4507 auto *CB = dyn_cast<CallBase>(DeadEndI);
4510 bool IsKnownNoReturn;
4511 bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
4514 if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
4517 if (
auto *II = dyn_cast<InvokeInst>(DeadEndI))
4518 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*II));
4520 A.changeToUnreachableAfterManifest(
4521 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4522 HasChanged = ChangeStatus::CHANGED;
4527 if (!AssumedLiveBlocks.count(&BB)) {
4528 A.deleteAfterManifest(BB);
4530 HasChanged = ChangeStatus::CHANGED;
4540 assert(
From->getParent() == getAnchorScope() &&
4542 "Used AAIsDead of the wrong function");
4543 return isValidState() && !AssumedLiveEdges.count(std::make_pair(
From, To));
4547 void trackStatistics()
const override {}
4550 bool isAssumedDead()
const override {
return false; }
4553 bool isKnownDead()
const override {
return false; }
4556 bool isAssumedDead(
const BasicBlock *BB)
const override {
4558 "BB must be in the same anchor scope function.");
4562 return !AssumedLiveBlocks.count(BB);
4566 bool isKnownDead(
const BasicBlock *BB)
const override {
4567 return getKnown() && isAssumedDead(BB);
4571 bool isAssumedDead(
const Instruction *
I)
const override {
4572 assert(
I->getParent()->getParent() == getAnchorScope() &&
4573 "Instruction must be in the same anchor scope function.");
4580 if (!AssumedLiveBlocks.count(
I->getParent()))
4586 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4594 bool isKnownDead(
const Instruction *
I)
const override {
4595 return getKnown() && isAssumedDead(
I);
4601 if (!AssumedLiveBlocks.insert(&BB).second)
4609 if (
const auto *CB = dyn_cast<CallBase>(&
I))
4611 if (
F->hasLocalLinkage())
4612 A.markLiveInternalFunction(*
F);
4636 bool IsKnownNoReturn;
4637 if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
4639 return !IsKnownNoReturn;
4651 bool UsedAssumedInformation =
4652 identifyAliveSuccessors(
A, cast<CallBase>(II), AA, AliveSuccessors);
4657 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.
getFunction())) {
4662 bool IsKnownNoUnwind;
4663 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4665 UsedAssumedInformation |= !IsKnownNoUnwind;
4670 return UsedAssumedInformation;
4677 bool UsedAssumedInformation =
false;
4681 std::optional<Constant *>
C =
4682 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4683 if (!
C || isa_and_nonnull<UndefValue>(*
C)) {
4685 }
else if (isa_and_nonnull<ConstantInt>(*
C)) {
4687 BI.
getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
4692 UsedAssumedInformation =
false;
4695 return UsedAssumedInformation;
4702 bool UsedAssumedInformation =
false;
4706 UsedAssumedInformation)) {
4713 if (Values.
empty() ||
4714 (Values.
size() == 1 &&
4715 isa_and_nonnull<UndefValue>(Values.
front().getValue()))) {
4717 return UsedAssumedInformation;
4720 Type &Ty = *
SI.getCondition()->getType();
4722 auto CheckForConstantInt = [&](
Value *
V) {
4723 if (
auto *CI = dyn_cast_if_present<ConstantInt>(
AA::getWithType(*V, Ty))) {
4731 return CheckForConstantInt(
VAC.getValue());
4735 return UsedAssumedInformation;
4738 unsigned MatchedCases = 0;
4739 for (
const auto &CaseIt :
SI.cases()) {
4740 if (
Constants.count(CaseIt.getCaseValue())) {
4742 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4749 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4750 return UsedAssumedInformation;
4756 if (AssumedLiveBlocks.empty()) {
4757 if (isAssumedDeadInternalFunction(
A))
4761 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4762 assumeLive(
A,
F->getEntryBlock());
4766 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4767 << getAnchorScope()->
size() <<
"] BBs and "
4768 << ToBeExploredFrom.size() <<
" exploration points and "
4769 << KnownDeadEnds.size() <<
" known dead ends\n");
4774 ToBeExploredFrom.end());
4775 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4778 while (!Worklist.
empty()) {
4784 while (!
I->isTerminator() && !isa<CallBase>(
I))
4785 I =
I->getNextNode();
4787 AliveSuccessors.
clear();
4789 bool UsedAssumedInformation =
false;
4790 switch (
I->getOpcode()) {
4794 "Expected non-terminators to be handled already!");
4798 case Instruction::Call:
4799 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<CallInst>(*
I),
4800 *
this, AliveSuccessors);
4802 case Instruction::Invoke:
4803 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<InvokeInst>(*
I),
4804 *
this, AliveSuccessors);
4806 case Instruction::Br:
4807 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<BranchInst>(*
I),
4808 *
this, AliveSuccessors);
4810 case Instruction::Switch:
4811 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<SwitchInst>(*
I),
4812 *
this, AliveSuccessors);
4816 if (UsedAssumedInformation) {
4817 NewToBeExploredFrom.insert(
I);
4818 }
else if (AliveSuccessors.
empty() ||
4819 (
I->isTerminator() &&
4820 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4821 if (KnownDeadEnds.insert(
I))
4826 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4827 << UsedAssumedInformation <<
"\n");
4829 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4830 if (!
I->isTerminator()) {
4831 assert(AliveSuccessors.size() == 1 &&
4832 "Non-terminator expected to have a single successor!");
4836 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4837 if (AssumedLiveEdges.insert(Edge).second)
4839 if (assumeLive(
A, *AliveSuccessor->getParent()))
4846 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4848 return !ToBeExploredFrom.count(I);
4851 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4860 if (ToBeExploredFrom.empty() &&
4861 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4863 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4865 return indicatePessimisticFixpoint();
4870struct AAIsDeadCallSite final : AAIsDeadFunction {
4872 : AAIsDeadFunction(IRP,
A) {}
4881 "supported for call sites yet!");
4886 return indicatePessimisticFixpoint();
4890 void trackStatistics()
const override {}
4904 Value &
V = *getAssociatedValue().stripPointerCasts();
4906 A.getAttrs(getIRPosition(),
4907 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4910 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4913 bool IsKnownNonNull;
4914 AA::hasAssumedIRAttr<Attribute::NonNull>(
4915 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4917 bool CanBeNull, CanBeFreed;
4918 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4919 A.getDataLayout(), CanBeNull, CanBeFreed));
4922 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4927 StateType &getState()
override {
return *
this; }
4928 const StateType &getState()
const override {
return *
this; }
4934 const Value *UseV =
U->get();
4939 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4944 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4945 if (
Base &&
Base == &getAssociatedValue())
4946 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4952 bool IsNonNull =
false;
4953 bool TrackUse =
false;
4954 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4955 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4956 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4957 <<
" for instruction " << *
I <<
"\n");
4959 addAccessedBytesForUse(
A, U,
I, State);
4960 State.takeKnownDerefBytesMaximum(DerefBytes);
4967 bool IsKnownNonNull;
4968 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4969 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4970 if (IsAssumedNonNull &&
4971 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
4972 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
4973 return ChangeStatus::CHANGED;
4981 bool IsKnownNonNull;
4982 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4983 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4984 if (IsAssumedNonNull)
4986 Ctx, getAssumedDereferenceableBytes()));
4989 Ctx, getAssumedDereferenceableBytes()));
4993 const std::string getAsStr(
Attributor *
A)
const override {
4994 if (!getAssumedDereferenceableBytes())
4995 return "unknown-dereferenceable";
4996 bool IsKnownNonNull;
4997 bool IsAssumedNonNull =
false;
4999 IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5000 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5001 return std::string(
"dereferenceable") +
5002 (IsAssumedNonNull ?
"" :
"_or_null") +
5003 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5004 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5005 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5006 (!
A ?
" [non-null is unknown]" :
"");
5011struct AADereferenceableFloating : AADereferenceableImpl {
5013 : AADereferenceableImpl(IRP,
A) {}
5018 bool UsedAssumedInformation =
false;
5020 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5022 Values.
push_back({getAssociatedValue(), getCtxI()});
5025 Stripped = Values.
size() != 1 ||
5026 Values.
front().getValue() != &getAssociatedValue();
5032 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5034 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5042 int64_t DerefBytes = 0;
5043 if (!AA || (!Stripped &&
this == AA)) {
5046 bool CanBeNull, CanBeFreed;
5048 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5049 T.GlobalState.indicatePessimisticFixpoint();
5052 DerefBytes =
DS.DerefBytesState.getAssumed();
5053 T.GlobalState &=
DS.GlobalState;
5059 int64_t OffsetSExt =
Offset.getSExtValue();
5063 T.takeAssumedDerefBytesMinimum(
5064 std::max(int64_t(0), DerefBytes - OffsetSExt));
5069 T.takeKnownDerefBytesMaximum(
5070 std::max(int64_t(0), DerefBytes - OffsetSExt));
5071 T.indicatePessimisticFixpoint();
5072 }
else if (OffsetSExt > 0) {
5078 T.indicatePessimisticFixpoint();
5082 return T.isValidState();
5085 for (
const auto &VAC : Values)
5086 if (!VisitValueCB(*
VAC.getValue()))
5087 return indicatePessimisticFixpoint();
5093 void trackStatistics()
const override {
5099struct AADereferenceableReturned final
5100 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5102 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5107 void trackStatistics()
const override {
5113struct AADereferenceableArgument final
5114 : AAArgumentFromCallSiteArguments<AADereferenceable,
5115 AADereferenceableImpl> {
5117 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5122 void trackStatistics()
const override {
5128struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5130 : AADereferenceableFloating(IRP,
A) {}
5133 void trackStatistics()
const override {
5139struct AADereferenceableCallSiteReturned final
5140 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5141 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5146 void trackStatistics()
const override {
5156 Value &AssociatedValue,
const Use *U,
5160 if (isa<CastInst>(
I)) {
5162 TrackUse = !isa<PtrToIntInst>(
I);
5165 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(
I)) {
5166 if (
GEP->hasAllConstantIndices())
5172 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
5186 const Value *UseV =
U->get();
5187 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
5188 if (
SI->getPointerOperand() == UseV)
5189 MA =
SI->getAlign();
5190 }
else if (
auto *LI = dyn_cast<LoadInst>(
I)) {
5191 if (LI->getPointerOperand() == UseV)
5192 MA = LI->getAlign();
5198 unsigned Alignment = MA->value();
5202 if (
Base == &AssociatedValue) {
5221 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5223 takeKnownMaximum(Attr.getValueAsInt());
5225 Value &
V = *getAssociatedValue().stripPointerCasts();
5226 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5229 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5234 ChangeStatus LoadStoreChanged = ChangeStatus::UNCHANGED;
5237 Value &AssociatedValue = getAssociatedValue();
5238 for (
const Use &U : AssociatedValue.
uses()) {
5239 if (
auto *SI = dyn_cast<StoreInst>(
U.getUser())) {
5240 if (
SI->getPointerOperand() == &AssociatedValue)
5241 if (
SI->getAlign() < getAssumedAlign()) {
5243 "Number of times alignment added to a store");
5244 SI->setAlignment(getAssumedAlign());
5245 LoadStoreChanged = ChangeStatus::CHANGED;
5247 }
else if (
auto *LI = dyn_cast<LoadInst>(
U.getUser())) {
5248 if (LI->getPointerOperand() == &AssociatedValue)
5249 if (LI->getAlign() < getAssumedAlign()) {
5250 LI->setAlignment(getAssumedAlign());
5252 "Number of times alignment added to a load");
5253 LoadStoreChanged = ChangeStatus::CHANGED;
5260 Align InheritAlign =
5261 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5262 if (InheritAlign >= getAssumedAlign())
5263 return LoadStoreChanged;
5264 return Changed | LoadStoreChanged;
5274 if (getAssumedAlign() > 1)
5282 bool TrackUse =
false;
5284 unsigned int KnownAlign =
5285 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5286 State.takeKnownMaximum(KnownAlign);
5292 const std::string getAsStr(
Attributor *
A)
const override {
5293 return "align<" + std::to_string(getKnownAlign().
value()) +
"-" +
5294 std::to_string(getAssumedAlign().
value()) +
">";
5299struct AAAlignFloating : AAAlignImpl {
5307 bool UsedAssumedInformation =
false;
5309 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5311 Values.
push_back({getAssociatedValue(), getCtxI()});
5314 Stripped = Values.
size() != 1 ||
5315 Values.
front().getValue() != &getAssociatedValue();
5319 auto VisitValueCB = [&](
Value &
V) ->
bool {
5320 if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
5323 DepClassTy::REQUIRED);
5324 if (!AA || (!Stripped &&
this == AA)) {
5326 unsigned Alignment = 1;
5339 Alignment =
V.getPointerAlignment(
DL).value();
5342 T.takeKnownMaximum(Alignment);
5343 T.indicatePessimisticFixpoint();
5349 return T.isValidState();
5352 for (
const auto &VAC : Values) {
5353 if (!VisitValueCB(*
VAC.getValue()))
5354 return indicatePessimisticFixpoint();
5367struct AAAlignReturned final
5368 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5369 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5377struct AAAlignArgument final
5378 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5379 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5387 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5388 return ChangeStatus::UNCHANGED;
5389 return Base::manifest(
A);
5396struct AAAlignCallSiteArgument final : AAAlignFloating {
5398 : AAAlignFloating(IRP,
A) {}
5405 if (
Argument *Arg = getAssociatedArgument())
5406 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5407 return ChangeStatus::UNCHANGED;
5409 Align InheritAlign =
5410 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5411 if (InheritAlign >= getAssumedAlign())
5412 Changed = ChangeStatus::UNCHANGED;
5419 if (
Argument *Arg = getAssociatedArgument()) {
5422 const auto *ArgAlignAA =
A.getAAFor<
AAAlign>(
5425 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5435struct AAAlignCallSiteReturned final
5436 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5437 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5454 assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
5455 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5460 const std::string getAsStr(
Attributor *
A)
const override {
5461 return getAssumed() ?
"noreturn" :
"may-return";
5466 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5467 bool UsedAssumedInformation =
false;
5468 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5469 {(unsigned)Instruction::Ret},
5470 UsedAssumedInformation))
5471 return indicatePessimisticFixpoint();
5472 return ChangeStatus::UNCHANGED;
5476struct AANoReturnFunction final : AANoReturnImpl {
5478 : AANoReturnImpl(IRP,
A) {}
5485struct AANoReturnCallSite final
5486 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5488 : AACalleeToCallSite<
AANoReturn, AANoReturnImpl>(IRP,
A) {}
5505 Value &
V = getAssociatedValue();
5506 if (
auto *
C = dyn_cast<Constant>(&V)) {
5507 if (
C->isThreadDependent())
5508 indicatePessimisticFixpoint();
5510 indicateOptimisticFixpoint();
5513 if (
auto *CB = dyn_cast<CallBase>(&V))
5516 indicateOptimisticFixpoint();
5519 if (
auto *
I = dyn_cast<Instruction>(&V)) {
5524 indicatePessimisticFixpoint();
5534 Value &
V = getAssociatedValue();
5536 if (
auto *
I = dyn_cast<Instruction>(&V))
5537 Scope =
I->getFunction();
5538 if (
auto *
A = dyn_cast<Argument>(&V)) {
5540 if (!
Scope->hasLocalLinkage())
5544 return indicateOptimisticFixpoint();
5546 bool IsKnownNoRecurse;
5547 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
5552 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5553 const Instruction *UserI = dyn_cast<Instruction>(
U.getUser());
5554 if (!UserI || isa<GetElementPtrInst>(UserI) || isa<CastInst>(UserI) ||
5555 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
5559 if (isa<LoadInst>(UserI) || isa<CmpInst>(UserI) ||
5560 (isa<StoreInst>(UserI) &&
5561 cast<StoreInst>(UserI)->getValueOperand() !=
U.get()))
5563 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
5567 if (!Callee || !
Callee->hasLocalLinkage())
5573 DepClassTy::OPTIONAL);
5574 if (!ArgInstanceInfoAA ||
5575 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5580 A, *CB, *Scope, *
this,
nullptr,
5588 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5589 if (
auto *SI = dyn_cast<StoreInst>(OldU.
getUser())) {
5590 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5598 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5599 DepClassTy::OPTIONAL,
5600 true, EquivalentUseCB))
5601 return indicatePessimisticFixpoint();
5607 const std::string getAsStr(
Attributor *
A)
const override {
5608 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5612 void trackStatistics()
const override {}
5616struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5618 : AAInstanceInfoImpl(IRP,
A) {}
5622struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5624 : AAInstanceInfoFloating(IRP,
A) {}
5628struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5630 : AAInstanceInfoImpl(IRP,
A) {}
5638 Argument *Arg = getAssociatedArgument();
5640 return indicatePessimisticFixpoint();
5645 return indicatePessimisticFixpoint();
5651struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5653 : AAInstanceInfoImpl(IRP,
A) {
5669struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5671 : AAInstanceInfoFloating(IRP,
A) {}
5678 bool IgnoreSubsumingPositions) {
5679 assert(ImpliedAttributeKind == Attribute::NoCapture &&
5680 "Unexpected attribute kind");
5683 return V.use_empty();
5686 if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
5687 V.getType()->getPointerAddressSpace() == 0)) {
5691 if (
A.hasAttr(IRP, {Attribute::NoCapture},
5692 true, Attribute::NoCapture))
5698 {Attribute::NoCapture, Attribute::ByVal},
5700 A.manifestAttrs(IRP,
5708 determineFunctionCaptureCapabilities(IRP, *
F, State);
5710 A.manifestAttrs(IRP,
5729 bool ReadOnly =
F.onlyReadsMemory();
5730 bool NoThrow =
F.doesNotThrow();
5731 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5732 if (ReadOnly && NoThrow && IsVoidReturn) {
5745 if (NoThrow && IsVoidReturn)
5750 if (!NoThrow || ArgNo < 0 ||
5751 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5754 for (
unsigned U = 0,
E =
F.arg_size(); U <
E; ++U)
5755 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5756 if (U ==
unsigned(ArgNo))
5774 assert(!AA::hasAssumedIRAttr<Attribute::NoCapture>(
5775 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5785 if (!isAssumedNoCaptureMaybeReturned())
5788 if (isArgumentPosition()) {
5789 if (isAssumedNoCapture())
5797 const std::string getAsStr(
Attributor *
A)
const override {
5798 if (isKnownNoCapture())
5799 return "known not-captured";
5800 if (isAssumedNoCapture())
5801 return "assumed not-captured";
5802 if (isKnownNoCaptureMaybeReturned())
5803 return "known not-captured-maybe-returned";
5804 if (isAssumedNoCaptureMaybeReturned())
5805 return "assumed not-captured-maybe-returned";
5806 return "assumed-captured";
5814 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5818 if (isa<PtrToIntInst>(UInst)) {
5820 return isCapturedIn(State,
true,
true,
5826 if (isa<StoreInst>(UInst))
5827 return isCapturedIn(State,
true,
true,
5831 if (isa<ReturnInst>(UInst)) {
5833 return isCapturedIn(State,
false,
false,
5835 return isCapturedIn(State,
true,
true,
5841 auto *CB = dyn_cast<CallBase>(UInst);
5843 return isCapturedIn(State,
true,
true,
5850 bool IsKnownNoCapture;
5852 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
5853 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5855 if (IsAssumedNoCapture)
5856 return isCapturedIn(State,
false,
false,
5860 return isCapturedIn(State,
false,
false,
5865 return isCapturedIn(State,
true,
true,
5873 bool CapturedInInt,
bool CapturedInRet) {
5874 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5875 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5891 return indicatePessimisticFixpoint();
5895 assert(
F &&
"Expected a function!");
5903 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5905 addKnownBits(NOT_CAPTURED_IN_MEM);
5912 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5916 UsedAssumedInformation))
5918 bool SeenConstant =
false;
5920 if (isa<Constant>(
VAC.getValue())) {
5923 SeenConstant =
true;
5924 }
else if (!isa<Argument>(
VAC.getValue()) ||
5925 VAC.getValue() == getAssociatedArgument())
5931 bool IsKnownNoUnwind;
5932 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
5934 bool IsVoidTy =
F->getReturnType()->isVoidTy();
5935 bool UsedAssumedInformation =
false;
5936 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
5937 T.addKnownBits(NOT_CAPTURED_IN_RET);
5938 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
5940 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
5941 addKnownBits(NOT_CAPTURED_IN_RET);
5942 if (isKnown(NOT_CAPTURED_IN_MEM))
5943 return indicateOptimisticFixpoint();
5954 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
5959 return checkUse(
A,
T, U, Follow);
5967 if (!
A.checkForAllUses(UseCheck, *
this, *V))
5968 return indicatePessimisticFixpoint();
5971 auto Assumed = S.getAssumed();
5972 S.intersectAssumedBits(
T.getAssumed());
5973 if (!isAssumedNoCaptureMaybeReturned())
5974 return indicatePessimisticFixpoint();
5980struct AANoCaptureArgument final : AANoCaptureImpl {
5982 : AANoCaptureImpl(IRP,
A) {}
5989struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
5991 : AANoCaptureImpl(IRP,
A) {}
5999 Argument *Arg = getAssociatedArgument();
6001 return indicatePessimisticFixpoint();
6003 bool IsKnownNoCapture;
6005 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
6006 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6008 return ChangeStatus::UNCHANGED;
6010 return indicatePessimisticFixpoint();
6019struct AANoCaptureFloating final : AANoCaptureImpl {
6021 : AANoCaptureImpl(IRP,
A) {}
6024 void trackStatistics()
const override {
6030struct AANoCaptureReturned final : AANoCaptureImpl {
6032 : AANoCaptureImpl(IRP,
A) {
6047 void trackStatistics()
const override {}
6051struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6053 : AANoCaptureImpl(IRP,
A) {}
6059 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6063 void trackStatistics()
const override {
6074 SimplifiedAssociatedValue,
Other, Ty);
6075 if (SimplifiedAssociatedValue == std::optional<Value *>(
nullptr))
6079 if (SimplifiedAssociatedValue)
6080 dbgs() <<
"[ValueSimplify] is assumed to be "
6081 << **SimplifiedAssociatedValue <<
"\n";
6083 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6095 if (getAssociatedValue().
getType()->isVoidTy())
6096 indicatePessimisticFixpoint();
6097 if (
A.hasSimplificationCallback(getIRPosition()))
6098 indicatePessimisticFixpoint();
6102 const std::string getAsStr(
Attributor *
A)
const override {
6104 dbgs() <<
"SAV: " << (
bool)SimplifiedAssociatedValue <<
" ";
6105 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6106 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6108 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6113 void trackStatistics()
const override {}
6116 std::optional<Value *>
6117 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6118 return SimplifiedAssociatedValue;
6129 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6131 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6144 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6145 if (
Check && (
I.mayReadFromMemory() ||
6150 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6152 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6178 if (
const auto &NewV = VMap.
lookup(&V))
6180 bool UsedAssumedInformation =
false;
6181 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6183 if (!SimpleV.has_value())
6187 EffectiveV = *SimpleV;
6188 if (
auto *
C = dyn_cast<Constant>(EffectiveV))
6192 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6193 if (
auto *
I = dyn_cast<Instruction>(EffectiveV))
6194 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6195 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6202 Value *NewV = SimplifiedAssociatedValue
6203 ? *SimplifiedAssociatedValue
6205 if (NewV && NewV != &getAssociatedValue()) {
6209 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6211 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6220 const IRPosition &IRP,
bool Simplify =
true) {
6221 bool UsedAssumedInformation =
false;
6224 QueryingValueSimplified =
A.getAssumedSimplified(
6226 return unionAssumed(QueryingValueSimplified);
6230 template <
typename AAType>
bool askSimplifiedValueFor(
Attributor &
A) {
6231 if (!getAssociatedValue().
getType()->isIntegerTy())
6236 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6240 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6243 SimplifiedAssociatedValue = std::nullopt;
6244 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6247 if (
auto *
C = *COpt) {
6248 SimplifiedAssociatedValue =
C;
6249 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6255 bool askSimplifiedValueForOtherAAs(
Attributor &
A) {
6256 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6258 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6266 for (
auto &U : getAssociatedValue().
uses()) {
6270 if (
auto *
PHI = dyn_cast_or_null<PHINode>(IP))
6271 IP =
PHI->getIncomingBlock(U)->getTerminator();
6272 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6274 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6275 if (
A.changeUseAfterManifest(U, *NewV))
6276 Changed = ChangeStatus::CHANGED;
6280 return Changed | AAValueSimplify::manifest(
A);
6285 SimplifiedAssociatedValue = &getAssociatedValue();
6286 return AAValueSimplify::indicatePessimisticFixpoint();
6290struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6292 : AAValueSimplifyImpl(IRP,
A) {}
6295 AAValueSimplifyImpl::initialize(
A);
6296 if (
A.hasAttr(getIRPosition(),
6297 {Attribute::InAlloca, Attribute::Preallocated,
6298 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6300 indicatePessimisticFixpoint();
6307 Argument *Arg = getAssociatedArgument();
6313 return indicatePessimisticFixpoint();
6316 auto Before = SimplifiedAssociatedValue;
6330 bool UsedAssumedInformation =
false;
6331 std::optional<Constant *> SimpleArgOp =
6332 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6339 return unionAssumed(*SimpleArgOp);
6344 bool UsedAssumedInformation =
false;
6345 if (hasCallBaseContext() &&
6346 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6350 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6351 UsedAssumedInformation);
6354 if (!askSimplifiedValueForOtherAAs(
A))
6355 return indicatePessimisticFixpoint();
6358 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6359 : ChangeStatus ::CHANGED;
6363 void trackStatistics()
const override {
6368struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6370 : AAValueSimplifyImpl(IRP,
A) {}
6373 std::optional<Value *>
6374 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6375 if (!isValidState())
6377 return SimplifiedAssociatedValue;
6382 auto Before = SimplifiedAssociatedValue;
6385 auto &RI = cast<ReturnInst>(
I);
6386 return checkAndUpdate(
6391 bool UsedAssumedInformation =
false;
6392 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6393 UsedAssumedInformation))
6394 if (!askSimplifiedValueForOtherAAs(
A))
6395 return indicatePessimisticFixpoint();
6398 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6399 : ChangeStatus ::CHANGED;
6405 return ChangeStatus::UNCHANGED;
6409 void trackStatistics()
const override {
6414struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6416 : AAValueSimplifyImpl(IRP,
A) {}
6420 AAValueSimplifyImpl::initialize(
A);
6421 Value &
V = getAnchorValue();
6424 if (isa<Constant>(V))
6425 indicatePessimisticFixpoint();
6430 auto Before = SimplifiedAssociatedValue;
6431 if (!askSimplifiedValueForOtherAAs(
A))
6432 return indicatePessimisticFixpoint();
6435 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6436 : ChangeStatus ::CHANGED;
6440 void trackStatistics()
const override {
6445struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6447 : AAValueSimplifyImpl(IRP,
A) {}
6451 SimplifiedAssociatedValue =
nullptr;
6452 indicateOptimisticFixpoint();
6457 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6460 void trackStatistics()
const override {
6465struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6467 : AAValueSimplifyFunction(IRP,
A) {}
6469 void trackStatistics()
const override {
6474struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6476 : AAValueSimplifyImpl(IRP,
A) {}
6479 AAValueSimplifyImpl::initialize(
A);
6480 Function *Fn = getAssociatedFunction();
6481 assert(Fn &&
"Did expect an associted function");
6487 checkAndUpdate(
A, *
this, IRP))
6488 indicateOptimisticFixpoint();
6490 indicatePessimisticFixpoint();
6498 return indicatePessimisticFixpoint();
6501 void trackStatistics()
const override {
6506struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6508 : AAValueSimplifyFloating(IRP,
A) {}
6516 if (FloatAA && FloatAA->getState().isValidState())
6519 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6520 Use &
U = cast<CallBase>(&getAnchorValue())
6521 ->getArgOperandUse(getCallSiteArgNo());
6522 if (
A.changeUseAfterManifest(U, *NewV))
6523 Changed = ChangeStatus::CHANGED;
6526 return Changed | AAValueSimplify::manifest(
A);
6529 void trackStatistics()
const override {
6539 struct AllocationInfo {
6551 }
Status = STACK_DUE_TO_USE;
6555 bool HasPotentiallyFreeingUnknownUses =
false;
6559 bool MoveAllocaIntoEntry =
true;
6565 struct DeallocationInfo {
6573 bool MightFreeUnknownObjects =
false;
6582 ~AAHeapToStackFunction() {
6585 for (
auto &It : AllocationInfos)
6586 It.second->~AllocationInfo();
6587 for (
auto &It : DeallocationInfos)
6588 It.second->~DeallocationInfo();
6592 AAHeapToStack::initialize(
A);
6595 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6602 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6611 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6612 AllocationInfos[CB] = AI;
6614 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6620 bool UsedAssumedInformation =
false;
6621 bool Success =
A.checkForAllCallLikeInstructions(
6622 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6626 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6630 bool &) -> std::optional<Value *> {
return nullptr; };
6631 for (
const auto &It : AllocationInfos)
6634 for (
const auto &It : DeallocationInfos)
6639 const std::string getAsStr(
Attributor *
A)
const override {
6640 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6641 for (
const auto &It : AllocationInfos) {
6642 if (It.second->Status == AllocationInfo::INVALID)
6643 ++NumInvalidMallocs;
6647 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6648 std::to_string(NumInvalidMallocs);
6652 void trackStatistics()
const override {
6655 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6656 for (
const auto &It : AllocationInfos)
6657 if (It.second->Status != AllocationInfo::INVALID)
6661 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6663 if (AllocationInfo *AI =
6664 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6665 return AI->Status != AllocationInfo::INVALID;
6669 bool isAssumedHeapToStackRemovedFree(
CallBase &CB)
const override {
6670 if (!isValidState())
6673 for (
const auto &It : AllocationInfos) {
6674 AllocationInfo &AI = *It.second;
6675 if (AI.Status == AllocationInfo::INVALID)
6678 if (AI.PotentialFreeCalls.count(&CB))
6686 assert(getState().isValidState() &&
6687 "Attempted to manifest an invalid state!");
6691 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6693 for (
auto &It : AllocationInfos) {
6694 AllocationInfo &AI = *It.second;
6695 if (AI.Status == AllocationInfo::INVALID)
6698 for (
CallBase *FreeCall : AI.PotentialFreeCalls) {
6699 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6700 A.deleteAfterManifest(*FreeCall);
6701 HasChanged = ChangeStatus::CHANGED;
6704 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6709 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6710 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6711 return OR <<
"Moving globalized variable to the stack.";
6712 return OR <<
"Moving memory allocation from the heap to the stack.";
6714 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6721 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6723 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6730 cast<ConstantInt>(SizeOffsetPair.
Offset)->isZero());
6735 ?
F->getEntryBlock().begin()
6736 : AI.CB->getIterator();
6739 if (
MaybeAlign RetAlign = AI.CB->getRetAlign())
6740 Alignment = std::max(Alignment, *RetAlign);
6742 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *
Align);
6743 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6744 "Expected an alignment during manifest!");
6746 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6750 unsigned AS =
DL.getAllocaAddrSpace();
6753 AI.CB->getName() +
".h2s", IP);
6755 if (Alloca->
getType() != AI.CB->getType())
6756 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6757 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6762 "Must be able to materialize initial memory state of allocation");
6766 if (
auto *II = dyn_cast<InvokeInst>(AI.CB)) {
6769 A.deleteAfterManifest(*AI.CB);
6771 A.deleteAfterManifest(*AI.CB);
6777 if (!isa<UndefValue>(InitVal)) {
6780 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6782 HasChanged = ChangeStatus::CHANGED;
6790 bool UsedAssumedInformation =
false;
6791 std::optional<Constant *> SimpleV =
6792 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6794 return APInt(64, 0);
6795 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
6796 return CI->getValue();
6797 return std::nullopt;
6801 AllocationInfo &AI) {
6802 auto Mapper = [&](
const Value *
V) ->
const Value * {
6803 bool UsedAssumedInformation =
false;
6804 if (std::optional<Constant *> SimpleV =
6805 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6812 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6830 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6832 const auto *LivenessAA =
6836 A.getInfoCache().getMustBeExecutedContextExplorer();
6838 bool StackIsAccessibleByOtherThreads =
6839 A.getInfoCache().stackIsAccessibleByOtherThreads();
6842 A.getInfoCache().getAnalysisResultForFunction<
LoopAnalysis>(*F);
6843 std::optional<bool> MayContainIrreducibleControl;
6845 if (&
F->getEntryBlock() == &BB)
6847 if (!MayContainIrreducibleControl.has_value())
6849 if (*MayContainIrreducibleControl)
6858 bool HasUpdatedFrees =
false;
6860 auto UpdateFrees = [&]() {
6861 HasUpdatedFrees =
true;
6863 for (
auto &It : DeallocationInfos) {
6864 DeallocationInfo &DI = *It.second;
6867 if (DI.MightFreeUnknownObjects)
6871 bool UsedAssumedInformation =
false;
6872 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6879 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6880 DI.MightFreeUnknownObjects =
true;
6886 if (isa<ConstantPointerNull>(Obj) || isa<UndefValue>(Obj))
6889 CallBase *ObjCB = dyn_cast<CallBase>(Obj);
6893 DI.MightFreeUnknownObjects =
true;
6897 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6899 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6901 DI.MightFreeUnknownObjects =
true;
6905 DI.PotentialAllocationCalls.insert(ObjCB);
6909 auto FreeCheck = [&](AllocationInfo &AI) {
6913 if (!StackIsAccessibleByOtherThreads) {
6915 if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
6918 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6919 "other threads and function is not nosync:\n");
6923 if (!HasUpdatedFrees)
6927 if (AI.PotentialFreeCalls.size() != 1) {
6929 << AI.PotentialFreeCalls.size() <<
"\n");
6932 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
6933 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
6936 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
6937 << *UniqueFree <<
"\n");
6940 if (DI->MightFreeUnknownObjects) {
6942 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
6945 if (DI->PotentialAllocationCalls.empty())
6947 if (DI->PotentialAllocationCalls.size() > 1) {
6949 << DI->PotentialAllocationCalls.size()
6950 <<
" different allocations\n");
6953 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
6956 <<
"[H2S] unique free call not known to free this allocation but "
6957 << **DI->PotentialAllocationCalls.begin() <<
"\n");
6962 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
6964 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
6967 <<
"[H2S] unique free call might not be executed with the allocation "
6968 << *UniqueFree <<
"\n");
6975 auto UsesCheck = [&](AllocationInfo &AI) {
6976 bool ValidUsesOnly =
true;
6978 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
6980 if (isa<LoadInst>(UserI))
6982 if (
auto *SI = dyn_cast<StoreInst>(UserI)) {
6983 if (
SI->getValueOperand() ==
U.get()) {
6985 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
6986 ValidUsesOnly =
false;
6992 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
6995 if (DeallocationInfos.count(CB)) {
6996 AI.PotentialFreeCalls.insert(CB);
7003 bool IsKnownNoCapture;
7004 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7009 bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
7012 if (!IsAssumedNoCapture ||
7013 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7014 !IsAssumedNoFree)) {
7015 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7020 <<
"Could not move globalized variable to the stack. "
7021 "Variable is potentially captured in call. Mark "
7022 "parameter as `__attribute__((noescape))` to override.";
7025 if (ValidUsesOnly &&
7026 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7030 ValidUsesOnly =
false;
7035 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
7036 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
7043 ValidUsesOnly =
false;
7046 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7048 [&](
const Use &OldU,
const Use &NewU) {
7049 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7050 return !SI || StackIsAccessibleByOtherThreads ||
7051 AA::isAssumedThreadLocalObject(
7052 A, *SI->getPointerOperand(), *this);
7055 return ValidUsesOnly;
7060 for (
auto &It : AllocationInfos) {
7061 AllocationInfo &AI = *It.second;
7062 if (AI.Status == AllocationInfo::INVALID)
7066 std::optional<APInt> APAlign = getAPInt(
A, *
this, *
Align);
7070 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7072 AI.Status = AllocationInfo::INVALID;
7077 !APAlign->isPowerOf2()) {
7078 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7080 AI.Status = AllocationInfo::INVALID;
7087 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7092 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7094 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7098 AI.Status = AllocationInfo::INVALID;
7104 switch (AI.Status) {
7105 case AllocationInfo::STACK_DUE_TO_USE:
7108 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7110 case AllocationInfo::STACK_DUE_TO_FREE:
7113 AI.Status = AllocationInfo::INVALID;
7116 case AllocationInfo::INVALID:
7123 bool IsGlobalizedLocal =
7124 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7125 if (AI.MoveAllocaIntoEntry &&
7126 (!
Size.has_value() ||
7127 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7128 AI.MoveAllocaIntoEntry =
false;
7142 AAPrivatizablePtr::indicatePessimisticFixpoint();
7143 PrivatizableType =
nullptr;
7144 return ChangeStatus::CHANGED;
7150 virtual std::optional<Type *> identifyPrivatizableType(
Attributor &
A) = 0;
7154 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7155 std::optional<Type *>
T1) {
7165 std::optional<Type *> getPrivatizableType()
const override {
7166 return PrivatizableType;
7169 const std::string getAsStr(
Attributor *
A)
const override {
7170 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7174 std::optional<Type *> PrivatizableType;
7179struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7181 : AAPrivatizablePtrImpl(IRP,
A) {}
7184 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7187 bool UsedAssumedInformation =
false;
7189 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7191 if (!
Attrs.empty() &&
7193 true, UsedAssumedInformation))
7194 return Attrs[0].getValueAsType();
7196 std::optional<Type *> Ty;
7197 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7220 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7224 dbgs() <<
"<nullptr>";
7229 Ty = combineTypes(Ty, CSTy);
7232 dbgs() <<
" : New Type: ";
7234 (*Ty)->print(
dbgs());
7236 dbgs() <<
"<nullptr>";
7245 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7246 UsedAssumedInformation))
7253 PrivatizableType = identifyPrivatizableType(
A);
7254 if (!PrivatizableType)
7255 return ChangeStatus::UNCHANGED;
7256 if (!*PrivatizableType)
7257 return indicatePessimisticFixpoint();
7262 DepClassTy::OPTIONAL);
7265 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7268 return indicatePessimisticFixpoint();
7274 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7278 Function &Fn = *getIRPosition().getAnchorScope();
7282 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7284 return indicatePessimisticFixpoint();
7294 bool UsedAssumedInformation =
false;
7295 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7296 UsedAssumedInformation)) {
7298 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7300 return indicatePessimisticFixpoint();
7304 Argument *Arg = getAssociatedArgument();
7305 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7307 return indicatePessimisticFixpoint();
7314 auto IsCompatiblePrivArgOfCallback = [&](
CallBase &CB) {
7317 for (
const Use *U : CallbackUses) {
7319 assert(CBACS && CBACS.isCallbackCall());
7320 for (
Argument &CBArg : CBACS.getCalledFunction()->args()) {
7321 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7325 <<
"[AAPrivatizablePtr] Argument " << *Arg
7326 <<
"check if can be privatized in the context of its parent ("
7328 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7330 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7331 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7332 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7334 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7335 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7338 if (CBArgNo !=
int(ArgNo))
7342 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7346 if (*CBArgPrivTy == PrivatizableType)
7351 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7352 <<
" cannot be privatized in the context of its parent ("
7354 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7356 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7357 <<
").\n[AAPrivatizablePtr] for which the argument "
7358 "privatization is not compatible.\n";
7372 "Expected a direct call operand for callback call operand");
7377 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7378 <<
" check if be privatized in the context of its parent ("
7380 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7382 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7385 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7388 DepClassTy::REQUIRED);
7389 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7393 if (*DCArgPrivTy == PrivatizableType)
7399 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7400 <<
" cannot be privatized in the context of its parent ("
7402 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7405 <<
").\n[AAPrivatizablePtr] for which the argument "
7406 "privatization is not compatible.\n";
7418 return IsCompatiblePrivArgOfDirectCS(ACS);
7422 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7423 UsedAssumedInformation))
7424 return indicatePessimisticFixpoint();
7426 return ChangeStatus::UNCHANGED;
7432 identifyReplacementTypes(
Type *PrivType,
7436 assert(PrivType &&
"Expected privatizable type!");
7439 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7440 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7441 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7442 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7443 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7444 PrivArrayType->getElementType());
7455 assert(PrivType &&
"Expected privatizable type!");
7461 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7462 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7463 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7468 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7469 Type *PointeeTy = PrivArrayType->getElementType();
7470 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7471 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7482 void createReplacementValues(
Align Alignment,
Type *PrivType,
7486 assert(PrivType &&
"Expected privatizable type!");
7493 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7494 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7495 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7496 Type *PointeeTy = PrivStructType->getElementType(u);
7500 L->setAlignment(Alignment);
7503 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7504 Type *PointeeTy = PrivArrayType->getElementType();
7505 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7506 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7509 L->setAlignment(Alignment);
7514 L->setAlignment(Alignment);
7521 if (!PrivatizableType)
7522 return ChangeStatus::UNCHANGED;
7523 assert(*PrivatizableType &&
"Expected privatizable type!");
7529 bool UsedAssumedInformation =
false;
7530 if (!
A.checkForAllInstructions(
7532 CallInst &CI = cast<CallInst>(I);
7533 if (CI.isTailCall())
7534 TailCalls.push_back(&CI);
7537 *
this, {Instruction::Call}, UsedAssumedInformation))
7538 return ChangeStatus::UNCHANGED;
7540 Argument *Arg = getAssociatedArgument();
7543 const auto *AlignAA =
7552 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7554 const DataLayout &
DL = IP->getModule()->getDataLayout();
7555 unsigned AS =
DL.getAllocaAddrSpace();
7557 Arg->
getName() +
".priv", IP);
7558 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7559 ArgIt->getArgNo(), IP);
7562 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7567 CI->setTailCall(
false);
7578 createReplacementValues(
7579 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7580 *PrivatizableType, ACS,
7588 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7591 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7592 std::move(FnRepairCB),
7593 std::move(ACSRepairCB)))
7594 return ChangeStatus::CHANGED;
7595 return ChangeStatus::UNCHANGED;
7599 void trackStatistics()
const override {
7604struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7606 : AAPrivatizablePtrImpl(IRP,
A) {}
7611 indicatePessimisticFixpoint();
7616 "updateImpl will not be called");
7620 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7623 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7627 if (
auto *AI = dyn_cast<AllocaInst>(Obj))
7628 if (
auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
7630 return AI->getAllocatedType();
7631 if (
auto *Arg = dyn_cast<Argument>(Obj)) {
7634 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7638 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7639 "alloca nor privatizable argument: "
7645 void trackStatistics()
const override {
7650struct AAPrivatizablePtrCallSiteArgument final
7651 :
public AAPrivatizablePtrFloating {
7653 : AAPrivatizablePtrFloating(IRP,
A) {}
7657 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7658 indicateOptimisticFixpoint();
7663 PrivatizableType = identifyPrivatizableType(
A);
7664 if (!PrivatizableType)
7665 return ChangeStatus::UNCHANGED;
7666 if (!*PrivatizableType)
7667 return indicatePessimisticFixpoint();
7670 bool IsKnownNoCapture;
7671 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7672 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7673 if (!IsAssumedNoCapture) {
7674 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7675 return indicatePessimisticFixpoint();
7678 bool IsKnownNoAlias;
7679 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
7680 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7681 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7682 return indicatePessimisticFixpoint();
7687 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7688 return indicatePessimisticFixpoint();
7691 return ChangeStatus::UNCHANGED;
7695 void trackStatistics()
const override {
7700struct AAPrivatizablePtrCallSiteReturned final
7701 :
public AAPrivatizablePtrFloating {
7703 : AAPrivatizablePtrFloating(IRP,
A) {}
7708 indicatePessimisticFixpoint();
7712 void trackStatistics()
const override {
7717struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7719 : AAPrivatizablePtrFloating(IRP,
A) {}
7724 indicatePessimisticFixpoint();
7728 void trackStatistics()
const override {
7744 intersectAssumedBits(BEST_STATE);
7745 getKnownStateFromValue(
A, getIRPosition(), getState());
7746 AAMemoryBehavior::initialize(
A);
7752 bool IgnoreSubsumingPositions =
false) {
7754 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7756 switch (Attr.getKindAsEnum()) {
7757 case Attribute::ReadNone:
7760 case Attribute::ReadOnly:
7763 case Attribute::WriteOnly:
7772 if (!
I->mayReadFromMemory())
7774 if (!
I->mayWriteToMemory())
7787 else if (isAssumedWriteOnly())
7796 if (
A.hasAttr(IRP, Attribute::ReadNone,
7798 return ChangeStatus::UNCHANGED;
7807 return ChangeStatus::UNCHANGED;
7810 A.removeAttrs(IRP, AttrKinds);
7813 A.removeAttrs(IRP, Attribute::Writable);
7820 const std::string getAsStr(
Attributor *
A)
const override {
7825 if (isAssumedWriteOnly())
7827 return "may-read/write";
7835 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7838struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7840 : AAMemoryBehaviorImpl(IRP,
A) {}
7846 void trackStatistics()
const override {
7851 else if (isAssumedWriteOnly())
7866struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7868 : AAMemoryBehaviorFloating(IRP,
A) {}
7872 intersectAssumedBits(BEST_STATE);
7877 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7879 getKnownStateFromValue(
A, IRP, getState(),
7886 return ChangeStatus::UNCHANGED;
7890 if (
A.hasAttr(getIRPosition(),
7891 {Attribute::InAlloca, Attribute::Preallocated})) {
7892 removeKnownBits(NO_WRITES);
7893 removeAssumedBits(NO_WRITES);
7895 A.removeAttrs(getIRPosition(), AttrKinds);
7896 return AAMemoryBehaviorFloating::manifest(
A);
7900 void trackStatistics()
const override {
7905 else if (isAssumedWriteOnly())
7910struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7912 : AAMemoryBehaviorArgument(IRP,
A) {}
7918 Argument *Arg = getAssociatedArgument();
7920 indicatePessimisticFixpoint();
7924 addKnownBits(NO_WRITES);
7925 removeKnownBits(NO_READS);
7926 removeAssumedBits(NO_READS);
7928 AAMemoryBehaviorArgument::initialize(
A);
7929 if (getAssociatedFunction()->isDeclaration())
7930 indicatePessimisticFixpoint();
7939 Argument *Arg = getAssociatedArgument();
7944 return indicatePessimisticFixpoint();
7949 void trackStatistics()
const override {
7954 else if (isAssumedWriteOnly())
7960struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
7962 : AAMemoryBehaviorFloating(IRP,
A) {}
7966 AAMemoryBehaviorImpl::initialize(
A);
7971 return ChangeStatus::UNCHANGED;
7975 void trackStatistics()
const override {}
7979struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
7981 : AAMemoryBehaviorImpl(IRP,
A) {}
7991 Function &
F = cast<Function>(getAnchorValue());
7997 else if (isAssumedWriteOnly())
8000 A.removeAttrs(getIRPosition(), AttrKinds);
8005 return A.manifestAttrs(getIRPosition(),
8010 void trackStatistics()
const override {
8015 else if (isAssumedWriteOnly())
8021struct AAMemoryBehaviorCallSite final
8022 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8029 CallBase &CB = cast<CallBase>(getAnchorValue());
8035 else if (isAssumedWriteOnly())
8038 A.removeAttrs(getIRPosition(), AttrKinds);
8043 Attribute::Writable);
8044 return A.manifestAttrs(
8049 void trackStatistics()
const override {
8054 else if (isAssumedWriteOnly())
8062 auto AssumedState = getAssumed();
8068 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
8071 if (MemBehaviorAA) {
8072 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8073 return !isAtFixpoint();
8078 if (
I.mayReadFromMemory())
8079 removeAssumedBits(NO_READS);
8080 if (
I.mayWriteToMemory())
8081 removeAssumedBits(NO_WRITES);
8082 return !isAtFixpoint();
8085 bool UsedAssumedInformation =
false;
8086 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8087 UsedAssumedInformation))
8088 return indicatePessimisticFixpoint();
8107 const auto *FnMemAA =
8111 S.addKnownBits(FnMemAA->getKnown());
8112 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8118 auto AssumedState = S.getAssumed();
8124 bool IsKnownNoCapture;
8126 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
8130 if (!IsAssumedNoCapture &&
8132 S.intersectAssumedBits(FnMemAssumedState);
8138 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8140 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8148 Follow = followUsersOfUseIn(
A, U, UserI);
8152 analyzeUseIn(
A, U, UserI);
8154 return !isAtFixpoint();
8157 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8158 return indicatePessimisticFixpoint();
8164bool AAMemoryBehaviorFloating::followUsersOfUseIn(
Attributor &
A,
const Use &U,
8168 if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
8173 const auto *CB = dyn_cast<CallBase>(UserI);
8183 if (
U.get()->getType()->isPointerTy()) {
8185 bool IsKnownNoCapture;
8186 return !AA::hasAssumedIRAttr<Attribute::NoCapture>(
8194void AAMemoryBehaviorFloating::analyzeUseIn(
Attributor &
A,
const Use &U,
8202 case Instruction::Load:
8204 removeAssumedBits(NO_READS);
8207 case Instruction::Store:
8212 removeAssumedBits(NO_WRITES);
8214 indicatePessimisticFixpoint();
8217 case Instruction::Call:
8218 case Instruction::CallBr:
8219 case Instruction::Invoke: {
8222 const auto *CB = cast<CallBase>(UserI);
8226 indicatePessimisticFixpoint();
8233 removeAssumedBits(NO_READS);
8240 if (
U.get()->getType()->isPointerTy())
8244 const auto *MemBehaviorAA =
8250 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8258 removeAssumedBits(NO_READS);
8260 removeAssumedBits(NO_WRITES);
8272 return "all memory";
8275 std::string S =
"memory:";
8281 S +=
"internal global,";
8283 S +=
"external global,";
8287 S +=
"inaccessible,";
8301 AccessKind2Accesses.fill(
nullptr);
8304 ~AAMemoryLocationImpl() {
8307 for (AccessSet *AS : AccessKind2Accesses)
8314 intersectAssumedBits(BEST_STATE);
8315 getKnownStateFromValue(
A, getIRPosition(), getState());
8316 AAMemoryLocation::initialize(
A);
8322 bool IgnoreSubsumingPositions =
false) {
8331 bool UseArgMemOnly =
true;
8333 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8337 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8346 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8351 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8355 A.manifestAttrs(IRP,
8365 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8369 A.manifestAttrs(IRP,
8388 else if (isAssumedInaccessibleMemOnly())
8391 else if (isAssumedArgMemOnly())
8394 else if (isAssumedInaccessibleOrArgMemOnly())
8409 if (DeducedAttrs.
size() != 1)
8410 return ChangeStatus::UNCHANGED;
8418 bool checkForAllAccessesToMemoryKind(
8420 MemoryLocationsKind)>
8422 MemoryLocationsKind RequestedMLK)
const override {
8423 if (!isValidState())
8426 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8427 if (AssumedMLK == NO_LOCATIONS)
8431 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8432 CurMLK *= 2, ++
Idx) {
8433 if (CurMLK & RequestedMLK)
8436 if (
const AccessSet *Accesses = AccessKind2Accesses[
Idx])
8437 for (
const AccessInfo &AI : *Accesses)
8438 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8450 bool Changed =
false;
8451 MemoryLocationsKind KnownMLK = getKnown();
8452 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
8453 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8454 if (!(CurMLK & KnownMLK))
8455 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr, Changed,
8456 getAccessKindFromInst(
I));
8457 return AAMemoryLocation::indicatePessimisticFixpoint();
8477 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8481 return LHS.Ptr <
RHS.Ptr;
8482 if (
LHS.Kind !=
RHS.Kind)
8483 return LHS.Kind <
RHS.Kind;
8491 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8508 AK =
I->mayReadFromMemory() ? READ :
NONE;
8526 Changed |= Accesses->insert(AccessInfo{
I,
Ptr, AK}).second;
8527 if (MLK == NO_UNKOWN_MEM)
8529 State.removeAssumedBits(MLK);
8536 unsigned AccessAS = 0);
8542void AAMemoryLocationImpl::categorizePtrValue(
8545 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8547 << getMemoryLocationsAsStr(State.getAssumed()) <<
"]\n");
8549 auto Pred = [&](
Value &Obj) {
8552 MemoryLocationsKind MLK = NO_LOCATIONS;
8562 if (isa<UndefValue>(&Obj))
8564 if (isa<Argument>(&Obj)) {
8571 MLK = NO_ARGUMENT_MEM;
8572 }
else if (
auto *GV = dyn_cast<GlobalValue>(&Obj)) {
8576 if (
auto *GVar = dyn_cast<GlobalVariable>(GV))
8577 if (GVar->isConstant())
8580 if (GV->hasLocalLinkage())
8581 MLK = NO_GLOBAL_INTERNAL_MEM;
8583 MLK = NO_GLOBAL_EXTERNAL_MEM;
8584 }
else if (isa<ConstantPointerNull>(&Obj) &&
8588 }
else if (isa<AllocaInst>(&Obj)) {
8590 }
else if (
const auto *CB = dyn_cast<CallBase>(&Obj)) {
8591 bool IsKnownNoAlias;
8592 if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
8595 MLK = NO_MALLOCED_MEM;
8597 MLK = NO_UNKOWN_MEM;
8599 MLK = NO_UNKOWN_MEM;
8602 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8603 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8604 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8605 updateStateAndAccessesMap(State, MLK, &
I, &Obj, Changed,
8606 getAccessKindFromInst(&
I));
8615 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8616 updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8617 getAccessKindFromInst(&
I));
8622 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8623 << getMemoryLocationsAsStr(State.getAssumed()) <<
"\n");
8626void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8629 for (
unsigned ArgNo = 0,
E = CB.
arg_size(); ArgNo <
E; ++ArgNo) {
8638 const auto *ArgOpMemLocationAA =
8641 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8646 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs, Changed);
// Interior of AAMemoryLocationImpl::categorizeAccessedLocations (the function
// header was elided by the extraction). For a call site it merges the callee's
// assumed not-accessed locations; otherwise it categorizes the instruction's
// pointer operand; on failure it records an unknown-memory access.
8653 LLVM_DEBUG(
 dbgs() <<
 "[AAMemoryLocation] Categorize accessed locations for "
// Start from the best state (nothing accessed) and add accesses below.
8657 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8659 if (
 auto *CB = dyn_cast<CallBase>(&
 I)) {
8665 <<
 " [" << CBMemLocationAA <<
 "]\n");
// Without a memory-location AA for the callee, be conservative: unknown mem.
8666 if (!CBMemLocationAA) {
8667 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
 I,
 nullptr,
8668 Changed, getAccessKindFromInst(&
 I));
8669 return NO_UNKOWN_MEM;
8672 if (CBMemLocationAA->isAssumedReadNone())
8673 return NO_LOCATIONS;
8675 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8676 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
 I,
 nullptr,
8677 Changed, getAccessKindFromInst(&
 I));
8678 return AccessedLocs.getAssumed();
8681 uint32_t CBAssumedNotAccessedLocs =
8682 CBMemLocationAA->getAssumedNotAccessedLocation();
// Argument and global memory are handled separately below, so mask them out
// before transferring the callee's remaining location kinds.
8685 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8686 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8688 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8689 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8691 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
 I,
 nullptr, Changed,
8692 getAccessKindFromInst(&
 I));
// If the callee may touch global memory, transfer its concrete global
// accesses through AccessPred; bail to the worst state if that fails.
8697 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8698 if (HasGlobalAccesses) {
8701 updateStateAndAccessesMap(AccessedLocs, MLK, &
 I,
 Ptr, Changed,
8702 getAccessKindFromInst(&
 I));
8705 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8706 AccessPred, inverseLocation(NO_GLOBAL_MEM,
 false,
 false)))
8707 return AccessedLocs.getWorstState();
8711 dbgs() <<
 "[AAMemoryLocation] Accessed state before argument handling: "
8712 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
 "\n");
8715 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8717 categorizeArgumentPointerLocations(
 A, *CB, AccessedLocs, Changed);
8720 dbgs() <<
 "[AAMemoryLocation] Accessed state after argument handling: "
8721 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
 "\n");
8723 return AccessedLocs.getAssumed();
// Non-call instruction with a pointer operand: categorize that pointer,
// using its address space as a hint.
8728 dbgs() <<
 "[AAMemoryLocation] Categorize memory access with pointer: "
8729 <<
 I <<
 " [" << *
 Ptr <<
 "]\n");
8730 categorizePtrValue(
 A,
 I, *
 Ptr, AccessedLocs, Changed,
8731 Ptr->getType()->getPointerAddressSpace());
8732 return AccessedLocs.getAssumed();
// Could not categorize at all — record an unknown-memory access.
8735 LLVM_DEBUG(
 dbgs() <<
 "[AAMemoryLocation] Failed to categorize instruction: "
8737 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
 I,
 nullptr, Changed,
8738 getAccessKindFromInst(&
 I));
8739 return AccessedLocs.getAssumed();
// AAMemoryLocation attribute for a function position: iterates over all
// read/write instructions and removes location bits that are provably not
// accessed. Truncated extraction — several lines elided.
8743struct AAMemoryLocationFunction final :
 public AAMemoryLocationImpl {
8745 : AAMemoryLocationImpl(IRP,
 A) {}
// Fast path: if AAMemoryBehavior already proved readnone, we are done.
8750 const auto *MemBehaviorAA =
8754 return indicateOptimisticFixpoint();
8756 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8757 A.recordDependence(*MemBehaviorAA, *
 this, DepClassTy::OPTIONAL);
8758 return ChangeStatus::UNCHANGED;
// updateImpl body: categorize every read/write instruction and intersect.
8762 auto AssumedState = getAssumed();
8763 bool Changed =
 false;
8766 MemoryLocationsKind MLK = categorizeAccessedLocations(
 A,
 I, Changed);
8767 LLVM_DEBUG(
 dbgs() <<
 "[AAMemoryLocation] Accessed locations for " <<
 I
8768 <<
 ": " << getMemoryLocationsAsStr(MLK) <<
 "\n");
8769 removeAssumedBits(inverseLocation(MLK,
 false,
 false));
// Keep scanning only while some location bit can still be removed.
8772 return getAssumedNotAccessedLocation() != VALID_STATE;
8775 bool UsedAssumedInformation =
 false;
8776 if (!
 A.checkForAllReadWriteInstructions(CheckRWInst, *
 this,
8777 UsedAssumedInformation))
8778 return indicatePessimisticFixpoint();
8780 Changed |= AssumedState != getAssumed();
8781 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
// Statistics bookkeeping per deduced memory-location class.
8785 void trackStatistics()
 const override {
8788 else if (isAssumedArgMemOnly())
8790 else if (isAssumedInaccessibleMemOnly())
8792 else if (isAssumedInaccessibleOrArgMemOnly())
// AAMemoryLocation for a call-site position: transfers the callee function's
// accesses into this call-site's state. Truncated extraction.
8798struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8800 : AAMemoryLocationImpl(IRP,
 A) {}
8813 return indicatePessimisticFixpoint();
8814 bool Changed =
 false;
// AccessPred body: mirror each callee access into this call-site state.
8817 updateStateAndAccessesMap(getState(), MLK,
 I,
 Ptr, Changed,
8818 getAccessKindFromInst(
 I));
8821 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8822 return indicatePessimisticFixpoint();
8823 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8827 void trackStatistics()
 const override {
// getAsStr for the denormal-FP-math abstract attribute (enclosing struct
// declaration elided by the extraction): renders the known denormal modes
// in "denormal-fp-math[-f32]=<mode>" attribute syntax.
8841 const std::string getAsStr(
 Attributor *
 A)
 const override {
8842 std::string Str(
 "AADenormalFPMath[");
8845 DenormalState Known = getKnown();
8846 if (Known.Mode.isValid())
8847 OS <<
 "denormal-fp-math=" << Known.Mode;
8851 if (Known.ModeF32.isValid())
8852 OS <<
 " denormal-fp-math-f32=" << Known.ModeF32;
// Denormal-FP-math deduction for a function: meets the state over all call
// sites, then manifests "denormal-fp-math"/"denormal-fp-math-f32" string
// attributes. Truncated extraction — several lines elided.
8858struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8860 : AADenormalFPMathImpl(IRP,
 A) {}
8872 Known = DenormalState{
 Mode, ModeF32};
8883 <<
 "->" << getAssociatedFunction()->
 getName() <<
 '\n');
// Merge each caller's denormal state into this function's state.
8891 CallerInfo->getState());
8895 bool AllCallSitesKnown =
 true;
8896 if (!
 A.checkForAllCallSites(CheckCallSite, *
 this,
 true, AllCallSitesKnown))
8897 return indicatePessimisticFixpoint();
8899 if (Change == ChangeStatus::CHANGED && isModeFixed())
// manifest(): rewrite the attributes to match the deduced modes. The old
// attribute is removed and re-added only when a valid mode is known.
8905 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8910 AttrToRemove.
 push_back(
 "denormal-fp-math");
// The f32-specific attribute is emitted only when it differs from the
// general mode (otherwise the general attribute already covers f32).
8916 if (Known.ModeF32 != Known.Mode) {
8918 Attribute::get(Ctx,
 "denormal-fp-math-f32", Known.ModeF32.str()));
8920 AttrToRemove.
 push_back(
 "denormal-fp-math-f32");
8923 auto &IRP = getIRPosition();
8926 return A.removeAttrs(IRP, AttrToRemove) |
8927 A.manifestAttrs(IRP, AttrToAdd,
 true);
8930 void trackStatistics()
 const override {
// Member fragments of AAValueConstantRangeImpl (struct declaration elided):
// initialize() seeds the range from SCEV and LazyValueInfo; helpers query
// those analyses with a context instruction; manifest-side helpers attach
// !range metadata when the assumed range is strictly better than the known.
8946 if (
 A.hasSimplificationCallback(getIRPosition())) {
8947 indicatePessimisticFixpoint();
// Seed the known range from both SCEV and LVI at the context instruction.
8952 intersectKnown(getConstantRangeFromSCEV(
 A, getCtxI()));
8955 intersectKnown(getConstantRangeFromLVI(
 A, getCtxI()));
8959 const std::string getAsStr(
 Attributor *
 A)
 const override {
8963 getKnown().print(
 OS);
8965 getAssumed().print(
 OS);
// SCEV-based range query; requires an anchor scope (function) to be present.
8973 if (!getAnchorScope())
8986 const SCEV *S = SE->
 getSCEV(&getAssociatedValue());
8997 if (!getAnchorScope())
9004 const SCEV *S = getSCEV(
 A,
 I);
9016 if (!getAnchorScope())
// Guard that a caller-supplied context instruction is usable for outside
// analyses (SCEV/LVI); by default the AA's own context is rejected.
9035 bool isValidCtxInstructionForOutsideAnalysis(
 Attributor &
 A,
9037 bool AllowAACtxI)
 const {
9038 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9049 if (
 auto *
 I = dyn_cast<Instruction>(&getAssociatedValue())) {
9063 const Instruction *CtxI =
 nullptr)
 const override {
9064 if (!isValidCtxInstructionForOutsideAnalysis(
 A, CtxI,
// Known range = stored known ∩ SCEV range ∩ LVI range.
9070 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9076 const Instruction *CtxI =
 nullptr)
 const override {
9081 if (!isValidCtxInstructionForOutsideAnalysis(
 A, CtxI,
9083 return getAssumed();
9087 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
// Build the two-operand MDNode {lo, hi} encoding a !range interval.
9095 Ty, AssumedConstantRange.
 getLower())),
9097 Ty, AssumedConstantRange.
 getUpper()))};
9119 mdconst::extract<ConstantInt>(KnownRanges->
 getOperand(0));
9121 mdconst::extract<ConstantInt>(KnownRanges->
 getOperand(1));
// Only replace existing metadata when strictly more precise (proper subset).
9124 return Known.contains(Assumed) && Known != Assumed;
9131 auto *OldRangeMD =
 I->getMetadata(LLVMContext::MD_range);
9132 if (isBetterRange(AssumedConstantRange, OldRangeMD)) {
9134 I->setMetadata(LLVMContext::MD_range,
9135 getMDNodeForConstantRange(
 I->getType(),
 I->getContext(),
9136 AssumedConstantRange));
9149 auto &
 V = getAssociatedValue();
9153 assert(
 I == getCtxI() &&
 "Should not annotate an instruction which is "
9154 "not the context instruction");
// !range metadata is only legal on call and load instructions.
9155 if (isa<CallInst>(
 I) || isa<LoadInst>(
 I))
9156 if (setRangeMetadataIfisBetterRange(
 I, AssumedConstantRange))
9157 Changed = ChangeStatus::CHANGED;
// Argument position: derives the range by joining over all call-site
// argument ranges (via the AAArgumentFromCallSiteArguments mixin).
9165struct AAValueConstantRangeArgument final
9166 : AAArgumentFromCallSiteArguments<
9167 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9169 using Base = AAArgumentFromCallSiteArguments<
9176 void trackStatistics()
 const override {
// Returned position: derives the range from the function's returned values;
// bails out early when the function is not IPO-amendable.
9181struct AAValueConstantRangeReturned
9182 : AAReturnedFromReturnedValues<AAValueConstantRange,
9183 AAValueConstantRangeImpl,
9184 AAValueConstantRangeImpl::StateType,
9188 AAValueConstantRangeImpl,
9196 if (!
 A.isFunctionIPOAmendable(*getAssociatedFunction()))
9197 indicatePessimisticFixpoint();
9201 void trackStatistics()
 const override {
// Floating-value position: computes a ConstantRange for an SSA value by
// recursing through binary operators, casts, and compares on the simplified
// operands. Truncated extraction — interior lines elided throughout.
9206struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9208 : AAValueConstantRangeImpl(IRP,
 A) {}
9212 AAValueConstantRangeImpl::initialize(
 A);
9216 Value &
 V = getAssociatedValue();
// Constants and undef fix the range immediately.
9218 if (
 auto *
 C = dyn_cast<ConstantInt>(&V)) {
9220 indicateOptimisticFixpoint();
9224 if (isa<UndefValue>(&V)) {
9227 indicateOptimisticFixpoint();
9231 if (isa<CallBase>(&V))
9234 if (isa<BinaryOperator>(&V) || isa<CmpInst>(&V) || isa<CastInst>(&V))
// Loads can carry !range metadata which seeds the state.
9238 if (
 LoadInst *LI = dyn_cast<LoadInst>(&V))
9239 if (
 auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9246 if (isa<SelectInst>(V) || isa<PHINode>(V))
9250 indicatePessimisticFixpoint();
9253 << getAssociatedValue() <<
 "\n");
// Combine operand ranges through a binary operator. Operands are first
// simplified via the Attributor; missing simplification aborts the step.
9256 bool calculateBinaryOperator(
9264 bool UsedAssumedInformation =
 false;
9265 const auto &SimplifiedLHS =
 A.getAssumedSimplified(
9268 if (!SimplifiedLHS.has_value())
9270 if (!*SimplifiedLHS)
9272 LHS = *SimplifiedLHS;
9274 const auto &SimplifiedRHS =
 A.getAssumedSimplified(
9277 if (!SimplifiedRHS.has_value())
9279 if (!*SimplifiedRHS)
9281 RHS = *SimplifiedRHS;
9289 DepClassTy::REQUIRED);
9293 auto LHSAARange = LHSAA->getAssumedConstantRange(
 A, CtxI);
9297 DepClassTy::REQUIRED);
9301 auto RHSAARange = RHSAA->getAssumedConstantRange(
 A, CtxI);
9303 auto AssumedRange = LHSAARange.binaryOp(BinOp->
 getOpcode(), RHSAARange);
9305 T.unionAssumed(AssumedRange);
9309 return T.isValidState();
// Propagate a range through a cast instruction (same simplify-first shape).
9312 bool calculateCastInst(
9321 bool UsedAssumedInformation =
 false;
9322 const auto &SimplifiedOpV =
 A.getAssumedSimplified(
9325 if (!SimplifiedOpV.has_value())
9327 if (!*SimplifiedOpV)
9329 OpV = *SimplifiedOpV;
9336 DepClassTy::REQUIRED);
9340 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
 getOpcode(),
9342 return T.isValidState();
// Evaluate a compare: if the operand ranges force the predicate one way,
// the result range collapses to {1} or {0}.
9353 bool UsedAssumedInformation =
 false;
9354 const auto &SimplifiedLHS =
 A.getAssumedSimplified(
9357 if (!SimplifiedLHS.has_value())
9359 if (!*SimplifiedLHS)
9361 LHS = *SimplifiedLHS;
9363 const auto &SimplifiedRHS =
 A.getAssumedSimplified(
9366 if (!SimplifiedRHS.has_value())
9368 if (!*SimplifiedRHS)
9370 RHS = *SimplifiedRHS;
9378 DepClassTy::REQUIRED);
9384 DepClassTy::REQUIRED);
9388 auto LHSAARange = LHSAA->getAssumedConstantRange(
 A, CtxI);
9389 auto RHSAARange = RHSAA->getAssumedConstantRange(
 A, CtxI);
// Empty operand ranges mean dead code; nothing to conclude.
9392 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9395 bool MustTrue =
 false, MustFalse =
 false;
9397 auto AllowedRegion =
9400 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9406 assert((!MustTrue || !MustFalse) &&
9407 "Either MustTrue or MustFalse should be false!");
9416 LLVM_DEBUG(
 dbgs() <<
 "[AAValueConstantRange] " << *CmpI <<
 " after "
9417 << (MustTrue ?
 "true" : (MustFalse ?
 "false" :
 "unknown"))
9418 <<
 ": " <<
 T <<
 "\n\t" << *LHSAA <<
 "\t<op>\n\t"
9422 return T.isValidState();
// Generic value visitor: dispatch to the helpers above by instruction kind.
9431 if (!
 I || isa<CallBase>(
 I)) {
9434 bool UsedAssumedInformation =
 false;
9435 const auto &SimplifiedOpV =
 A.getAssumedSimplified(
9438 if (!SimplifiedOpV.has_value())
9440 if (!*SimplifiedOpV)
9442 Value *VPtr = *SimplifiedOpV;
9447 DepClassTy::REQUIRED);
9451 T.unionAssumed(AA->getAssumedConstantRange(
 A, CtxI));
9455 return T.isValidState();
9459 if (
 auto *BinOp = dyn_cast<BinaryOperator>(
 I)) {
9460 if (!calculateBinaryOperator(
 A, BinOp,
 T, CtxI, QuerriedAAs))
9462 }
 else if (
 auto *CmpI = dyn_cast<CmpInst>(
 I)) {
9463 if (!calculateCmpInst(
 A, CmpI,
 T, CtxI, QuerriedAAs))
9465 }
 else if (
 auto *CastI = dyn_cast<CastInst>(
 I)) {
9466 if (!calculateCastInst(
 A, CastI,
 T, CtxI, QuerriedAAs))
9472 T.indicatePessimisticFixpoint();
// Self-queries must not be trusted; force a pessimistic fixpoint to break
// the cycle when we ended up depending on ourselves.
9480 if (QueriedAA !=
 this)
9483 if (
 T.getAssumed() == getState().getAssumed())
9485 T.indicatePessimisticFixpoint();
9488 return T.isValidState();
9491 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9492 return indicatePessimisticFixpoint();
9497 return ChangeStatus::UNCHANGED;
// Bound the number of state changes to avoid cyclic widening forever.
9498 if (++NumChanges > MaxNumChanges) {
9499 LLVM_DEBUG(
 dbgs() <<
 "[AAValueConstantRange] performed " << NumChanges
9500 <<
 " but only " << MaxNumChanges
9501 <<
 " are allowed to avoid cyclic reasoning.");
9502 return indicatePessimisticFixpoint();
9504 return ChangeStatus::CHANGED;
9508 void trackStatistics()
 const override {
9517 static constexpr int MaxNumChanges = 5;
// Function position is a stub: ranges are not meaningful for a function
// itself, so updateImpl is unreachable.
9520struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9522 : AAValueConstantRangeImpl(IRP,
 A) {}
9526 llvm_unreachable(
 "AAValueConstantRange(Function|CallSite)::updateImpl will "
// Call-site position simply reuses the function stub behavior.
9534struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9536 : AAValueConstantRangeFunction(IRP,
 A) {}
// Call-site-returned position: maps the callee's returned range onto the
// call site; also seeds from !range metadata present on the call itself.
9542struct AAValueConstantRangeCallSiteReturned
9543 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9544 AAValueConstantRangeImpl::StateType,
9548 AAValueConstantRangeImpl::StateType,
// initialize(): a call carrying !range metadata constrains the state up front.
9554 if (
 CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue()))
9555 if (
 auto *RangeMD = CI->getMetadata(LLVMContext::MD_range))
9558 AAValueConstantRangeImpl::initialize(
 A);
9562 void trackStatistics()
 const override {
// Call-site-argument position reuses the floating logic; no metadata is
// manifested here (ranges cannot be attached to call arguments).
9566struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9568 : AAValueConstantRangeFloating(IRP,
 A) {}
9572 return ChangeStatus::UNCHANGED;
9576 void trackStatistics()
 const override {
// Member fragments of AAPotentialConstantValuesImpl (struct header elided):
// initialize() and a helper that collects the potential constant values of an
// operand — either via another AAPotentialValues/ConstantValues AA or by
// simplifying to a set of ConstantInts, tracking whether undef is contained.
9593 if (
 A.hasSimplificationCallback(getIRPosition()))
9594 indicatePessimisticFixpoint();
9596 AAPotentialConstantValues::initialize(
 A);
9600 bool &ContainsUndef,
 bool ForSelf) {
9602 bool UsedAssumedInformation =
 false;
9604 UsedAssumedInformation)) {
// Fall back to querying the potential-values AA at this position.
9612 *
 this, IRP, DepClassTy::REQUIRED);
9613 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9615 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9616 S = PotentialValuesAA->getState().getAssumedSet();
// Otherwise harvest constants from the simplified value list.
9623 ContainsUndef =
 false;
9624 for (
 auto &It : Values) {
9625 if (isa<UndefValue>(It.getValue())) {
9626 ContainsUndef =
 true;
9629 auto *CI = dyn_cast<ConstantInt>(It.getValue());
9632 S.insert(CI->getValue());
// Pure undef only counts as "contains undef" when no concrete value exists.
9634 ContainsUndef &= S.empty();
9640 const std::string getAsStr(
 Attributor *
 A)
 const override {
9649 return indicatePessimisticFixpoint();
// Argument position: joins potential constant sets over all call sites.
9653struct AAPotentialConstantValuesArgument final
9654 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9655 AAPotentialConstantValuesImpl,
9656 PotentialConstantIntValuesState> {
9658 AAPotentialConstantValuesImpl,
9664 void trackStatistics()
 const override {
// Returned position: joins over returned values; requires the function to
// be IPO-amendable, otherwise fixes pessimistically.
9669struct AAPotentialConstantValuesReturned
9670 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9671 AAPotentialConstantValuesImpl> {
9673 AAPotentialConstantValuesImpl>;
9678 if (!
 A.isFunctionIPOAmendable(*getAssociatedFunction()))
9679 indicatePessimisticFixpoint();
9680 Base::initialize(
 A);
9684 void trackStatistics()
 const override {
// Floating-value position: tracks the set of constant integers a value may
// take, propagating through icmp, select, cast, binary operators, phi and
// load. Undef operands are modeled as zero ("Zero" below) per LLVM's
// "undef may be any value, pick 0" convention visible in the pairing with
// unionAssumedWithUndef. Truncated extraction — interior lines elided.
9689struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9691 : AAPotentialConstantValuesImpl(IRP,
 A) {}
9695 AAPotentialConstantValuesImpl::initialize(
 A);
9699 Value &
 V = getAssociatedValue();
// Constants/undef fix the set immediately.
9701 if (
 auto *
 C = dyn_cast<ConstantInt>(&V)) {
9702 unionAssumed(
 C->getValue());
9703 indicateOptimisticFixpoint();
9707 if (isa<UndefValue>(&V)) {
9708 unionAssumedWithUndef();
9709 indicateOptimisticFixpoint();
9713 if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
9716 if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
9719 indicatePessimisticFixpoint();
9722 << getAssociatedValue() <<
 "\n");
// Apply a cast opcode to one source constant.
9736 case Instruction::Trunc:
9737 return Src.trunc(ResultBitWidth);
9738 case Instruction::SExt:
9739 return Src.sext(ResultBitWidth);
9740 case Instruction::ZExt:
9741 return Src.zext(ResultBitWidth);
9742 case Instruction::BitCast:
// Apply a binary opcode to two constants; division/remainder by zero is
// skipped (SkipOperation) rather than evaluated.
9749 bool &SkipOperation,
 bool &Unsupported) {
9756 switch (BinOpcode) {
9760 case Instruction::Add:
9762 case Instruction::Sub:
9764 case Instruction::Mul:
9766 case Instruction::UDiv:
9768 SkipOperation =
 true;
9772 case Instruction::SDiv:
9774 SkipOperation =
 true;
9778 case Instruction::URem:
9780 SkipOperation =
 true;
9784 case Instruction::SRem:
9786 SkipOperation =
 true;
9790 case Instruction::Shl:
9792 case Instruction::LShr:
9794 case Instruction::AShr:
9796 case Instruction::And:
9798 case Instruction::Or:
9800 case Instruction::Xor:
9805 bool calculateBinaryOperatorAndTakeUnion(
 const BinaryOperator *BinOp,
9807 bool SkipOperation =
 false;
9810 calculateBinaryOperator(BinOp,
 LHS,
 RHS, SkipOperation, Unsupported);
9815 unionAssumed(Result);
9816 return isValidState();
// icmp handling: enumerate the cross product of both operand sets; if both
// outcomes occur the result is unknown and we fix pessimistically.
9820 auto AssumedBefore = getAssumed();
9824 bool LHSContainsUndef =
 false, RHSContainsUndef =
 false;
9825 SetTy LHSAAPVS, RHSAAPVS;
9827 LHSContainsUndef,
 false) ||
9829 RHSContainsUndef,
 false))
9830 return indicatePessimisticFixpoint();
9833 bool MaybeTrue =
 false, MaybeFalse =
 false;
9835 if (LHSContainsUndef && RHSContainsUndef) {
9838 unionAssumedWithUndef();
9839 }
 else if (LHSContainsUndef) {
9840 for (
 const APInt &R : RHSAAPVS) {
9841 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9842 MaybeTrue |= CmpResult;
9843 MaybeFalse |= !CmpResult;
9844 if (MaybeTrue & MaybeFalse)
9845 return indicatePessimisticFixpoint();
9847 }
 else if (RHSContainsUndef) {
9848 for (
 const APInt &L : LHSAAPVS) {
9849 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9850 MaybeTrue |= CmpResult;
9851 MaybeFalse |= !CmpResult;
9852 if (MaybeTrue & MaybeFalse)
9853 return indicatePessimisticFixpoint();
9856 for (
 const APInt &L : LHSAAPVS) {
9857 for (
 const APInt &R : RHSAAPVS) {
9858 bool CmpResult = calculateICmpInst(ICI, L, R);
9859 MaybeTrue |= CmpResult;
9860 MaybeFalse |= !CmpResult;
9861 if (MaybeTrue & MaybeFalse)
9862 return indicatePessimisticFixpoint();
// An i1 result set: {1} if only-true, {0} if only-false.
9867 unionAssumed(
 APInt( 1, 1));
9869 unionAssumed(
 APInt( 1, 0));
9870 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9871 : ChangeStatus::CHANGED;
// select handling: if the condition simplifies to a constant, take only the
// corresponding arm; otherwise union both arms.
9875 auto AssumedBefore = getAssumed();
9879 bool UsedAssumedInformation =
 false;
9880 std::optional<Constant *>
 C =
 A.getAssumedConstant(
9881 *
 SI->getCondition(), *
 this, UsedAssumedInformation);
9884 bool OnlyLeft =
 false, OnlyRight =
 false;
9885 if (
 C && *
 C && (*C)->isOneValue())
9887 else if (
 C && *
 C && (*C)->isZeroValue())
9890 bool LHSContainsUndef =
 false, RHSContainsUndef =
 false;
9891 SetTy LHSAAPVS, RHSAAPVS;
9894 LHSContainsUndef,
 false))
9895 return indicatePessimisticFixpoint();
9899 RHSContainsUndef,
 false))
9900 return indicatePessimisticFixpoint();
9902 if (OnlyLeft || OnlyRight) {
9904 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9905 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
9908 unionAssumedWithUndef();
9910 for (
 const auto &It : *OpAA)
9914 }
 else if (LHSContainsUndef && RHSContainsUndef) {
9916 unionAssumedWithUndef();
9918 for (
 const auto &It : LHSAAPVS)
9920 for (
 const auto &It : RHSAAPVS)
9923 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9924 : ChangeStatus::CHANGED;
// cast handling: map each source constant through the cast opcode.
9928 auto AssumedBefore = getAssumed();
9930 return indicatePessimisticFixpoint();
9935 bool SrcContainsUndef =
 false;
9938 SrcContainsUndef,
 false))
9939 return indicatePessimisticFixpoint();
9941 if (SrcContainsUndef)
9942 unionAssumedWithUndef();
9944 for (
 const APInt &S : SrcPVS) {
9945 APInt T = calculateCastInst(CI, S, ResultBitWidth);
9949 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9950 : ChangeStatus::CHANGED;
// binop handling: cross product of both operand sets, undef modeled as zero.
9954 auto AssumedBefore = getAssumed();
9958 bool LHSContainsUndef =
 false, RHSContainsUndef =
 false;
9959 SetTy LHSAAPVS, RHSAAPVS;
9961 LHSContainsUndef,
 false) ||
9963 RHSContainsUndef,
 false))
9964 return indicatePessimisticFixpoint();
9969 if (LHSContainsUndef && RHSContainsUndef) {
9970 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
9971 return indicatePessimisticFixpoint();
9972 }
 else if (LHSContainsUndef) {
9973 for (
 const APInt &R : RHSAAPVS) {
9974 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
9975 return indicatePessimisticFixpoint();
9977 }
 else if (RHSContainsUndef) {
9978 for (
 const APInt &L : LHSAAPVS) {
9979 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
9980 return indicatePessimisticFixpoint();
9983 for (
 const APInt &L : LHSAAPVS) {
9984 for (
 const APInt &R : RHSAAPVS) {
9985 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
9986 return indicatePessimisticFixpoint();
9990 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9991 : ChangeStatus::CHANGED;
// phi/load handling: union the potential constants of the value itself
// (ForSelf = true in the collect helper).
9995 auto AssumedBefore = getAssumed();
9999 ContainsUndef,
 true))
10000 return indicatePessimisticFixpoint();
10001 if (ContainsUndef) {
10002 unionAssumedWithUndef();
10007 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10008 : ChangeStatus::CHANGED;
// updateImpl: dispatch on the instruction kind of the associated value.
10013 Value &
 V = getAssociatedValue();
10016 if (
 auto *ICI = dyn_cast<ICmpInst>(
 I))
10017 return updateWithICmpInst(
 A, ICI);
10019 if (
 auto *SI = dyn_cast<SelectInst>(
 I))
10020 return updateWithSelectInst(
 A, SI);
10022 if (
 auto *CI = dyn_cast<CastInst>(
 I))
10023 return updateWithCastInst(
 A, CI);
10025 if (
 auto *BinOp = dyn_cast<BinaryOperator>(
 I))
10026 return updateWithBinaryOperator(
 A, BinOp);
10028 if (isa<PHINode>(
 I) || isa<LoadInst>(
 I))
10029 return updateWithInstruction(
 A,
 I);
10031 return indicatePessimisticFixpoint();
10035 void trackStatistics()
 const override {
// Function position is a stub: potential-constant sets are per-value, so
// updateImpl is unreachable for a function.
10040struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10042 : AAPotentialConstantValuesImpl(IRP,
 A) {}
10047 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10052 void trackStatistics()
 const override {
// Call-site position reuses the function stub behavior.
10057struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10059 : AAPotentialConstantValuesFunction(IRP,
 A) {}
10062 void trackStatistics()
 const override {
// Call-site-returned position: maps the callee's returned potential values
// onto the call site via the AACalleeToCallSite mixin.
10067struct AAPotentialConstantValuesCallSiteReturned
10068 : AACalleeToCallSite<AAPotentialConstantValues,
10069 AAPotentialConstantValuesImpl> {
10070 AAPotentialConstantValuesCallSiteReturned(
 const IRPosition &IRP,
10073 AAPotentialConstantValuesImpl>(IRP,
 A) {}
10076 void trackStatistics()
 const override {
// Call-site-argument position: the state is taken from the AA of the passed
// value; constants and undef short-circuit in initialize().
10081struct AAPotentialConstantValuesCallSiteArgument
10082 : AAPotentialConstantValuesFloating {
10083 AAPotentialConstantValuesCallSiteArgument(
 const IRPosition &IRP,
10085 : AAPotentialConstantValuesFloating(IRP,
 A) {}
10089 AAPotentialConstantValuesImpl::initialize(
 A);
10090 if (isAtFixpoint())
10093 Value &
 V = getAssociatedValue();
10095 if (
 auto *
 C = dyn_cast<ConstantInt>(&V)) {
10096 unionAssumed(
 C->getValue());
10097 indicateOptimisticFixpoint();
10101 if (isa<UndefValue>(&V)) {
10102 unionAssumedWithUndef();
10103 indicateOptimisticFixpoint();
// updateImpl: adopt the assumed set of the underlying value's AA.
10110 Value &
 V = getAssociatedValue();
10111 auto AssumedBefore = getAssumed();
10115 return indicatePessimisticFixpoint();
10116 const auto &S = AA->getAssumed();
10118 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10119 : ChangeStatus::CHANGED;
10123 void trackStatistics()
 const override {
// Fragments of the AANoUndef implementation (struct header elided):
// isImpliedByIR recognizes an existing `noundef` attribute; initialize bails
// on literal undef values; manifest avoids attaching `noundef` to values that
// may still simplify away.
10132 bool IgnoreSubsumingPositions) {
10133 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10134 "Unexpected attribute kind");
10135 if (
 A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10136 Attribute::NoUndef))
// A literal undef can never be noundef.
10156 Value &V = getAssociatedValue();
10157 if (isa<UndefValue>(V))
10158 indicatePessimisticFixpoint();
10159 assert(!isImpliedByIR(
 A, getIRPosition(), Attribute::NoUndef));
// Use-tracking callback: keep following through casts and GEPs, which
// propagate (non-)undef-ness of the operand.
10165 const Value *UseV =
 U->get();
10174 bool TrackUse =
 false;
10177 if (isa<CastInst>(*
 I) || isa<GetElementPtrInst>(*
 I))
10183 const std::string getAsStr(
 Attributor *
 A)
 const override {
10184 return getAssumed() ?
 "noundef" :
 "may-undef-or-poison";
// manifest(): skip dead positions and positions that still simplify.
10191 bool UsedAssumedInformation =
 false;
10192 if (
 A.isAssumedDead(getIRPosition(),
 nullptr,
 nullptr,
10193 UsedAssumedInformation))
10194 return ChangeStatus::UNCHANGED;
10198 if (!
 A.getAssumedSimplified(getIRPosition(), *
 this, UsedAssumedInformation,
10201 return ChangeStatus::UNCHANGED;
10202 return AANoUndef::manifest(
 A);
// AANoUndef for a floating value: follows uses in must-be-executed context
// and requires every simplified value to be assumed noundef.
10206struct AANoUndefFloating :
 public AANoUndefImpl {
10208 : AANoUndefImpl(IRP,
 A) {}
10212 AANoUndefImpl::initialize(
 A);
10213 if (!getState().isAtFixpoint() && getAnchorScope() &&
10214 !getAnchorScope()->isDeclaration())
10216 followUsesInMBEC(*
 this,
 A, getState(), *CtxI);
// Each simplified value must itself be assumed noundef.
10221 auto VisitValueCB = [&](
 const IRPosition &IRP) ->
 bool {
10222 bool IsKnownNoUndef;
10223 return AA::hasAssumedIRAttr<Attribute::NoUndef>(
10224 A,
 this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10228 bool UsedAssumedInformation =
 false;
10229 Value *AssociatedValue = &getAssociatedValue();
10231 if (!
 A.getAssumedSimplifiedValues(getIRPosition(), *
 this, Values,
// Detect whether simplification produced something other than the value
// itself; if not, query the value's own position directly (guarding
// against a self-query which would be circular).
10236 Values.
 size() != 1 || Values.
 front().getValue() != AssociatedValue;
10244 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10245 return indicatePessimisticFixpoint();
10246 return ChangeStatus::UNCHANGED;
10249 for (
 const auto &VAC : Values)
10251 return indicatePessimisticFixpoint();
10253 return ChangeStatus::UNCHANGED;
// noundef for the returned position, derived from returned values.
10260struct AANoUndefReturned final
10261 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10263 : AAReturnedFromReturnedValues<
 AANoUndef, AANoUndefImpl>(IRP,
 A) {}
// noundef for an argument, derived from all call-site arguments.
10269struct AANoUndefArgument final
10270 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10272 : AAArgumentFromCallSiteArguments<
 AANoUndef, AANoUndefImpl>(IRP,
 A) {}
// noundef for a call-site argument, reusing the floating-value logic.
10278struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10280 : AANoUndefFloating(IRP,
 A) {}
// noundef for a call-site return, mapped from the callee's returned position.
10286struct AANoUndefCallSiteReturned final
10287 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10289 : AACalleeToCallSite<
 AANoUndef, AANoUndefImpl>(IRP,
 A) {}
// Fragments of the AANoFPClass implementation (struct header elided):
// initialize seeds from existing `nofpclass` attributes and known FP class
// analysis; the use-visitor decides which uses are worth following.
10304 if (isa<UndefValue>(V)) {
10305 indicateOptimisticFixpoint();
10310 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
 Attrs,
 false);
10311 for (
 const auto &Attr : Attrs) {
10322 followUsesInMBEC(*
 this,
 A, getState(), *CtxI);
10328 const Value *UseV =
 U->get();
// computeKnownFPClass-style query at the use site (depth 0).
10345 0, TLI, AC,
 I, DT);
// Intrinsics like frexp propagate FP-class facts through specific operands.
10348 if (
 auto *CI = dyn_cast<CallInst>(UseV)) {
10350 switch (CI->getIntrinsicID()) {
10351 case Intrinsic::frexp:
// Loads and atomic RMWs do not propagate FP-class info from the use.
10363 return !isa<LoadInst, AtomicRMWInst>(UseV);
10366 const std::string getAsStr(
 Attributor *
 A)
 const override {
10367 std::string
 Result =
 "nofpclass";
10369 OS << getAssumedNoFPClass();
// AANoFPClass for a floating value: unions the no-FP-class sets of all
// simplified values, guarding against circular self-queries.
10379struct AANoFPClassFloating :
 public AANoFPClassImpl {
10381 : AANoFPClassImpl(IRP,
 A) {}
10386 bool UsedAssumedInformation =
 false;
10387 if (!
 A.getAssumedSimplifiedValues(getIRPosition(), *
 this, Values,
// No simplification available: fall back to the associated value itself.
10389 Values.
 push_back({getAssociatedValue(), getCtxI()});
10395 DepClassTy::REQUIRED);
10396 if (!AA ||
 this == AA) {
10397 T.indicatePessimisticFixpoint();
10403 return T.isValidState();
10406 for (
 const auto &VAC : Values)
10407 if (!VisitValueCB(*
 VAC.getValue(),
 VAC.getCtxI()))
10408 return indicatePessimisticFixpoint();
10414 void trackStatistics()
 const override {
// nofpclass for the returned position, derived from returned values.
10419struct AANoFPClassReturned final
10420 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10421 AANoFPClassImpl::StateType, false, Attribute::None, false> {
10423 : AAReturnedFromReturnedValues<
 AANoFPClass, AANoFPClassImpl,
10428 void trackStatistics()
 const override {
// nofpclass for an argument, derived from all call-site arguments.
10433struct AANoFPClassArgument final
10434 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10436 : AAArgumentFromCallSiteArguments<
 AANoFPClass, AANoFPClassImpl>(IRP,
 A) {}
// nofpclass for a call-site argument, reusing the floating-value logic.
10442struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10444 : AANoFPClassFloating(IRP,
 A) {}
10447 void trackStatistics()
 const override {
// nofpclass for a call-site return, mapped from the callee's returned
// position.
10452struct AANoFPClassCallSiteReturned final
10453 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10455 : AACalleeToCallSite<
 AANoFPClass, AANoFPClassImpl>(IRP,
 A) {}
10458 void trackStatistics()
 const override {
// Fragments of AACallEdgesImpl (struct header elided): maintains the set of
// optimistically known callees plus two flags for unknown callees (any, and
// non-inline-asm). State mutators report changes via a ChangeStatus out-param.
10467 return CalledFunctions;
10470 bool hasUnknownCallee()
 const override {
 return HasUnknownCallee; }
10472 bool hasNonAsmUnknownCallee()
 const override {
10473 return HasUnknownCalleeNonAsm;
10476 const std::string getAsStr(
 Attributor *
 A)
 const override {
10477 return "CallEdges[" + std::to_string(HasUnknownCallee) +
 "," +
10478 std::to_string(CalledFunctions.size()) +
 "]";
10481 void trackStatistics()
 const override {}
// Adding a new callee to the set is a state change.
10485 if (CalledFunctions.insert(Fn)) {
10486 Change = ChangeStatus::CHANGED;
10492 void setHasUnknownCallee(
 bool NonAsm,
 ChangeStatus &Change) {
10493 if (!HasUnknownCallee)
10494 Change = ChangeStatus::CHANGED;
10495 if (NonAsm && !HasUnknownCalleeNonAsm)
10496 Change = ChangeStatus::CHANGED;
10497 HasUnknownCalleeNonAsm |= NonAsm;
10498 HasUnknownCallee =
 true;
// Flags default to the optimistic "no unknown callee" state.
10506 bool HasUnknownCallee =
 false;
10509 bool HasUnknownCalleeNonAsm =
 false;
// Call edges for a single call site: resolves the called operand (through
// simplification and indirect-call info), callback uses, and inline asm.
10512struct AACallEdgesCallSite :
 public AACallEdgesImpl {
10514 : AACallEdgesImpl(IRP,
 A) {}
// A resolved Function value becomes an edge; anything else is unknown.
10520 if (
 Function *Fn = dyn_cast<Function>(&V)) {
10521 addCalledFunction(Fn, Change);
10523 LLVM_DEBUG(
 dbgs() <<
 "[AACallEdges] Unrecognized value: " << V <<
 "\n");
10524 setHasUnknownCallee(
 true, Change);
10534 if (isa<Constant>(V)) {
10535 VisitValue(*V, CtxI);
// Otherwise enumerate all simplified potential callees.
10539 bool UsedAssumedInformation =
 false;
10545 for (
 auto &VAC : Values)
10546 VisitValue(*
 VAC.getValue(),
 VAC.getCtxI());
10549 CallBase *CB = cast<CallBase>(getCtxI());
// Inline asm with side effects is an unknown (but asm) callee.
10552 if (
 IA->hasSideEffects() &&
10555 setHasUnknownCallee(
 false, Change);
// Indirect calls: consult AAIndirectCallInfo for the possible callees.
10562 *
 this, getIRPosition(), DepClassTy::OPTIONAL))
10563 if (IndirectCallAA->foreachCallee(
10564 [&](
 Function *Fn) { return VisitValue(*Fn, CB); }))
// Callback operands (e.g. via !callback metadata) also contribute edges.
10573 for (
 const Use *U : CallbackUses)
10574 ProcessCalledOperand(
 U->get(), CB);
// Call edges for a whole function: union of the per-call-site edge AAs over
// all call-like instructions; failure to enumerate them marks unknown.
10580struct AACallEdgesFunction :
 public AACallEdgesImpl {
10582 : AACallEdgesImpl(IRP,
 A) {}
10589 CallBase &CB = cast<CallBase>(Inst);
// Propagate the per-call-site unknown flags (non-asm vs. asm-only).
10595 if (CBEdges->hasNonAsmUnknownCallee())
10596 setHasUnknownCallee(
 true, Change);
10597 if (CBEdges->hasUnknownCallee())
10598 setHasUnknownCallee(
 false, Change);
10600 for (
 Function *
 F : CBEdges->getOptimisticEdges())
10601 addCalledFunction(
 F, Change);
10607 bool UsedAssumedInformation =
 false;
10608 if (!
 A.checkForAllCallLikeInstructions(ProcessCallInst, *
 this,
10609 UsedAssumedInformation,
10613 setHasUnknownCallee(
 true, Change);
// Inter-function reachability: answers "can instruction From reach To across
// calls?" using cached reachability queries plus the call-edge AAs.
10622struct AAInterFnReachabilityFunction
10623 :
 public CachedReachabilityAA<AAInterFnReachability, Function> {
10624 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10628 bool instructionCanReach(
10631 assert(
 From.getFunction() == getAnchorScope() &&
 "Queried the wrong AA!");
// The query cache is mutated on lookup, hence the const_cast on `this`.
10632 auto *NonConstThis =
 const_cast<AAInterFnReachabilityFunction *
 >(
 this);
10634 RQITy StackRQI(
 A,
 From, To, ExclusionSet,
 false);
10635 typename RQITy::Reachable
 Result;
10636 if (!NonConstThis->checkQueryCache(
 A, StackRQI, Result))
10637 return NonConstThis->isReachableImpl(
 A, StackRQI,
10639 return Result == RQITy::Reachable::Yes;
10643 bool IsTemporaryRQI)
 override {
// If the entry instruction cannot even reach From intraprocedurally,
// the answer is No.
10646 if (EntryI != RQI.From &&
10647 !instructionCanReach(
 A, *EntryI, *RQI.To,
 nullptr))
10648 return rememberResult(
 A, RQITy::Reachable::No, RQI,
 false,
// Check whether any callee reachable from a call base can reach RQI.To.
10651 auto CheckReachableCallBase = [&](
 CallBase *CB) {
10654 if (!CBEdges || !CBEdges->getState().isValidState())
10657 if (CBEdges->hasUnknownCallee())
10660 for (
 Function *Fn : CBEdges->getOptimisticEdges()) {
// Recursion into the anchor function only matters if we started there.
10671 if (Fn == getAnchorScope()) {
10672 if (EntryI == RQI.From)
10679 DepClassTy::OPTIONAL);
10682 if (!InterFnReachability ||
10692 DepClassTy::OPTIONAL);
10698 if (CheckReachableCallBase(cast<CallBase>(&CBInst)))
10701 A, *RQI.From, CBInst, RQI.ExclusionSet);
10704 bool UsedExclusionSet =
 true;
10705 bool UsedAssumedInformation =
 false;
10706 if (!
 A.checkForAllCallLikeInstructions(CheckCallBase, *
 this,
10707 UsedAssumedInformation,
10709 return rememberResult(
 A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10712 return rememberResult(
 A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10716 void trackStatistics()
 const override {}
// Helper fragments: askForAssumedConstant queries an AA of type AAType for a
// constant (nullopt = not known yet); the loop below folds multiple
// simplified values into a single value if they agree.
10720template <
 typename AAType>
10721static std::optional<Constant *>
10732 std::optional<Constant *> COpt = AA->getAssumedConstant(
 A);
10734 if (!COpt.has_value()) {
10736 return std::nullopt;
10738 if (
 auto *
 C = *COpt) {
// getSingleValue fragment: V stays unset until the first value is seen and
// becomes nullptr when values disagree.
10749 std::optional<Value *> V;
10750 for (
 auto &It : Values) {
10752 if (V.has_value() && !*V)
10755 if (!V.has_value())
10769 if (
A.hasSimplificationCallback(getIRPosition())) {
10770 indicatePessimisticFixpoint();
10773 Value *Stripped = getAssociatedValue().stripPointerCasts();
10774 auto *
CE = dyn_cast<ConstantExpr>(Stripped);
10775 if (isa<Constant>(Stripped) &&
10776 (!CE ||
CE->getOpcode() != Instruction::ICmp)) {
10777 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10779 indicateOptimisticFixpoint();
10782 AAPotentialValues::initialize(
A);
10786 const std::string getAsStr(
Attributor *
A)
const override {
10793 template <
typename AAType>
10794 static std::optional<Value *> askOtherAA(
Attributor &
A,
10799 std::optional<Constant *>
C = askForAssumedConstant<AAType>(
A, AA, IRP, Ty);
10801 return std::nullopt;
10813 if (
auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
10814 for (
const auto &U : CB->
args()) {
10824 Type &Ty = *getAssociatedType();
10825 std::optional<Value *> SimpleV =
10826 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10827 if (SimpleV.has_value() && !*SimpleV) {
10829 *
this, ValIRP, DepClassTy::OPTIONAL);
10830 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10831 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10832 State.
unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10833 if (PotentialConstantsAA->undefIsContained())
10838 if (!SimpleV.has_value())
10845 if (isa<ConstantInt>(VPtr))
10850 State.unionAssumed({{*VPtr, CtxI}, S});
10860 return II.I ==
I && II.S == S;
10862 bool operator<(
const ItemInfo &II)
const {
10875 bool UsedAssumedInformation =
false;
10877 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10878 UsedAssumedInformation))
10881 for (
auto &It : Values)
10882 ValueScopeMap[It] += CS;
10884 for (
auto &It : ValueScopeMap)
10885 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10892 auto NewS = StateType::getBestState(getState());
10893 for (
const auto &It : getAssumedSet()) {
10896 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10899 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10907 getState() = StateType::getBestState(getState());
10908 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10915 return indicatePessimisticFixpoint();
10923 if (!getAssumedSimplifiedValues(
A, Values, S))
10925 Value &OldV = getAssociatedValue();
10926 if (isa<UndefValue>(OldV))
10928 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10929 if (!NewV || NewV == &OldV)
10934 if (
A.changeAfterManifest(getIRPosition(), *NewV))
10940 bool getAssumedSimplifiedValues(
10942 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
10943 if (!isValidState())
10945 bool UsedAssumedInformation =
false;
10946 for (
const auto &It : getAssumedSet())
10947 if (It.second & S) {
10948 if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
10949 isa<SelectInst>(It.first.getValue()))) {
10950 if (
A.getAssumedSimplifiedValues(
10952 this, Values, S, UsedAssumedInformation))
10957 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10962struct AAPotentialValuesFloating : AAPotentialValuesImpl {
10964 : AAPotentialValuesImpl(IRP,
A) {}
10968 auto AssumedBefore = getAssumed();
10970 genericValueTraversal(
A, &getAssociatedValue());
10972 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
10973 : ChangeStatus::CHANGED;
10977 struct LivenessInfo {
10978 const AAIsDead *LivenessAA =
nullptr;
10979 bool AnyDead =
false;
10992 bool UsedAssumedInformation =
false;
10994 auto GetSimplifiedValues = [&](
Value &
V,
10996 if (!
A.getAssumedSimplifiedValues(
11002 return Values.
empty();
11004 if (GetSimplifiedValues(*
LHS, LHSValues))
11006 if (GetSimplifiedValues(*
RHS, RHSValues))
11018 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11026 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11027 if (isa<UndefValue>(LHSV) || isa<UndefValue>(RHSV)) {
11029 nullptr, II.S, getAnchorScope());
11035 if (&LHSV == &RHSV &&
11039 addValue(
A, getState(), *NewV,
nullptr, II.S,
11046 if (TypedLHS && TypedRHS) {
11048 if (NewV && NewV != &Cmp) {
11049 addValue(
A, getState(), *NewV,
nullptr, II.S,
11059 bool LHSIsNull = isa<ConstantPointerNull>(LHSV);
11060 bool RHSIsNull = isa<ConstantPointerNull>(RHSV);
11061 if (!LHSIsNull && !RHSIsNull)
11067 assert((LHSIsNull || RHSIsNull) &&
11068 "Expected nullptr versus non-nullptr comparison at this point");
11071 unsigned PtrIdx = LHSIsNull;
11072 bool IsKnownNonNull;
11073 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
11075 DepClassTy::REQUIRED, IsKnownNonNull);
11076 if (!IsAssumedNonNull)
11082 addValue(
A, getState(), *NewV,
nullptr, II.S,
11087 for (
auto &LHSValue : LHSValues)
11088 for (
auto &RHSValue : RHSValues)
11089 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11097 bool UsedAssumedInformation =
false;
11099 std::optional<Constant *>
C =
11100 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11101 bool NoValueYet = !
C.has_value();
11102 if (NoValueYet || isa_and_nonnull<UndefValue>(*
C))
11104 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*
C)) {
11106 Worklist.
push_back({{*
SI.getFalseValue(), CtxI}, II.S});
11108 Worklist.
push_back({{*
SI.getTrueValue(), CtxI}, II.S});
11109 }
else if (&SI == &getAssociatedValue()) {
11111 Worklist.
push_back({{*
SI.getTrueValue(), CtxI}, II.S});
11112 Worklist.
push_back({{*
SI.getFalseValue(), CtxI}, II.S});
11114 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11116 if (!SimpleV.has_value())
11119 addValue(
A, getState(), **SimpleV, CtxI, II.S, getAnchorScope());
11131 bool UsedAssumedInformation =
false;
11133 PotentialValueOrigins, *
this,
11134 UsedAssumedInformation,
11136 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11137 "loaded values for load instruction "
11148 if (!
I || isa<AssumeInst>(
I))
11150 if (
auto *SI = dyn_cast<StoreInst>(
I))
11151 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11153 UsedAssumedInformation,
11155 return A.isAssumedDead(*
I,
this,
nullptr,
11156 UsedAssumedInformation,
11159 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11160 "and we cannot delete all the stores: "
11171 bool AllLocal = ScopeIsLocal;
11176 if (!DynamicallyUnique) {
11177 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11178 "values are dynamically unique: "
11183 for (
auto *PotentialCopy : PotentialCopies) {
11185 Worklist.
push_back({{*PotentialCopy, CtxI}, II.S});
11190 if (!AllLocal && ScopeIsLocal)
11195 bool handlePHINode(
11199 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11200 LivenessInfo &LI = LivenessAAs[&
F];
11201 if (!LI.LivenessAA)
11207 if (&
PHI == &getAssociatedValue()) {
11208 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11211 *
PHI.getFunction());
11215 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11217 if (LI.LivenessAA &&
11218 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11228 if (CyclePHI && isa<Instruction>(V) &&
11229 (!
C ||
C->contains(cast<Instruction>(V)->getParent())))
11237 bool UsedAssumedInformation =
false;
11238 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11240 if (!SimpleV.has_value())
11244 addValue(
A, getState(), **SimpleV, &
PHI, II.S, getAnchorScope());
11253 bool SomeSimplified =
false;
11254 bool UsedAssumedInformation =
false;
11259 const auto &SimplifiedOp =
A.getAssumedSimplified(
11264 if (!SimplifiedOp.has_value())
11268 NewOps[
Idx] = *SimplifiedOp;
11272 SomeSimplified |= (NewOps[
Idx] !=
Op);
11278 if (!SomeSimplified)
11285 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11291 if (!NewV || NewV == &
I)
11294 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11296 Worklist.
push_back({{*NewV, II.I.getCtxI()}, II.S});
11304 if (
auto *CI = dyn_cast<CmpInst>(&
I))
11306 CI->getPredicate(), II, Worklist);
11308 switch (
I.getOpcode()) {
11309 case Instruction::Select:
11310 return handleSelectInst(
A, cast<SelectInst>(
I), II, Worklist);
11311 case Instruction::PHI:
11312 return handlePHINode(
A, cast<PHINode>(
I), II, Worklist, LivenessAAs);
11313 case Instruction::Load:
11314 return handleLoadInst(
A, cast<LoadInst>(
I), II, Worklist);
11316 return handleGenericInst(
A,
I, II, Worklist);
11331 Value *
V = II.I.getValue();
11338 if (!Visited.
insert(II).second)
11343 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11344 << Iteration <<
"!\n");
11345 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11351 Value *NewV =
nullptr;
11352 if (
V->getType()->isPointerTy()) {
11355 if (
auto *CB = dyn_cast<CallBase>(V))
11365 if (NewV && NewV != V) {
11366 Worklist.
push_back({{*NewV, CtxI}, S});
11370 if (
auto *CE = dyn_cast<ConstantExpr>(V)) {
11371 if (
CE->getOpcode() == Instruction::ICmp)
11372 if (handleCmp(
A, *CE,
CE->getOperand(0),
CE->getOperand(1),
11377 if (
auto *
I = dyn_cast<Instruction>(V)) {
11382 if (V != InitialV || isa<Argument>(V))
11387 if (V == InitialV && CtxI == getCtxI()) {
11388 indicatePessimisticFixpoint();
11392 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11393 }
while (!Worklist.
empty());
11397 for (
auto &It : LivenessAAs)
11398 if (It.second.AnyDead)
11399 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11403 void trackStatistics()
const override {
11408struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11409 using Base = AAPotentialValuesImpl;
11415 auto &Arg = cast<Argument>(getAssociatedValue());
11417 indicatePessimisticFixpoint();
11422 auto AssumedBefore = getAssumed();
11424 unsigned ArgNo = getCalleeArgNo();
11426 bool UsedAssumedInformation =
false;
11430 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11433 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11435 UsedAssumedInformation))
11438 return isValidState();
11441 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11443 UsedAssumedInformation))
11444 return indicatePessimisticFixpoint();
11446 Function *Fn = getAssociatedFunction();
11447 bool AnyNonLocal =
false;
11448 for (
auto &It : Values) {
11449 if (isa<Constant>(It.getValue())) {
11450 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11455 return indicatePessimisticFixpoint();
11457 if (
auto *Arg = dyn_cast<Argument>(It.getValue()))
11459 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11465 AnyNonLocal =
true;
11467 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11469 giveUpOnIntraprocedural(
A);
11471 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11472 : ChangeStatus::CHANGED;
11476 void trackStatistics()
const override {
11481struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11482 using Base = AAPotentialValuesFloating;
11489 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11490 indicatePessimisticFixpoint();
11497 ReturnedArg = &Arg;
11500 if (!
A.isFunctionIPOAmendable(*
F) ||
11501 A.hasSimplificationCallback(getIRPosition())) {
11503 indicatePessimisticFixpoint();
11505 indicateOptimisticFixpoint();
11511 auto AssumedBefore = getAssumed();
11512 bool UsedAssumedInformation =
false;
11515 Function *AnchorScope = getAnchorScope();
11521 UsedAssumedInformation,
11527 addValue(
A, getState(), *
VAC.getValue(),
11528 VAC.getCtxI() ?
VAC.getCtxI() : CtxI, S, AnchorScope);
11534 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11537 bool AddValues =
true;
11538 if (isa<PHINode>(RetI.getOperand(0)) ||
11539 isa<SelectInst>(RetI.getOperand(0))) {
11540 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11544 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11547 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11548 UsedAssumedInformation,
11550 return indicatePessimisticFixpoint();
11553 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11554 : ChangeStatus::CHANGED;
11559 Function *AnchorScope)
const override {
11561 if (
auto *CB = dyn_cast<CallBase>(&V))
11564 Base::addValue(
A, State, V, CtxI, S, AnchorScope);
11569 return ChangeStatus::UNCHANGED;
11571 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11573 return ChangeStatus::UNCHANGED;
11574 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11576 return ChangeStatus::UNCHANGED;
11579 if (
auto *Arg = dyn_cast<Argument>(NewVal)) {
11581 "Number of function with unique return");
11582 Changed |=
A.manifestAttrs(
11589 Value *RetOp = RetI.getOperand(0);
11590 if (isa<UndefValue>(RetOp) || RetOp == NewVal)
11593 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11594 Changed = ChangeStatus::CHANGED;
11597 bool UsedAssumedInformation =
false;
11598 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11599 UsedAssumedInformation,
11609 void trackStatistics()
const override{
11616struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11618 : AAPotentialValuesImpl(IRP,
A) {}
11627 void trackStatistics()
const override {
11632struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11634 : AAPotentialValuesFunction(IRP,
A) {}
11637 void trackStatistics()
const override {
11642struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11644 : AAPotentialValuesImpl(IRP,
A) {}
11648 auto AssumedBefore = getAssumed();
11652 return indicatePessimisticFixpoint();
11654 bool UsedAssumedInformation =
false;
11655 auto *CB = cast<CallBase>(getCtxI());
11658 UsedAssumedInformation))
11659 return indicatePessimisticFixpoint();
11664 UsedAssumedInformation))
11665 return indicatePessimisticFixpoint();
11669 bool AnyNonLocal =
false;
11670 for (
auto &It : Values) {
11671 Value *
V = It.getValue();
11672 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11673 V, *CB, *
this, UsedAssumedInformation);
11674 if (!CallerV.has_value()) {
11678 V = *CallerV ? *CallerV :
V;
11684 if (
auto *Arg = dyn_cast<Argument>(V))
11690 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11692 AnyNonLocal =
true;
11700 UsedAssumedInformation))
11701 return indicatePessimisticFixpoint();
11702 AnyNonLocal =
false;
11704 for (
auto &It : Values) {
11705 Value *
V = It.getValue();
11707 return indicatePessimisticFixpoint();
11709 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11711 AnyNonLocal =
true;
11717 giveUpOnIntraprocedural(
A);
11719 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11720 : ChangeStatus::CHANGED;
11724 return AAPotentialValues::indicatePessimisticFixpoint();
11728 void trackStatistics()
const override {
11733struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11735 : AAPotentialValuesFloating(IRP,
A) {}
11738 void trackStatistics()
const override {
11754 if (getKnown().isUniversal())
11755 return ChangeStatus::UNCHANGED;
11758 return A.manifestAttrs(
11761 llvm::join(getAssumed().getSet(),
",")),
11766 return isValidState() && setContains(Assumption);
11770 const std::string getAsStr(
Attributor *
A)
const override {
11771 const SetContents &Known = getKnown();
11772 const SetContents &Assumed = getAssumed();
11774 const std::string KnownStr =
11775 llvm::join(Known.getSet().begin(), Known.getSet().end(),
",");
11776 const std::string AssumedStr =
11777 (Assumed.isUniversal())
11779 : llvm::join(Assumed.getSet().begin(), Assumed.getSet().end(),
",");
11781 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11796struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11798 : AAAssumptionInfoImpl(IRP,
A,
11803 bool Changed =
false;
11808 DepClassTy::REQUIRED);
11812 Changed |= getIntersection(AssumptionAA->getAssumed());
11813 return !getAssumed().empty() || !getKnown().empty();
11816 bool UsedAssumedInformation =
false;
11821 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11822 UsedAssumedInformation))
11823 return indicatePessimisticFixpoint();
11825 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11828 void trackStatistics()
const override {}
11832struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11835 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11846 auto *AssumptionAA =
11849 return indicatePessimisticFixpoint();
11850 bool Changed = getIntersection(AssumptionAA->getAssumed());
11851 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11855 void trackStatistics()
const override {}
11867 return Assumptions;
11882struct AAUnderlyingObjectsImpl
11888 const std::string getAsStr(
Attributor *
A)
const override {
11889 return std::string(
"UnderlyingObjects ") +
11891 ? (std::string(
"inter #") +
11892 std::to_string(InterAssumedUnderlyingObjects.size()) +
11893 " objs" + std::string(
", intra #") +
11894 std::to_string(IntraAssumedUnderlyingObjects.size()) +
11900 void trackStatistics()
const override {}
11904 auto &
Ptr = getAssociatedValue();
11908 bool UsedAssumedInformation =
false;
11913 Scope, UsedAssumedInformation))
11916 bool Changed =
false;
11918 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11919 auto &
VAC = Values[
I];
11920 auto *Obj =
VAC.getValue();
11922 if (UO && UO !=
VAC.getValue() && SeenObjects.
insert(UO).second) {
11925 auto Pred = [&Values](
Value &
V) {
11930 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11932 "The forall call should not return false at this position");
11937 if (isa<SelectInst>(Obj)) {
11938 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope);
11941 if (
auto *
PHI = dyn_cast<PHINode>(Obj)) {
11944 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
11945 Changed |= handleIndirect(
A, *
PHI->getIncomingValue(u),
11946 UnderlyingObjects, Scope);
11951 Changed |= UnderlyingObjects.
insert(Obj);
11957 bool Changed =
false;
11961 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11964 bool forallUnderlyingObjects(
11967 if (!isValidState())
11968 return Pred(getAssociatedValue());
11971 ? IntraAssumedUnderlyingObjects
11972 : InterAssumedUnderlyingObjects;
11973 for (
Value *Obj : AssumedUnderlyingObjects)
11986 bool Changed =
false;
11989 auto Pred = [&](
Value &
V) {
11990 Changed |= UnderlyingObjects.
insert(&V);
11993 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
11995 "The forall call should not return false at this position");
12005struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12007 : AAUnderlyingObjectsImpl(IRP,
A) {}
12010struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12012 : AAUnderlyingObjectsImpl(IRP,
A) {}
12015struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12017 : AAUnderlyingObjectsImpl(IRP,
A) {}
12020struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12022 : AAUnderlyingObjectsImpl(IRP,
A) {}
12025struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12027 : AAUnderlyingObjectsImpl(IRP,
A) {}
12030struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12032 : AAUnderlyingObjectsImpl(IRP,
A) {}
12035struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12037 : AAUnderlyingObjectsImpl(IRP,
A) {}
12052 Instruction *UInst = dyn_cast<Instruction>(
U.getUser());
12058 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12059 << *UInst <<
"\n");
12061 if (
auto *Cmp = dyn_cast<ICmpInst>(
U.getUser())) {
12062 int Idx = &
Cmp->getOperandUse(0) == &
U;
12063 if (isa<Constant>(
Cmp->getOperand(
Idx)))
12065 return U == &getAnchorValue();
12069 if (isa<ReturnInst>(UInst)) {
12071 Worklist.
push_back(ACS.getInstruction());
12074 bool UsedAssumedInformation =
false;
12076 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12078 UsedAssumedInformation))
12085 auto *CB = dyn_cast<CallBase>(UInst);
12096 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12105 unsigned NumUsesBefore =
Uses.size();
12111 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12114 case UseCaptureKind::NO_CAPTURE:
12115 return checkUse(
A, U, Follow, Worklist);
12116 case UseCaptureKind::MAY_CAPTURE:
12117 return checkUse(
A, U, Follow, Worklist);
12118 case UseCaptureKind::PASSTHROUGH:
12124 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12125 Uses.insert(&OldU);
12129 while (!Worklist.
empty()) {
12131 if (!Visited.
insert(V).second)
12133 if (!
A.checkForAllUses(UsePred, *
this, *V,
12135 DepClassTy::OPTIONAL,
12136 true, EquivalentUseCB)) {
12137 return indicatePessimisticFixpoint();
12141 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12142 : ChangeStatus::CHANGED;
12145 bool isPotentialUse(
const Use &U)
const override {
12146 return !isValidState() ||
Uses.contains(&U);
12151 return ChangeStatus::UNCHANGED;
12155 const std::string getAsStr(
Attributor *
A)
const override {
12156 return "[" + std::to_string(
Uses.size()) +
" uses]";
12159 void trackStatistics()
const override {
12177 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12178 if (!MD && !
A.isClosedWorldModule())
12182 for (
const auto &
Op : MD->operands())
12183 if (
Function *Callee = mdconst::dyn_extract_or_null<Function>(
Op))
12184 PotentialCallees.insert(Callee);
12185 }
else if (
A.isClosedWorldModule()) {
12187 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12188 PotentialCallees.insert(IndirectlyCallableFunctions.
begin(),
12189 IndirectlyCallableFunctions.
end());
12192 if (PotentialCallees.empty())
12193 indicateOptimisticFixpoint();
12197 CallBase *CB = cast<CallBase>(getCtxI());
12202 bool AllCalleesKnownNow = AllCalleesKnown;
12204 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12205 bool &UsedAssumedInformation) {
12208 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12210 UsedAssumedInformation = !GIAA->isAtFixpoint();
12214 auto AddPotentialCallees = [&]() {
12215 for (
auto *PotentialCallee : PotentialCallees) {
12216 bool UsedAssumedInformation =
false;
12217 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12218 AssumedCalleesNow.
insert(PotentialCallee);
12224 bool UsedAssumedInformation =
false;
12227 AA::ValueScope::AnyScope,
12228 UsedAssumedInformation)) {
12229 if (PotentialCallees.empty())
12230 return indicatePessimisticFixpoint();
12231 AddPotentialCallees();
12236 auto CheckPotentialCallee = [&](
Function &Fn) {
12237 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12240 auto &CachedResult = FilterResults[&Fn];
12241 if (CachedResult.has_value())
12242 return CachedResult.value();
12244 bool UsedAssumedInformation =
false;
12245 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12246 if (!UsedAssumedInformation)
12247 CachedResult =
false;
12256 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12257 bool IsKnown =
false;
12258 if (AA::hasAssumedIRAttr<Attribute::NoUndef>(
12260 DepClassTy::OPTIONAL, IsKnown)) {
12262 CachedResult =
false;
12267 CachedResult =
true;
12273 for (
auto &VAC : Values) {
12274 if (isa<UndefValue>(
VAC.getValue()))
12276 if (isa<ConstantPointerNull>(
VAC.getValue()) &&
12277 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12280 if (
auto *VACFn = dyn_cast<Function>(
VAC.getValue())) {
12281 if (CheckPotentialCallee(*VACFn))
12282 AssumedCalleesNow.
insert(VACFn);
12285 if (!PotentialCallees.empty()) {
12286 AddPotentialCallees();
12289 AllCalleesKnownNow =
false;
12292 if (AssumedCalleesNow == AssumedCallees &&
12293 AllCalleesKnown == AllCalleesKnownNow)
12294 return ChangeStatus::UNCHANGED;
12296 std::swap(AssumedCallees, AssumedCalleesNow);
12297 AllCalleesKnown = AllCalleesKnownNow;
12298 return ChangeStatus::CHANGED;
12304 if (!AllCalleesKnown && AssumedCallees.empty())
12305 return ChangeStatus::UNCHANGED;
12307 CallBase *CB = cast<CallBase>(getCtxI());
12308 bool UsedAssumedInformation =
false;
12309 if (
A.isAssumedDead(*CB,
this,
nullptr,
12310 UsedAssumedInformation))
12311 return ChangeStatus::UNCHANGED;
12315 if (
FP->getType()->getPointerAddressSpace())
12326 if (AssumedCallees.empty()) {
12327 assert(AllCalleesKnown &&
12328 "Expected all callees to be known if there are none.");
12329 A.changeToUnreachableAfterManifest(CB);
12330 return ChangeStatus::CHANGED;
12334 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12335 auto *NewCallee = AssumedCallees.front();
12338 return ChangeStatus::CHANGED;
12345 A.deleteAfterManifest(*CB);
12346 return ChangeStatus::CHANGED;
12356 bool SpecializedForAnyCallees =
false;
12357 bool SpecializedForAllCallees = AllCalleesKnown;
12361 for (
Function *NewCallee : AssumedCallees) {
12362 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee)) {
12363 SkippedAssumedCallees.
push_back(NewCallee);
12364 SpecializedForAllCallees =
false;
12367 SpecializedForAnyCallees =
true;
12373 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12374 A.registerManifestAddedBasicBlock(*IP->getParent());
12375 auto *SplitTI = cast<BranchInst>(LastCmp->
getNextNode());
12380 A.registerManifestAddedBasicBlock(*ElseBB);
12382 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12390 auto *CBClone = cast<CallBase>(CB->
clone());
12391 CBClone->insertBefore(ThenTI);
12392 NewCall = &cast<CallInst>(
promoteCall(*CBClone, NewCallee, &RetBC));
12400 auto AttachCalleeMetadata = [&](
CallBase &IndirectCB) {
12401 if (!AllCalleesKnown)
12402 return ChangeStatus::UNCHANGED;
12403 MDBuilder MDB(IndirectCB.getContext());
12404 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12405 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12406 return ChangeStatus::CHANGED;
12409 if (!SpecializedForAnyCallees)
12410 return AttachCalleeMetadata(*CB);
12413 if (SpecializedForAllCallees) {
12417 IP->eraseFromParent();
12419 auto *CBClone = cast<CallInst>(CB->
clone());
12420 CBClone->setName(CB->
getName());
12421 CBClone->insertBefore(*IP->getParent(), IP);
12422 NewCalls.
push_back({CBClone,
nullptr});
12423 AttachCalleeMetadata(*CBClone);
12431 for (
auto &It : NewCalls) {
12433 Instruction *CallRet = It.second ? It.second : It.first;
12445 A.deleteAfterManifest(*CB);
12446 Changed = ChangeStatus::CHANGED;
12452 const std::string getAsStr(
Attributor *
A)
const override {
12453 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12454 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12458 void trackStatistics()
const override {
12459 if (AllCalleesKnown) {
12461 Eliminated, CallSites,
12462 "Number of indirect call sites eliminated via specialization")
12465 "Number of indirect call sites specialized")
12470 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12487 bool AllCalleesKnown =
true;
12498 assert(isValidState() &&
"the AA is invalid");
12499 return AssumedAddressSpace;
12504 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12505 "Associated value is not a pointer");
12509 int32_t OldAddressSpace = AssumedAddressSpace;
12511 DepClassTy::REQUIRED);
12512 auto Pred = [&](
Value &Obj) {
12513 if (isa<UndefValue>(&Obj))
12518 if (!AUO->forallUnderlyingObjects(Pred))
12519 return indicatePessimisticFixpoint();
12521 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12522 : ChangeStatus::CHANGED;
12527 Value *AssociatedValue = &getAssociatedValue();
12528 Value *OriginalValue = peelAddrspacecast(AssociatedValue);
12532 return ChangeStatus::UNCHANGED;
12534 Type *NewPtrTy = PointerType::get(getAssociatedType()->getContext(),
12536 bool UseOriginalValue =
12540 bool Changed =
false;
12544 if (UseOriginalValue) {
12545 A.changeUseAfterManifest(U, *OriginalValue);
12550 A.changeUseAfterManifest(U, *
CastInst);
12553 auto Pred = [&](
const Use &
U,
bool &) {
12554 if (
U.get() != AssociatedValue)
12556 auto *Inst = dyn_cast<Instruction>(
U.getUser());
12563 if (isa<LoadInst>(Inst))
12564 MakeChange(Inst,
const_cast<Use &
>(U));
12565 if (isa<StoreInst>(Inst)) {
12567 if (
U.getOperandNo() == 1)
12568 MakeChange(Inst,
const_cast<Use &
>(U));
12575 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
12578 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12582 const std::string getAsStr(
Attributor *
A)
const override {
12583 if (!isValidState())
12584 return "addrspace(<invalid>)";
12585 return "addrspace(" +
12586 (AssumedAddressSpace == NoAddressSpace
12588 : std::to_string(AssumedAddressSpace)) +
12593 int32_t AssumedAddressSpace = NoAddressSpace;
12595 bool takeAddressSpace(int32_t AS) {
12596 if (AssumedAddressSpace == NoAddressSpace) {
12597 AssumedAddressSpace = AS;
12600 return AssumedAddressSpace == AS;
12604 if (
auto *
I = dyn_cast<AddrSpaceCastInst>(V))
12605 return peelAddrspacecast(
I->getPointerOperand());
12606 if (
auto *
C = dyn_cast<ConstantExpr>(V))
12607 if (
C->getOpcode() == Instruction::AddrSpaceCast)
12608 return peelAddrspacecast(
C->getOperand(0));
12613struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
12615 : AAAddressSpaceImpl(IRP,
A) {}
12617 void trackStatistics()
const override {
12622struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
12624 : AAAddressSpaceImpl(IRP,
A) {}
12630 (void)indicatePessimisticFixpoint();
12633 void trackStatistics()
const override {
12638struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
12640 : AAAddressSpaceImpl(IRP,
A) {}
12642 void trackStatistics()
const override {
12647struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
12649 : AAAddressSpaceImpl(IRP,
A) {}
12654struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
12656 : AAAddressSpaceImpl(IRP,
A) {}
12662 (void)indicatePessimisticFixpoint();
12665 void trackStatistics()
const override {
12677 std::optional<TypeSize> getAllocatedSize()
const override {
12678 assert(isValidState() &&
"the AA is invalid");
12679 return AssumedAllocatedSize;
12682 std::optional<TypeSize> findInitialAllocationSize(
Instruction *
I,
12686 switch (
I->getOpcode()) {
12687 case Instruction::Alloca: {
12692 return std::nullopt;
12702 if (!isa<AllocaInst>(
I))
12703 return indicatePessimisticFixpoint();
12705 bool IsKnownNoCapture;
12706 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
12707 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
12708 return indicatePessimisticFixpoint();
12711 A.getOrCreateAAFor<
AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
12714 return indicatePessimisticFixpoint();
12717 return indicatePessimisticFixpoint();
12720 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
12723 if (!AllocationSize)
12724 return indicatePessimisticFixpoint();
12728 if (*AllocationSize == 0)
12729 return indicatePessimisticFixpoint();
12735 return indicatePessimisticFixpoint();
12737 if (BinSize == 0) {
12738 auto NewAllocationSize = std::optional<TypeSize>(
TypeSize(0,
false));
12739 if (!changeAllocationSize(NewAllocationSize))
12740 return ChangeStatus::UNCHANGED;
12741 return ChangeStatus::CHANGED;
12745 const auto &It = PI->
begin();
12748 if (It->first.Offset != 0)
12749 return indicatePessimisticFixpoint();
12751 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
12753 if (SizeOfBin >= *AllocationSize)
12754 return indicatePessimisticFixpoint();
12756 auto NewAllocationSize =
12757 std::optional<TypeSize>(
TypeSize(SizeOfBin * 8,
false));
12759 if (!changeAllocationSize(NewAllocationSize))
12760 return ChangeStatus::UNCHANGED;
12762 return ChangeStatus::CHANGED;
12768 assert(isValidState() &&
12769 "Manifest should only be called if the state is valid.");
12773 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
12775 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
12777 switch (
I->getOpcode()) {
12779 case Instruction::Alloca: {
12785 auto *NumBytesToValue =
12786 ConstantInt::get(
I->getContext(),
APInt(32, NumBytesToAllocate));
12789 insertPt = std::next(insertPt);
12795 return ChangeStatus::CHANGED;
12803 return ChangeStatus::UNCHANGED;
12807 const std::string getAsStr(
Attributor *
A)
const override {
12808 if (!isValidState())
12809 return "allocationinfo(<invalid>)";
12810 return "allocationinfo(" +
12811 (AssumedAllocatedSize == HasNoAllocationSize
12813 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
12818 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
12822 bool changeAllocationSize(std::optional<TypeSize>
Size) {
12823 if (AssumedAllocatedSize == HasNoAllocationSize ||
12824 AssumedAllocatedSize !=
Size) {
12825 AssumedAllocatedSize =
Size;
12832struct AAAllocationInfoFloating : AAAllocationInfoImpl {
12834 : AAAllocationInfoImpl(IRP,
A) {}
12836 void trackStatistics()
const override {
12841struct AAAllocationInfoReturned : AAAllocationInfoImpl {
12843 : AAAllocationInfoImpl(IRP,
A) {}
12849 (void)indicatePessimisticFixpoint();
12852 void trackStatistics()
const override {
12857struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
12859 : AAAllocationInfoImpl(IRP,
A) {}
12861 void trackStatistics()
const override {
12866struct AAAllocationInfoArgument : AAAllocationInfoImpl {
12868 : AAAllocationInfoImpl(IRP,
A) {}
12870 void trackStatistics()
const override {
12875struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
12877 : AAAllocationInfoImpl(IRP,
A) {}
12882 (void)indicatePessimisticFixpoint();
12885 void trackStatistics()
const override {
12932#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
12933 case IRPosition::PK: \
12934 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
12936#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
12937 case IRPosition::PK: \
12938 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
12942#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12943 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12944 CLASS *AA = nullptr; \
12945 switch (IRP.getPositionKind()) { \
12946 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12947 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
12948 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
12949 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
12950 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
12951 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
12952 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12953 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12958#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12959 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12960 CLASS *AA = nullptr; \
12961 switch (IRP.getPositionKind()) { \
12962 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12963 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
12964 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
12965 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12966 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12967 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
12968 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
12969 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
12974#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
12975 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12976 CLASS *AA = nullptr; \
12977 switch (IRP.getPositionKind()) { \
12978 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
12980 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
12986#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12987 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12988 CLASS *AA = nullptr; \
12989 switch (IRP.getPositionKind()) { \
12990 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12991 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12992 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12993 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12994 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12995 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
12996 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
12997 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13002#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13003 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13004 CLASS *AA = nullptr; \
13005 switch (IRP.getPositionKind()) { \
13006 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13007 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13008 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13009 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13010 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13011 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13012 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13013 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13018#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13019 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13020 CLASS *AA = nullptr; \
13021 switch (IRP.getPositionKind()) { \
13022 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13023 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13024 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13025 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13026 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13027 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13028 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13029 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13079#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13080#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13081#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13082#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13083#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13084#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13085#undef SWITCH_PK_CREATE
13086#undef SWITCH_PK_INV
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
amdgpu AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static const Value * getPointerOperand(const Instruction *I, bool AllowVolatile)
Get pointer operand of memory accessing instruction.
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
Given that RA is a live propagate it s liveness to any other values it uses(according to Uses). void DeadArgumentEliminationPass
Performs the initial survey of the specified function
Given that RA is a live value
This file defines DenseMapInfo traits for DenseMap.
Rewrite Partial Register Uses
static LoopDeletionResult merge(LoopDeletionResult A, LoopDeletionResult B)
This file implements a map that provides insertion order iteration.
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysis::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Development, "development", "for training")))
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
APInt gcd(const SCEVConstant *C1, const SCEVConstant *C2)
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
AACallGraphNode * operator*() const
A manager for alias analyses.
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
This class represents a conversion between pointers from one address space to another.
an instruction to allocate memory on the stack
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
static Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
static Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
MemoryEffects getMemoryEffects() const
Returns memory effects.
static Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
static Attribute getWithNoFPClass(LLVMContext &Context, FPClassTest Mask)
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static bool isEnumAttrKind(AttrKind Kind)
static Attribute getWithMemoryEffects(LLVMContext &Context, MemoryEffects ME)
static Attribute getWithAlignment(LLVMContext &Context, Align Alignment)
Return a uniquified Attribute object that has the specific alignment set.
LLVM Basic Block Representation.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Instruction & front() const
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
LLVMContext & getContext() const
Get the context in which this basic block lives.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, BasicBlock::iterator InsertBefore)
unsigned getNumSuccessors() const
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Allocate memory in an ever growing pool, as if by bump-pointer.
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
User::op_iterator arg_begin()
Return the iterator pointing to the beginning of the argument list.
bool isMustTailCall() const
Tests if this call site must be tail call optimized.
bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
Value * getArgOperand(unsigned i) const
User::op_iterator arg_end()
Return the iterator pointing to the end of the argument list.
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
Function * getCaller()
Helper to get the caller (the parent function).
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr, BasicBlock::iterator InsertBefore)
This is the base class for all instructions that perform data casts.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
This class is the base class for the comparison instructions.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
A constant value that is initialized with an expression using other constant values.
static Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
This is an important base class in LLVM.
Analysis pass which computes a CycleInfo.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
An instruction for ordering other memory operations.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
This instruction compares its operands according to the predicate given to the constructor.
static bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", bool IsInBounds=false)
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
void insertBefore(Instruction *InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified instruction.
const Module * getModule() const
Return the module owning the function this instruction belongs to or nullptr it the function does not...
const BasicBlock * getParent() const
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
const Function * getFunction() const
Return the function this instruction belongs to.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
A wrapper class for inspecting calls to intrinsic functions.
BasicBlock * getUnwindDest() const
BasicBlock * getNormalDest() const
This is an important class for using LLVM in a threaded context.
Analysis to compute lazy value information.
This pass computes, caches, and vends lazy value constraint information.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
An instruction for reading from memory.
Analysis pass that exposes the LoopInfo for a function.
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
This class implements a map that also provides access to all stored values in a deterministic order.
static MemoryEffectsBase readOnly()
Create MemoryEffectsBase that can read any memory.
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access argument memory.
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible memory.
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
Create MemoryEffectsBase that can write any memory.
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible or argument memory.
static MemoryEffectsBase none()
Create MemoryEffectsBase that cannot read or write any memory.
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
Create MemoryEffectsBase that can read and write any memory.
static std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
Evaluate the size and offset of an object pointed to by a Value*.
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr, BasicBlock::iterator InsertBefore)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Return a value (possibly void), from a function.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
This class represents an analyzed expression in the program.
Analysis pass that exposes the ScalarEvolution for a function.
The main scalar evolution driver.
const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
unsigned getSmallConstantMaxTripCount(const Loop *L)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
This class represents the LLVM 'select' instruction.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
erase - If the set contains the specified pointer, remove it and return true, otherwise return false.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Analysis pass providing the TargetTransformInfo.
Provides information about what library functions are available for the current target.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
static IntegerType * getInt1Ty(LLVMContext &C)
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt8Ty(LLVMContext &C)
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
static IntegerType * getInt32Ty(LLVMContext &C)
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isFPOrFPVectorTy() const
Return true if this is a FP type or a vector of FP.
bool isVoidTy() const
Return true if this is 'void'.
'undef' values are things that do not have specified contents.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr) const
Accumulate the constant offset this value has compared to a base pointer.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
Enumerate the SCCs of a directed graph in reverse topological order of the SCC DAG.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
AddressSpace getAddressSpace(T *V)
@ C
The default llvm calling convention, compatible with C.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
static unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
const_iterator begin(StringRef path, Style style=Style::native)
Get begin iterator over path.
const_iterator end(StringRef path)
Get end iterator over path.
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
bool isKnownNonZero(const Value *V, const DataLayout &DL, unsigned Depth=0, AssumptionCache *AC=nullptr, const Instruction *CxtI=nullptr, const DominatorTree *DT=nullptr, bool UseInstrInfo=true)
Return true if the given value is known to be non-zero when defined.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
unsigned getPointerAddressSpace(const Type *T)
auto successors(const MachineBasicBlock *BB)
bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
bool operator!=(uint64_t V1, const APInt &V2)
UseCaptureKind DetermineUseCaptureKind(const Use &U, llvm::function_ref< bool(Value *, const DataLayout &)> IsDereferenceableOrNull)
Determine what kind of capture behaviour U may exhibit.
Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments and pointer casts from the specified value,...
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
Interval::pred_iterator pred_end(Interval *I)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
void sort(IteratorTy Start, IteratorTy End)
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
Interval::pred_iterator pred_begin(Interval *I)
pred_begin/pred_end - define methods so that Intervals may be used just like BasicBlocks can with the...
bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
AtomicOrdering
Atomic ordering for LLVM's memory model.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
Value * simplifyCmpInst(unsigned Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
PotentialValuesState< APInt > PotentialConstantIntValuesState
DWARFExpression::Operation Op
bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr)
Return true if the instruction does not have any effects besides calculating the result and does not ...
constexpr unsigned BitWidth
std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, unsigned Depth, const SimplifyQuery &SQ)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
bool forallInterferingAccesses(Instruction &I, function_ref< bool(const AAPointerInfo::Access &, bool)> CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
bool forallInterferingAccesses(AA::RangeTy Range, function_ref< bool(const AAPointerInfo::Access &, bool)> CB) const
See AAPointerInfo::forallInterferingAccesses.
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
----------------—AAIntraFnReachability Attribute-----------------------—
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
unsigned computeHashValue() const
An abstract interface for address space information.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
An abstract attribute for getting assumption information.
static const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
uint32_t getKnownDereferenceableBytes() const
Return known dereferenceable bytes.
uint32_t getAssumedDereferenceableBytes() const
Return assumed dereferenceable bytes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves it's defining function instance.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this possition can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static const char ID
Unique ID (due to the unique address)
virtual bool isAssumedReachable(Attributor &A, const Instruction &From, const Instruction &To, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Returns true if 'From' instruction is assumed to reach, 'To' instruction.
An abstract interface for liveness abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
static void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNotConvergent() const
Return true if "non-convergent" is assumed.
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual const_bin_iterator begin() const =0
static const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
virtual std::optional< Type * > getPrivatizableType() const =0
Return the type we can choose for a private copy of the underlying value.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
static const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
virtual ChangeStatus indicateOptimisticFixpoint()=0
Indicate that the abstract state should converge to the optimistic state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everythign but the instruction.
static unsigned getHashValue(const Access &A)
static Access getTombstoneKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Helper struct used in the communication between an abstract attribute (AA) that wants to change the s...
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >(const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & removeAssumedBits(base_t BitsEncoding)
Remove the bits in BitsEncoding from the "assumed bits" if not known.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
Represent subnormal handling kind for floating point instruction inputs and outputs.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
Incoming for lane maks phi as machine instruction, incoming register Reg and incoming block Block are...
State for an integer range.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint()
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...)
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Return the worst possible representable state.
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
Various options to control the behavior of getObjectSize.
static unsigned MaxPotentialValues
Maximum number of potential values to be tracked.
void unionAssumed(const MemberTy &C)
Union assumed set with the passed value.
static PotentialValuesState getBestState()
Return empty set as the best state of potential values.
const SetTy & getAssumedSet() const
Return this set.
Represent one information held inside an operand bundle of an llvm.assume.
A MapVector that performs no allocations if smaller than a certain size.
Helper to tie a abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.