55#include "llvm/IR/IntrinsicsAMDGPU.h"
56#include "llvm/IR/IntrinsicsNVPTX.h"
81#define DEBUG_TYPE "attributor"
85 cl::desc(
"Manifest Attributor internal string attributes."),
98 cl::desc(
"Maximum number of potential values to be "
99 "tracked for each position."),
104 "attributor-max-potential-values-iterations",
cl::Hidden,
106 "Maximum number of iterations we keep dismantling potential values."),
109STATISTIC(NumAAs,
"Number of abstract attributes created");
// Builds the human-readable STATISTIC message for an IR attribute marker,
// e.g. "Number of arguments marked 'nonnull'".
124#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
125 ("Number of " #TYPE " marked '" #NAME "'")
// Derives the statistic variable identifier, e.g. NumIRArguments_nonnull.
126#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
// Declares a STATISTIC counter with the given variable name and message.
127#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
// Declares a counter using the derived statistic name.
128#define STATS_DECL(NAME, TYPE, MSG) \
129 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
// Increments the derived statistic counter.
130#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
// Declares the counter and increments it in one step; the STATS_DECLTRACK_*
// wrappers below instantiate it once per IR position kind (argument,
// call-site argument, function, call site, returns, floating value).
131#define STATS_DECLTRACK(NAME, TYPE, MSG) \
133 STATS_DECL(NAME, TYPE, MSG) \
134 STATS_TRACK(NAME, TYPE) \
136#define STATS_DECLTRACK_ARG_ATTR(NAME) \
137 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
138#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
139 STATS_DECLTRACK(NAME, CSArguments, \
140 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
141#define STATS_DECLTRACK_FN_ATTR(NAME) \
142 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
143#define STATS_DECLTRACK_CS_ATTR(NAME) \
144 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
145#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
146 STATS_DECLTRACK(NAME, FunctionReturn, \
147 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
148#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
149 STATS_DECLTRACK(NAME, CSReturn, \
150 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
151#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
152 STATS_DECLTRACK(NAME, Floating, \
153 ("Number of floating values known to be '" #NAME "'"))
158#define PIPE_OPERATOR(CLASS) \
159 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
160 return OS << static_cast<const AbstractAttribute &>(AA); \
215 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
218 auto *BB =
I->getParent();
224 return !HeaderOnly || BB ==
C->getHeader();
235 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
240 if (
VectorType *SeqTy = dyn_cast<VectorType>(Ty))
244 if (
ArrayType *SeqTy = dyn_cast<ArrayType>(Ty))
247 if (!isa<StructType>(Ty))
260 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
270 bool AllowVolatile) {
271 if (!AllowVolatile &&
I->isVolatile())
274 if (
auto *LI = dyn_cast<LoadInst>(
I)) {
275 return LI->getPointerOperand();
278 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
279 return SI->getPointerOperand();
282 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(
I)) {
283 return CXI->getPointerOperand();
286 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(
I)) {
287 return RMWI->getPointerOperand();
309 bool GetMinOffset,
bool AllowNonInbounds,
310 bool UseAssumed =
false) {
312 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
317 UseAssumed ? DepClassTy::OPTIONAL
319 if (!ValueConstantRangeAA)
342 const Value *
Ptr, int64_t &BytesOffset,
344 APInt OffsetAPInt(
DL.getIndexTypeSizeInBits(
Ptr->getType()), 0);
347 true, AllowNonInbounds);
355template <
typename AAType,
typename StateType =
typename AAType::StateType,
357 bool RecurseForSelectAndPHI =
true>
359 Attributor &
A,
const AAType &QueryingAA, StateType &S,
361 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
362 << QueryingAA <<
" into " << S <<
"\n");
364 assert((QueryingAA.getIRPosition().getPositionKind() ==
366 QueryingAA.getIRPosition().getPositionKind() ==
368 "Can only clamp returned value states for a function returned or call "
369 "site returned position!");
373 std::optional<StateType>
T;
376 auto CheckReturnValue = [&](
Value &RV) ->
bool {
381 return AA::hasAssumedIRAttr<IRAttributeKind>(
382 A, &QueryingAA, RVPos, DepClassTy::REQUIRED, IsKnown);
386 A.getAAFor<AAType>(QueryingAA, RVPos, DepClassTy::REQUIRED);
390 <<
" AA: " << AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
391 const StateType &AAS = AA->getState();
393 T = StateType::getBestState(AAS);
395 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
397 return T->isValidState();
400 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
401 AA::ValueScope::Intraprocedural,
402 RecurseForSelectAndPHI))
403 S.indicatePessimisticFixpoint();
410template <
typename AAType,
typename BaseType,
411 typename StateType =
typename BaseType::StateType,
412 bool PropagateCallBaseContext =
false,
414 bool RecurseForSelectAndPHI =
true>
415struct AAReturnedFromReturnedValues :
public BaseType {
421 StateType S(StateType::getBestState(this->getState()));
423 RecurseForSelectAndPHI>(
425 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
428 return clampStateAndIndicateChange<StateType>(this->getState(), S);
434template <
typename AAType,
typename StateType =
typename AAType::StateType,
436static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
438 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
439 << QueryingAA <<
" into " << S <<
"\n");
441 assert(QueryingAA.getIRPosition().getPositionKind() ==
443 "Can only clamp call site argument states for an argument position!");
447 std::optional<StateType>
T;
450 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
462 return AA::hasAssumedIRAttr<IRAttributeKind>(
463 A, &QueryingAA, ACSArgPos, DepClassTy::REQUIRED, IsKnown);
467 A.getAAFor<AAType>(QueryingAA, ACSArgPos, DepClassTy::REQUIRED);
470 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
471 <<
" AA: " << AA->getAsStr(&
A) <<
" @" << ACSArgPos
473 const StateType &AAS = AA->getState();
475 T = StateType::getBestState(AAS);
477 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
479 return T->isValidState();
482 bool UsedAssumedInformation =
false;
483 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
484 UsedAssumedInformation))
485 S.indicatePessimisticFixpoint();
492template <
typename AAType,
typename BaseType,
493 typename StateType =
typename AAType::StateType,
495bool getArgumentStateFromCallBaseContext(
Attributor &
A,
499 "Expected an 'argument' position !");
505 assert(ArgNo >= 0 &&
"Invalid Arg No!");
511 return AA::hasAssumedIRAttr<IRAttributeKind>(
512 A, &QueryingAttribute, CBArgPos, DepClassTy::REQUIRED, IsKnown);
516 A.getAAFor<AAType>(QueryingAttribute, CBArgPos, DepClassTy::REQUIRED);
519 const StateType &CBArgumentState =
520 static_cast<const StateType &
>(AA->getState());
522 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
523 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
527 State ^= CBArgumentState;
532template <
typename AAType,
typename BaseType,
533 typename StateType =
typename AAType::StateType,
534 bool BridgeCallBaseContext =
false,
536struct AAArgumentFromCallSiteArguments :
public BaseType {
542 StateType S = StateType::getBestState(this->getState());
544 if (BridgeCallBaseContext) {
546 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
548 A, *
this, this->getIRPosition(), S);
550 return clampStateAndIndicateChange<StateType>(this->getState(), S);
552 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
557 return clampStateAndIndicateChange<StateType>(this->getState(), S);
562template <
typename AAType,
typename BaseType,
563 typename StateType =
typename BaseType::StateType,
564 bool IntroduceCallBaseContext =
false,
566struct AACalleeToCallSite :
public BaseType {
571 auto IRPKind = this->getIRPosition().getPositionKind();
574 "Can only wrap function returned positions for call site "
575 "returned positions!");
576 auto &S = this->getState();
578 CallBase &CB = cast<CallBase>(this->getAnchorValue());
579 if (IntroduceCallBaseContext)
580 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
585 for (
const Function *Callee : Callees) {
589 IntroduceCallBaseContext ? &CB :
nullptr)
591 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
595 if (!AA::hasAssumedIRAttr<IRAttributeKind>(
596 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
602 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
606 if (S.isAtFixpoint())
607 return S.isValidState();
611 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
612 return S.indicatePessimisticFixpoint();
618template <
class AAType,
typename StateType =
typename AAType::StateType>
619static void followUsesInContext(AAType &AA,
Attributor &
A,
624 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
625 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
627 if (
const Instruction *UserI = dyn_cast<Instruction>(
U->getUser())) {
629 if (Found && AA.followUseInMBEC(
A, U, UserI, State))
630 for (
const Use &Us : UserI->
uses())
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &AA,
Attributor &
A, StateType &S,
649 A.getInfoCache().getMustBeExecutedContextExplorer();
655 for (
const Use &U : AA.getIRPosition().getAssociatedValue().uses())
658 followUsesInContext<AAType>(AA,
A, *Explorer, &CtxI,
Uses, S);
660 if (S.isAtFixpoint())
665 if (
const BranchInst *Br = dyn_cast<BranchInst>(
I))
666 if (Br->isConditional())
705 StateType ParentState;
709 ParentState.indicateOptimisticFixpoint();
711 for (
const BasicBlock *BB : Br->successors()) {
712 StateType ChildState;
714 size_t BeforeSize =
Uses.size();
715 followUsesInContext(AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
718 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
721 ParentState &= ChildState;
734namespace PointerInfo {
795 R.indicatePessimisticFixpoint();
889 if (!
Range.mayOverlap(ItRange))
891 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
892 for (
auto Index : It.getSecond()) {
894 if (!CB(Access, IsExact))
914 for (
unsigned Index : LocalList->getSecond()) {
917 if (
Range.offsetAndSizeAreUnknown())
933 RemoteI = RemoteI ? RemoteI : &
I;
937 bool AccExists =
false;
939 for (
auto Index : LocalList) {
941 if (
A.getLocalInst() == &
I) {
950 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
952 for (
auto Key : ToAdd) {
961 "New Access should have been at AccIndex");
962 LocalList.push_back(AccIndex);
976 auto &ExistingRanges =
Before.getRanges();
977 auto &NewRanges = Current.getRanges();
984 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
991 "Expected bin to actually contain the Access.");
1010 using const_iterator = VecTy::const_iterator;
// Const iteration over the recorded offsets (delegates to the container).
1013 const_iterator begin()
const {
return Offsets.begin(); }
// Matching end iterator over the recorded offsets.
1014 const_iterator end()
const {
return Offsets.end(); }
1017 return Offsets ==
RHS.Offsets;
// True while no offset has been recorded yet (Offsets is empty).
1023 bool isUnassigned()
const {
return Offsets.size() == 0; }
1025 bool isUnknown()
const {
1038 void addToAll(int64_t Inc) {
1039 for (
auto &
Offset : Offsets) {
// Merge by appending R's offsets onto ours; no deduplication is performed
// here.
1048 void merge(
const OffsetInfo &R) {
Offsets.append(
R.Offsets); }
1063struct AAPointerInfoImpl
1064 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1069 const std::string getAsStr(
Attributor *
A)
const override {
1070 return std::string(
"PointerInfo ") +
1071 (isValidState() ? (std::string(
"#") +
1072 std::to_string(OffsetBins.
size()) +
" bins")
1078 return AAPointerInfo::manifest(
A);
// Expose the underlying state's offset-bin begin iterator through the
// AAPointerInfo interface.
1081 virtual const_bin_iterator
begin()
const override {
return State::begin(); }
// Matching end iterator for the offset bins, forwarded to the state.
1082 virtual const_bin_iterator
end()
const override {
return State::end(); }
1083 virtual int64_t numOffsetBins()
const override {
1084 return State::numOffsetBins();
1087 bool forallInterferingAccesses(
1091 return State::forallInterferingAccesses(
Range, CB);
1094 bool forallInterferingAccesses(
1096 bool FindInterferingWrites,
bool FindInterferingReads,
1097 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1099 function_ref<
bool(
const Access &)> SkipCB)
const override {
1100 HasBeenWrittenTo =
false;
1107 bool IsAssumedNoSync = AA::hasAssumedIRAttr<Attribute::NoSync>(
1112 bool AllInSameNoSyncFn = IsAssumedNoSync;
1113 bool InstIsExecutedByInitialThreadOnly =
1114 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1121 bool InstIsExecutedInAlignedRegion =
1122 FindInterferingReads && ExecDomainAA &&
1123 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1125 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1126 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1129 bool IsThreadLocalObj =
1138 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1139 if (IsThreadLocalObj || AllInSameNoSyncFn)
1141 const auto *FnExecDomainAA =
1142 I.getFunction() == &
Scope
1147 if (!FnExecDomainAA)
1149 if (InstIsExecutedInAlignedRegion ||
1150 (FindInterferingWrites &&
1151 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1152 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1155 if (InstIsExecutedByInitialThreadOnly &&
1156 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1157 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1166 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1167 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1168 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1169 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1173 bool IsKnownNoRecurse;
1174 AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1181 bool InstInKernel =
Scope.hasFnAttribute(
"kernel");
1182 bool ObjHasKernelLifetime =
false;
1183 const bool UseDominanceReasoning =
1184 FindInterferingWrites && IsKnownNoRecurse;
1195 case AA::GPUAddressSpace::Shared:
1196 case AA::GPUAddressSpace::Constant:
1197 case AA::GPUAddressSpace::Local:
1209 std::function<
bool(
const Function &)> IsLiveInCalleeCB;
1211 if (
auto *AI = dyn_cast<AllocaInst>(&getAssociatedValue())) {
1216 bool IsKnownNoRecurse;
1217 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
1219 IsKnownNoRecurse)) {
1220 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1222 }
else if (
auto *GV = dyn_cast<GlobalValue>(&getAssociatedValue())) {
1225 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1226 if (ObjHasKernelLifetime)
1227 IsLiveInCalleeCB = [](
const Function &Fn) {
1228 return !Fn.hasFnAttribute(
"kernel");
1236 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1238 bool AccInSameScope = AccScope == &
Scope;
1242 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1246 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1247 if (Acc.isWrite() || (isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1248 ExclusionSet.
insert(Acc.getRemoteInst());
1251 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1252 (!FindInterferingReads || !Acc.isRead()))
1255 bool Dominates = FindInterferingWrites && DT && Exact &&
1256 Acc.isMustAccess() && AccInSameScope &&
1259 DominatingWrites.
insert(&Acc);
1263 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1265 InterferingAccesses.
push_back({&Acc, Exact});
1268 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1271 HasBeenWrittenTo = !DominatingWrites.
empty();
1275 for (
const Access *Acc : DominatingWrites) {
1276 if (!LeastDominatingWriteInst) {
1277 LeastDominatingWriteInst = Acc->getRemoteInst();
1278 }
else if (DT->
dominates(LeastDominatingWriteInst,
1279 Acc->getRemoteInst())) {
1280 LeastDominatingWriteInst = Acc->getRemoteInst();
1285 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1286 if (SkipCB && SkipCB(Acc))
1288 if (!CanIgnoreThreading(Acc))
1294 bool ReadChecked = !FindInterferingReads;
1295 bool WriteChecked = !FindInterferingWrites;
1301 &ExclusionSet, IsLiveInCalleeCB))
1306 if (!WriteChecked) {
1308 &ExclusionSet, IsLiveInCalleeCB))
1309 WriteChecked =
true;
1323 if (!WriteChecked && HasBeenWrittenTo &&
1324 Acc.getRemoteInst()->getFunction() != &
Scope) {
1334 if (!FnReachabilityAA ||
1335 !FnReachabilityAA->instructionCanReach(
1336 A, *LeastDominatingWriteInst,
1337 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1338 WriteChecked =
true;
1344 if (ReadChecked && WriteChecked)
1347 if (!DT || !UseDominanceReasoning)
1349 if (!DominatingWrites.count(&Acc))
1351 return LeastDominatingWriteInst != Acc.getRemoteInst();
1356 for (
auto &It : InterferingAccesses) {
1357 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1358 !CanSkipAccess(*It.first, It.second)) {
1359 if (!UserCB(*It.first, It.second))
1369 using namespace AA::PointerInfo;
1371 return indicatePessimisticFixpoint();
1373 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1374 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1378 const auto &State = OtherAAImpl.getState();
1379 for (
const auto &It : State) {
1380 for (
auto Index : It.getSecond()) {
1381 const auto &RAcc = State.getAccess(
Index);
1382 if (IsByval && !RAcc.isRead())
1384 bool UsedAssumedInformation =
false;
1386 auto Content =
A.translateArgumentToCallSiteContent(
1387 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1388 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1389 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1391 Changed |= addAccess(
A, RAcc.getRanges(), CB,
Content, AK,
1392 RAcc.getType(), RAcc.getRemoteInst());
1399 const OffsetInfo &Offsets,
CallBase &CB) {
1400 using namespace AA::PointerInfo;
1402 return indicatePessimisticFixpoint();
1404 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1408 const auto &State = OtherAAImpl.getState();
1409 for (
const auto &It : State) {
1410 for (
auto Index : It.getSecond()) {
1411 const auto &RAcc = State.getAccess(
Index);
1412 for (
auto Offset : Offsets) {
1416 if (!NewRanges.isUnknown()) {
1417 NewRanges.addToAllOffsets(
Offset);
1420 addAccess(
A, NewRanges, CB, RAcc.getContent(), RAcc.getKind(),
1421 RAcc.getType(), RAcc.getRemoteInst());
// Statistics hook shared by the pointer-info AAs; intentionally a no-op.
1430 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1434 for (
auto &It : OffsetBins) {
1435 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1436 <<
"] : " << It.getSecond().size() <<
"\n";
1437 for (
auto AccIndex : It.getSecond()) {
1438 auto &Acc = AccessList[AccIndex];
1439 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1440 if (Acc.getLocalInst() != Acc.getRemoteInst())
1441 O <<
" --> " << *Acc.getRemoteInst()
1443 if (!Acc.isWrittenValueYetUndetermined()) {
1444 if (isa_and_nonnull<Function>(Acc.getWrittenValue()))
1445 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1447 else if (Acc.getWrittenValue())
1448 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1450 O <<
" - c: <unknown>\n";
1457struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1460 : AAPointerInfoImpl(IRP,
A) {}
1467 using namespace AA::PointerInfo;
1470 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1480 if (!VT || VT->getElementCount().isScalable() ||
1482 (*Content)->getType() != VT ||
1483 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1493 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1494 auto *ConstContent = cast<Constant>(*
Content);
1498 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1500 ConstContent, ConstantInt::get(Int32Ty, i));
1503 Changed = Changed | addAccess(
A, {ElementOffsets, ElementSize},
I,
1507 for (
auto &ElementOffset : ElementOffsets)
1508 ElementOffset += ElementSize;
1522 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1526 void trackStatistics()
const override {
1527 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1531bool AAPointerInfoFloating::collectConstantsForGEP(
Attributor &
A,
1534 const OffsetInfo &PtrOI,
1536 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1540 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1541 "Don't look for constant values if the offset has already been "
1542 "determined to be unknown.");
1544 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1550 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1554 Union.addToAll(ConstantOffset.getSExtValue());
1559 for (
const auto &VI : VariableOffsets) {
1562 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1568 if (PotentialConstantsAA->undefIsContained())
1576 if (AssumedSet.empty())
1580 for (
const auto &ConstOffset : AssumedSet) {
1581 auto CopyPerOffset =
Union;
1582 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1583 VI.second.getZExtValue());
1584 Product.merge(CopyPerOffset);
1589 UsrOI = std::move(Union);
1594 using namespace AA::PointerInfo;
1597 Value &AssociatedValue = getAssociatedValue();
1600 OffsetInfoMap[&AssociatedValue].
insert(0);
1602 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1613 "CurPtr does not exist in the map!");
1615 auto &UsrOI = OffsetInfoMap[Usr];
1616 auto &PtrOI = OffsetInfoMap[CurPtr];
1617 assert(!PtrOI.isUnassigned() &&
1618 "Cannot pass through if the input Ptr was not visited!");
1624 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1626 User *Usr =
U.getUser();
1627 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1630 "The current pointer offset should have been seeded!");
1634 return HandlePassthroughUser(Usr, CurPtr, Follow);
1635 if (!isa<GEPOperator>(CE)) {
1636 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1641 if (
auto *
GEP = dyn_cast<GEPOperator>(Usr)) {
1644 auto &UsrOI = OffsetInfoMap[Usr];
1645 auto &PtrOI = OffsetInfoMap[CurPtr];
1647 if (UsrOI.isUnknown())
1650 if (PtrOI.isUnknown()) {
1656 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1659 if (isa<PtrToIntInst>(Usr))
1661 if (isa<CastInst>(Usr) || isa<SelectInst>(Usr) || isa<ReturnInst>(Usr))
1662 return HandlePassthroughUser(Usr, CurPtr, Follow);
1667 if (
auto *
PHI = dyn_cast<PHINode>(Usr)) {
1670 bool IsFirstPHIUser = !OffsetInfoMap.
count(
PHI);
1671 auto &UsrOI = OffsetInfoMap[
PHI];
1672 auto &PtrOI = OffsetInfoMap[CurPtr];
1676 if (PtrOI.isUnknown()) {
1677 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1678 << *CurPtr <<
" in " << *
PHI <<
"\n");
1679 Follow = !UsrOI.isUnknown();
1685 if (UsrOI == PtrOI) {
1686 assert(!PtrOI.isUnassigned() &&
1687 "Cannot assign if the current Ptr was not visited!");
1688 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1698 auto It = OffsetInfoMap.
find(CurPtrBase);
1699 if (It == OffsetInfoMap.
end()) {
1700 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1701 << *CurPtr <<
" in " << *
PHI
1702 <<
" (base: " << *CurPtrBase <<
")\n");
1717 *
PHI->getFunction());
1719 auto BaseOI = It->getSecond();
1720 BaseOI.addToAll(
Offset.getZExtValue());
1721 if (IsFirstPHIUser || BaseOI == UsrOI) {
1722 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1723 <<
" in " << *Usr <<
"\n");
1724 return HandlePassthroughUser(Usr, CurPtr, Follow);
1728 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1729 << *CurPtr <<
" in " << *
PHI <<
"\n");
1740 if (
auto *LoadI = dyn_cast<LoadInst>(Usr)) {
1748 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1749 OffsetInfoMap[CurPtr].Offsets, Changed,
1754 if (
auto *
II = dyn_cast<IntrinsicInst>(&
I))
1755 return II->isAssumeLikeIntrinsic();
1766 }
while (FromI && FromI != ToI);
1772 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1775 if (IntrI.getParent() == BB) {
1776 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(), &IntrI))
1782 if ((*PredIt) != BB)
1787 if (SuccBB == IntrBB)
1789 if (isa<UnreachableInst>(SuccBB->getTerminator()))
1793 if (IsImpactedInRange(LoadI->getNextNonDebugInstruction(),
1796 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1802 std::pair<Value *, IntrinsicInst *> Assumption;
1803 for (
const Use &LoadU : LoadI->
uses()) {
1804 if (
auto *CmpI = dyn_cast<CmpInst>(LoadU.getUser())) {
1805 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1807 for (
const Use &CmpU : CmpI->
uses()) {
1808 if (
auto *IntrI = dyn_cast<IntrinsicInst>(CmpU.getUser())) {
1809 if (!IsValidAssume(*IntrI))
1811 int Idx = CmpI->getOperandUse(0) == LoadU;
1812 Assumption = {CmpI->getOperand(
Idx), IntrI};
1817 if (Assumption.first)
1822 if (!Assumption.first || !Assumption.second)
1826 << *Assumption.second <<
": " << *LoadI
1827 <<
" == " << *Assumption.first <<
"\n");
1828 bool UsedAssumedInformation =
false;
1829 std::optional<Value *>
Content =
nullptr;
1830 if (Assumption.first)
1832 A.getAssumedSimplified(*Assumption.first, *
this,
1834 return handleAccess(
1835 A, *Assumption.second,
Content, AccessKind::AK_ASSUMPTION,
1836 OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());
1841 for (
auto *OtherOp : OtherOps) {
1842 if (OtherOp == CurPtr) {
1845 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1857 bool UsedAssumedInformation =
false;
1858 std::optional<Value *>
Content =
nullptr;
1862 return handleAccess(
A,
I,
Content, AK, OffsetInfoMap[CurPtr].Offsets,
1866 if (
auto *StoreI = dyn_cast<StoreInst>(Usr))
1867 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1868 *StoreI->getValueOperand()->getType(),
1869 {StoreI->getValueOperand()}, AccessKind::AK_W);
1870 if (
auto *RMWI = dyn_cast<AtomicRMWInst>(Usr))
1871 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1872 {RMWI->getValOperand()}, AccessKind::AK_RW);
1873 if (
auto *CXI = dyn_cast<AtomicCmpXchgInst>(Usr))
1874 return HandleStoreLike(
1875 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1876 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1879 if (
auto *CB = dyn_cast<CallBase>(Usr)) {
1883 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1894 translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB) |
1896 return isValidState();
1898 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1904 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1907 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1908 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1909 if (OffsetInfoMap.
count(NewU)) {
1911 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1912 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1913 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1917 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1919 OffsetInfoMap[NewU] = OffsetInfoMap[OldU];
1922 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1924 true, EquivalentUseCB)) {
1925 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1926 return indicatePessimisticFixpoint();
1930 dbgs() <<
"Accesses by bin after update:\n";
1937struct AAPointerInfoReturned final : AAPointerInfoImpl {
1939 : AAPointerInfoImpl(IRP,
A) {}
1943 return indicatePessimisticFixpoint();
1947 void trackStatistics()
const override {
1948 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1952struct AAPointerInfoArgument final : AAPointerInfoFloating {
1954 : AAPointerInfoFloating(IRP,
A) {}
1957 void trackStatistics()
const override {
1958 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1962struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1964 : AAPointerInfoFloating(IRP,
A) {}
1968 using namespace AA::PointerInfo;
1972 if (
auto *
MI = dyn_cast_or_null<MemIntrinsic>(getCtxI())) {
1976 LengthVal =
Length->getSExtValue();
1977 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
1980 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
1982 return indicatePessimisticFixpoint();
1985 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
1987 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
1990 dbgs() <<
"Accesses by bin after update:\n";
2001 Argument *Arg = getAssociatedArgument();
2006 if (ArgAA && ArgAA->getState().isValidState())
2007 return translateAndAddStateFromCallee(
A, *ArgAA,
2008 *cast<CallBase>(getCtxI()));
2010 return indicatePessimisticFixpoint();
2013 bool IsKnownNoCapture;
2014 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
2015 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2016 return indicatePessimisticFixpoint();
2018 bool IsKnown =
false;
2020 return ChangeStatus::UNCHANGED;
2023 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2029 void trackStatistics()
const override {
2030 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2034struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2036 : AAPointerInfoFloating(IRP,
A) {}
2039 void trackStatistics()
const override {
2040 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2054 assert(!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2055 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2059 const std::string getAsStr(
Attributor *
A)
const override {
2060 return getAssumed() ?
"nounwind" :
"may-unwind";
2066 (
unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2067 (
unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2068 (
unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2071 if (!
I.mayThrow(
true))
2074 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
2075 bool IsKnownNoUnwind;
2076 return AA::hasAssumedIRAttr<Attribute::NoUnwind>(
2083 bool UsedAssumedInformation =
false;
2084 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2085 UsedAssumedInformation))
2086 return indicatePessimisticFixpoint();
2088 return ChangeStatus::UNCHANGED;
2092struct AANoUnwindFunction final :
public AANoUnwindImpl {
2094 : AANoUnwindImpl(IRP,
A) {}
2101struct AANoUnwindCallSite final
2102 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2104 : AACalleeToCallSite<
AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2115 case Intrinsic::nvvm_barrier0:
2116 case Intrinsic::nvvm_barrier0_and:
2117 case Intrinsic::nvvm_barrier0_or:
2118 case Intrinsic::nvvm_barrier0_popc:
2120 case Intrinsic::amdgcn_s_barrier:
2121 if (ExecutedAligned)
2134 if (
auto *FI = dyn_cast<FenceInst>(
I))
2137 if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
2144 switch (
I->getOpcode()) {
2145 case Instruction::AtomicRMW:
2146 Ordering = cast<AtomicRMWInst>(
I)->getOrdering();
2148 case Instruction::Store:
2149 Ordering = cast<StoreInst>(
I)->getOrdering();
2151 case Instruction::Load:
2152 Ordering = cast<LoadInst>(
I)->getOrdering();
2156 "New atomic operations need to be known in the attributor.");
2167 if (
auto *
MI = dyn_cast<MemIntrinsic>(
I))
2168 return !
MI->isVolatile();
2179 assert(!AA::hasAssumedIRAttr<Attribute::NoSync>(
A,
nullptr, getIRPosition(),
2180 DepClassTy::NONE, IsKnown));
2184 const std::string getAsStr(
Attributor *
A)
const override {
2185 return getAssumed() ?
"nosync" :
"may-sync";
2201 if (
I.mayReadOrWriteMemory())
2206 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
2215 bool UsedAssumedInformation =
false;
2216 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2217 UsedAssumedInformation) ||
2218 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2219 UsedAssumedInformation))
2220 return indicatePessimisticFixpoint();
2225struct AANoSyncFunction final :
public AANoSyncImpl {
2227 : AANoSyncImpl(IRP,
A) {}
2234struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2236 : AACalleeToCallSite<
AANoSync, AANoSyncImpl>(IRP,
A) {}
2246struct AANoFreeImpl :
public AANoFree {
2252 assert(!AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
nullptr, getIRPosition(),
2253 DepClassTy::NONE, IsKnown));
2261 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2263 DepClassTy::REQUIRED, IsKnown);
2266 bool UsedAssumedInformation =
false;
2267 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2268 UsedAssumedInformation))
2269 return indicatePessimisticFixpoint();
2270 return ChangeStatus::UNCHANGED;
2274 const std::string getAsStr(
Attributor *
A)
const override {
2275 return getAssumed() ?
"nofree" :
"may-free";
2279struct AANoFreeFunction final :
public AANoFreeImpl {
2281 : AANoFreeImpl(IRP,
A) {}
2288struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2290 : AACalleeToCallSite<
AANoFree, AANoFreeImpl>(IRP,
A) {}
2297struct AANoFreeFloating : AANoFreeImpl {
2299 : AANoFreeImpl(IRP,
A) {}
2309 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this,
2311 DepClassTy::OPTIONAL, IsKnown))
2312 return ChangeStatus::UNCHANGED;
2314 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2315 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2317 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
2325 return AA::hasAssumedIRAttr<Attribute::NoFree>(
2327 DepClassTy::REQUIRED, IsKnown);
2330 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
2331 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
2335 if (isa<StoreInst>(UserI) || isa<LoadInst>(UserI) ||
2336 isa<ReturnInst>(UserI))
2342 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2343 return indicatePessimisticFixpoint();
2345 return ChangeStatus::UNCHANGED;
2350struct AANoFreeArgument final : AANoFreeFloating {
2352 : AANoFreeFloating(IRP,
A) {}
2359struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2361 : AANoFreeFloating(IRP,
A) {}
2369 Argument *Arg = getAssociatedArgument();
2371 return indicatePessimisticFixpoint();
2374 if (AA::hasAssumedIRAttr<Attribute::NoFree>(
A,
this, ArgPos,
2375 DepClassTy::REQUIRED, IsKnown))
2376 return ChangeStatus::UNCHANGED;
2377 return indicatePessimisticFixpoint();
2385struct AANoFreeReturned final : AANoFreeFloating {
2387 : AANoFreeFloating(IRP,
A) {
2402 void trackStatistics()
const override {}
2406struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2408 : AANoFreeFloating(IRP,
A) {}
2411 return ChangeStatus::UNCHANGED;
2422 bool IgnoreSubsumingPositions) {
2424 AttrKinds.
push_back(Attribute::NonNull);
2427 AttrKinds.
push_back(Attribute::Dereferenceable);
2428 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2435 if (!Fn->isDeclaration()) {
2445 bool UsedAssumedInformation =
false;
2446 if (!
A.checkForAllInstructions(
2448 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2452 UsedAssumedInformation,
false,
true))
2464 Attribute::NonNull)});
2469static int64_t getKnownNonNullAndDerefBytesForUse(
2471 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2474 const Value *UseV =
U->get();
2481 if (isa<CastInst>(
I)) {
2486 if (isa<GetElementPtrInst>(
I)) {
2496 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
2499 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2516 bool IsKnownNonNull;
2517 AA::hasAssumedIRAttr<Attribute::NonNull>(
A, &QueryingAA, IRP,
2519 IsNonNull |= IsKnownNonNull;
2526 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2527 Loc->Size.isScalable() ||
I->isVolatile())
2533 if (
Base &&
Base == &AssociatedValue) {
2534 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2536 return std::max(int64_t(0), DerefBytes);
2543 int64_t DerefBytes = Loc->Size.getValue();
2545 return std::max(int64_t(0), DerefBytes);
2556 Value &
V = *getAssociatedValue().stripPointerCasts();
2557 if (isa<ConstantPointerNull>(V)) {
2558 indicatePessimisticFixpoint();
2563 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2569 bool IsNonNull =
false;
2570 bool TrackUse =
false;
2571 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2572 IsNonNull, TrackUse);
2573 State.setKnown(IsNonNull);
2578 const std::string getAsStr(
Attributor *
A)
const override {
2579 return getAssumed() ?
"nonnull" :
"may-null";
2584struct AANonNullFloating :
public AANonNullImpl {
2586 : AANonNullImpl(IRP,
A) {}
2591 bool IsKnownNonNull;
2592 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2593 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2597 bool UsedAssumedInformation =
false;
2598 Value *AssociatedValue = &getAssociatedValue();
2600 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2605 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2609 if (
auto *
PHI = dyn_cast<PHINode>(AssociatedValue))
2611 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2612 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2615 return ChangeStatus::UNCHANGED;
2616 if (
auto *
Select = dyn_cast<SelectInst>(AssociatedValue))
2617 if (AA::hasAssumedIRAttr<Attribute::NonNull>(
2619 DepClassTy::OPTIONAL, IsKnown) &&
2620 AA::hasAssumedIRAttr<Attribute::NonNull>(
2622 DepClassTy::OPTIONAL, IsKnown))
2623 return ChangeStatus::UNCHANGED;
2630 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2631 return indicatePessimisticFixpoint();
2632 return ChangeStatus::UNCHANGED;
2635 for (
const auto &VAC : Values)
2637 return indicatePessimisticFixpoint();
2639 return ChangeStatus::UNCHANGED;
2647struct AANonNullReturned final
2648 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2649 false, AANonNull::IRAttributeKind, false> {
2656 const std::string getAsStr(
Attributor *
A)
const override {
2657 return getAssumed() ?
"nonnull" :
"may-null";
2665struct AANonNullArgument final
2666 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2668 : AAArgumentFromCallSiteArguments<
AANonNull, AANonNullImpl>(IRP,
A) {}
2674struct AANonNullCallSiteArgument final : AANonNullFloating {
2676 : AANonNullFloating(IRP,
A) {}
2683struct AANonNullCallSiteReturned final
2684 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2686 : AACalleeToCallSite<
AANonNull, AANonNullImpl>(IRP,
A) {}
2702 assert(!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2703 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2708 const std::string getAsStr(
Attributor *
A)
const override {
2709 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2713struct AAMustProgressFunction final : AAMustProgressImpl {
2715 : AAMustProgressImpl(IRP,
A) {}
2720 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
2721 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2723 return indicateOptimisticFixpoint();
2724 return ChangeStatus::UNCHANGED;
2729 bool IsKnownMustProgress;
2730 return AA::hasAssumedIRAttr<Attribute::MustProgress>(
2731 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2735 bool AllCallSitesKnown =
true;
2736 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2739 return indicatePessimisticFixpoint();
2741 return ChangeStatus::UNCHANGED;
2745 void trackStatistics()
const override {
2751struct AAMustProgressCallSite final : AAMustProgressImpl {
2753 : AAMustProgressImpl(IRP,
A) {}
2762 bool IsKnownMustProgress;
2763 if (!AA::hasAssumedIRAttr<Attribute::MustProgress>(
2764 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2765 return indicatePessimisticFixpoint();
2766 return ChangeStatus::UNCHANGED;
2770 void trackStatistics()
const override {
2785 assert(!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2786 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2791 const std::string getAsStr(
Attributor *
A)
const override {
2792 return getAssumed() ?
"norecurse" :
"may-recurse";
2796struct AANoRecurseFunction final : AANoRecurseImpl {
2798 : AANoRecurseImpl(IRP,
A) {}
2805 bool IsKnownNoRecurse;
2806 if (!AA::hasAssumedIRAttr<Attribute::NoRecurse>(
2809 DepClassTy::NONE, IsKnownNoRecurse))
2811 return IsKnownNoRecurse;
2813 bool UsedAssumedInformation =
false;
2814 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2815 UsedAssumedInformation)) {
2821 if (!UsedAssumedInformation)
2822 indicateOptimisticFixpoint();
2823 return ChangeStatus::UNCHANGED;
2828 DepClassTy::REQUIRED);
2829 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2830 return indicatePessimisticFixpoint();
2831 return ChangeStatus::UNCHANGED;
2838struct AANoRecurseCallSite final
2839 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2841 : AACalleeToCallSite<
AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2856 const std::string getAsStr(
Attributor *
A)
const override {
2857 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2861struct AANonConvergentFunction final : AANonConvergentImpl {
2863 : AANonConvergentImpl(IRP,
A) {}
2869 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2870 CallBase &CB = cast<CallBase>(Inst);
2872 if (!Callee ||
Callee->isIntrinsic()) {
2875 if (
Callee->isDeclaration()) {
2876 return !
Callee->hasFnAttribute(Attribute::Convergent);
2883 bool UsedAssumedInformation =
false;
2884 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2885 UsedAssumedInformation)) {
2886 return indicatePessimisticFixpoint();
2888 return ChangeStatus::UNCHANGED;
2892 if (isKnownNotConvergent() &&
2893 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2894 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2895 return ChangeStatus::CHANGED;
2897 return ChangeStatus::UNCHANGED;
2914 const size_t UBPrevSize = KnownUBInsts.size();
2915 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2919 if (
I.isVolatile() &&
I.mayWriteToMemory())
2923 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2932 "Expected pointer operand of memory accessing instruction");
2936 std::optional<Value *> SimplifiedPtrOp =
2937 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2938 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2940 const Value *PtrOpVal = *SimplifiedPtrOp;
2945 if (!isa<ConstantPointerNull>(PtrOpVal)) {
2946 AssumedNoUBInsts.insert(&
I);
2958 AssumedNoUBInsts.insert(&
I);
2960 KnownUBInsts.insert(&
I);
2969 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2973 auto *BrInst = cast<BranchInst>(&
I);
2976 if (BrInst->isUnconditional())
2981 std::optional<Value *> SimplifiedCond =
2982 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
2983 if (!SimplifiedCond || !*SimplifiedCond)
2985 AssumedNoUBInsts.insert(&
I);
2993 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3002 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3008 if (idx >=
Callee->arg_size())
3020 bool IsKnownNoUndef;
3021 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3022 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3023 if (!IsKnownNoUndef)
3025 bool UsedAssumedInformation =
false;
3026 std::optional<Value *> SimplifiedVal =
3029 if (UsedAssumedInformation)
3031 if (SimplifiedVal && !*SimplifiedVal)
3033 if (!SimplifiedVal || isa<UndefValue>(**SimplifiedVal)) {
3034 KnownUBInsts.insert(&
I);
3038 !isa<ConstantPointerNull>(**SimplifiedVal))
3040 bool IsKnownNonNull;
3041 AA::hasAssumedIRAttr<Attribute::NonNull>(
3042 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3044 KnownUBInsts.insert(&
I);
3050 auto &RI = cast<ReturnInst>(
I);
3053 std::optional<Value *> SimplifiedRetValue =
3054 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3055 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3072 if (isa<ConstantPointerNull>(*SimplifiedRetValue)) {
3073 bool IsKnownNonNull;
3074 AA::hasAssumedIRAttr<Attribute::NonNull>(
3078 KnownUBInsts.insert(&
I);
3084 bool UsedAssumedInformation =
false;
3085 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3086 {Instruction::Load, Instruction::Store,
3087 Instruction::AtomicCmpXchg,
3088 Instruction::AtomicRMW},
3089 UsedAssumedInformation,
3091 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3092 UsedAssumedInformation,
3094 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3095 UsedAssumedInformation);
3099 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3101 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3102 bool IsKnownNoUndef;
3103 AA::hasAssumedIRAttr<Attribute::NoUndef>(
3104 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3106 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3107 {Instruction::Ret}, UsedAssumedInformation,
3112 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3113 UBPrevSize != KnownUBInsts.size())
3114 return ChangeStatus::CHANGED;
3115 return ChangeStatus::UNCHANGED;
3119 return KnownUBInsts.count(
I);
3122 bool isAssumedToCauseUB(
Instruction *
I)
const override {
3129 switch (
I->getOpcode()) {
3130 case Instruction::Load:
3131 case Instruction::Store:
3132 case Instruction::AtomicCmpXchg:
3133 case Instruction::AtomicRMW:
3134 return !AssumedNoUBInsts.count(
I);
3135 case Instruction::Br: {
3136 auto *BrInst = cast<BranchInst>(
I);
3137 if (BrInst->isUnconditional())
3139 return !AssumedNoUBInsts.count(
I);
3148 if (KnownUBInsts.empty())
3149 return ChangeStatus::UNCHANGED;
3151 A.changeToUnreachableAfterManifest(
I);
3152 return ChangeStatus::CHANGED;
3156 const std::string getAsStr(
Attributor *
A)
const override {
3157 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3202 bool UsedAssumedInformation =
false;
3203 std::optional<Value *> SimplifiedV =
3206 if (!UsedAssumedInformation) {
3211 KnownUBInsts.insert(
I);
3212 return std::nullopt;
3218 if (isa<UndefValue>(V)) {
3219 KnownUBInsts.insert(
I);
3220 return std::nullopt;
3226struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3228 : AAUndefinedBehaviorImpl(IRP,
A) {}
3231 void trackStatistics()
const override {
3233 "Number of instructions known to have UB");
3235 KnownUBInsts.size();
3256 if (SCCI.hasCycle())
3266 for (
auto *L : LI->getLoopsInPreorder()) {
3280 assert(!AA::hasAssumedIRAttr<Attribute::WillReturn>(
3281 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3286 bool isImpliedByMustprogressAndReadonly(
Attributor &
A,
bool KnownOnly) {
3287 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3292 return IsKnown || !KnownOnly;
3298 if (isImpliedByMustprogressAndReadonly(
A,
false))
3299 return ChangeStatus::UNCHANGED;
3304 if (AA::hasAssumedIRAttr<Attribute::WillReturn>(
3305 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3311 bool IsKnownNoRecurse;
3312 return AA::hasAssumedIRAttr<Attribute::NoRecurse>(
3313 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3316 bool UsedAssumedInformation =
false;
3317 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3318 UsedAssumedInformation))
3319 return indicatePessimisticFixpoint();
3321 return ChangeStatus::UNCHANGED;
3325 const std::string getAsStr(
Attributor *
A)
const override {
3326 return getAssumed() ?
"willreturn" :
"may-noreturn";
3330struct AAWillReturnFunction final : AAWillReturnImpl {
3332 : AAWillReturnImpl(IRP,
A) {}
3336 AAWillReturnImpl::initialize(
A);
3339 assert(
F &&
"Did expect an anchor function");
3340 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3341 indicatePessimisticFixpoint();
3349struct AAWillReturnCallSite final
3350 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3352 : AACalleeToCallSite<
AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3356 if (isImpliedByMustprogressAndReadonly(
A,
false))
3357 return ChangeStatus::UNCHANGED;
3359 return AACalleeToCallSite::updateImpl(
A);
3381 const ToTy *To =
nullptr;
3391 assert(Hash == 0 &&
"Computed hash twice!");
3395 detail::combineHashValue(PairDMI ::getHashValue({
From, To}),
3396 InstSetDMI::getHashValue(ExclusionSet));
3406 :
From(&
From), To(&To), ExclusionSet(ES) {
3408 if (!ES || ES->
empty()) {
3409 ExclusionSet =
nullptr;
3410 }
else if (MakeUnique) {
3411 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3416 :
From(RQI.
From), To(RQI.To), ExclusionSet(RQI.ExclusionSet) {}
3429 return &TombstoneKey;
3436 if (!PairDMI::isEqual({
LHS->From,
LHS->To}, {
RHS->From,
RHS->To}))
3438 return InstSetDMI::isEqual(
LHS->ExclusionSet,
RHS->ExclusionSet);
3442#define DefineKeys(ToTy) \
3444 ReachabilityQueryInfo<ToTy> \
3445 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3446 ReachabilityQueryInfo<ToTy>( \
3447 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3448 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3450 ReachabilityQueryInfo<ToTy> \
3451 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3452 ReachabilityQueryInfo<ToTy>( \
3453 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3454 DenseMapInfo<const ToTy *>::getTombstoneKey());
3463template <
typename BaseTy,
typename ToTy>
3464struct CachedReachabilityAA :
public BaseTy {
3470 bool isQueryAA()
const override {
return true; }
3475 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3476 RQITy *RQI = QueryVector[
u];
3477 if (RQI->Result == RQITy::Reachable::No &&
3479 Changed = ChangeStatus::CHANGED;
3485 bool IsTemporaryRQI) = 0;
3488 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3493 QueryCache.erase(&RQI);
3499 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3500 RQITy PlainRQI(RQI.From, RQI.To);
3501 if (!QueryCache.count(&PlainRQI)) {
3502 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3504 QueryVector.push_back(RQIPtr);
3505 QueryCache.insert(RQIPtr);
3510 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3511 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3512 "Did not expect empty set!");
3513 RQITy *RQIPtr =
new (
A.Allocator)
3514 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3515 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3517 assert(!QueryCache.count(RQIPtr));
3518 QueryVector.push_back(RQIPtr);
3519 QueryCache.insert(RQIPtr);
3522 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3523 A.registerForUpdate(*
this);
3524 return Result == RQITy::Reachable::Yes;
3527 const std::string getAsStr(
Attributor *
A)
const override {
3529 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3532 bool checkQueryCache(
Attributor &
A, RQITy &StackRQI,
3533 typename RQITy::Reachable &
Result) {
3534 if (!this->getState().isValidState()) {
3535 Result = RQITy::Reachable::Yes;
3541 if (StackRQI.ExclusionSet) {
3542 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3543 auto It = QueryCache.find(&PlainRQI);
3544 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3545 Result = RQITy::Reachable::No;
3550 auto It = QueryCache.find(&StackRQI);
3551 if (It != QueryCache.end()) {
3558 QueryCache.insert(&StackRQI);
3567struct AAIntraFnReachabilityFunction final
3568 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3569 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3576 bool isAssumedReachable(
3579 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3583 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
3584 typename RQITy::Reachable
Result;
3585 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3586 return NonConstThis->isReachableImpl(
A, StackRQI,
3588 return Result == RQITy::Reachable::Yes;
3595 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3598 [&](
const auto &DeadEdge) {
3599 return LivenessAA->isEdgeDead(DeadEdge.first,
3603 return LivenessAA->isAssumedDead(BB);
3605 return ChangeStatus::UNCHANGED;
3609 return Base::updateImpl(
A);
3613 bool IsTemporaryRQI)
override {
3615 bool UsedExclusionSet =
false;
3620 while (IP && IP != &To) {
3621 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3622 UsedExclusionSet =
true;
3633 "Not an intra-procedural query!");
3637 if (FromBB == ToBB &&
3638 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3639 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3644 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3645 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3650 if (RQI.ExclusionSet)
3651 for (
auto *
I : *RQI.ExclusionSet)
3652 if (
I->getFunction() == Fn)
3653 ExclusionBlocks.
insert(
I->getParent());
3656 if (ExclusionBlocks.
count(FromBB) &&
3659 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3662 A.getAAFor<
AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3663 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3664 DeadBlocks.insert(ToBB);
3665 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3674 while (!Worklist.
empty()) {
3676 if (!Visited.
insert(BB).second)
3679 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3680 LocalDeadEdges.
insert({BB, SuccBB});
3685 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3688 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3691 if (ExclusionBlocks.
count(SuccBB)) {
3692 UsedExclusionSet =
true;
3699 DeadEdges.insert(LocalDeadEdges.
begin(), LocalDeadEdges.
end());
3700 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3705 void trackStatistics()
const override {}
3725 bool IgnoreSubsumingPositions) {
3726 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3727 "Unexpected attribute kind");
3730 if (isa<AllocaInst>(Val))
3733 IgnoreSubsumingPositions =
true;
3736 if (isa<UndefValue>(Val))
3739 if (isa<ConstantPointerNull>(Val) &&
3744 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3745 IgnoreSubsumingPositions, Attribute::NoAlias))
3755 "Noalias is a pointer attribute");
3758 const std::string getAsStr(
Attributor *
A)
const override {
3759 return getAssumed() ?
"noalias" :
"may-alias";
3764struct AANoAliasFloating final : AANoAliasImpl {
3766 : AANoAliasImpl(IRP,
A) {}
3771 return indicatePessimisticFixpoint();
3775 void trackStatistics()
const override {
3781struct AANoAliasArgument final
3782 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3783 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3795 if (AA::hasAssumedIRAttr<Attribute::NoSync>(
3797 DepClassTy::OPTIONAL, IsKnownNoSycn))
3798 return Base::updateImpl(
A);
3803 return Base::updateImpl(
A);
3807 bool UsedAssumedInformation =
false;
3808 if (
A.checkForAllCallSites(
3810 true, UsedAssumedInformation))
3811 return Base::updateImpl(
A);
3819 return indicatePessimisticFixpoint();
3826struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3828 : AANoAliasImpl(IRP,
A) {}
3834 const CallBase &CB,
unsigned OtherArgNo) {
3836 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3848 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3849 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3856 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3858 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3859 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3865 AAR =
A.getInfoCache().getAnalysisResultForFunction<
AAManager>(
3869 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3871 "callsite arguments: "
3872 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3873 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3878 bool isKnownNoAliasDueToNoAliasPreservation(
3898 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3909 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
3914 bool IsKnownNoCapture;
3915 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
3917 DepClassTy::OPTIONAL, IsKnownNoCapture))
3923 A, *UserI, *getCtxI(), *
this,
nullptr,
3924 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3932 case UseCaptureKind::NO_CAPTURE:
3934 case UseCaptureKind::MAY_CAPTURE:
3938 case UseCaptureKind::PASSTHROUGH:
3945 bool IsKnownNoCapture;
3947 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
3948 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3949 if (!IsAssumedNoCapture &&
3951 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3953 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3954 <<
" cannot be noalias as it is potentially captured\n");
3959 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3964 const auto &CB = cast<CallBase>(getAnchorValue());
3965 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3966 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
3976 auto *MemBehaviorAA =
3979 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3980 return ChangeStatus::UNCHANGED;
3983 bool IsKnownNoAlias;
3985 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
3986 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
3988 <<
" is not no-alias at the definition\n");
3989 return indicatePessimisticFixpoint();
3993 if (MemBehaviorAA &&
3994 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
3996 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
3997 return ChangeStatus::UNCHANGED;
4000 return indicatePessimisticFixpoint();
4008struct AANoAliasReturned final : AANoAliasImpl {
4010 : AANoAliasImpl(IRP,
A) {}
4015 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4016 if (
Constant *
C = dyn_cast<Constant>(&RV))
4017 if (
C->isNullValue() || isa<UndefValue>(
C))
4022 if (!isa<CallBase>(&RV))
4026 bool IsKnownNoAlias;
4027 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
4028 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4031 bool IsKnownNoCapture;
4033 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
4034 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4036 return IsAssumedNoCapture ||
4040 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4041 return indicatePessimisticFixpoint();
4043 return ChangeStatus::UNCHANGED;
4051struct AANoAliasCallSiteReturned final
4052 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4054 : AACalleeToCallSite<
AANoAlias, AANoAliasImpl>(IRP,
A) {}
4064struct AAIsDeadValueImpl :
public AAIsDead {
4068 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4071 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4074 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4077 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4080 bool isAssumedDead(
const Instruction *
I)
const override {
4081 return I == getCtxI() && isAssumedDead();
4085 bool isKnownDead(
const Instruction *
I)
const override {
4086 return isAssumedDead(
I) && isKnownDead();
4090 const std::string getAsStr(
Attributor *
A)
const override {
4091 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4097 if (
V.getType()->isVoidTy() ||
V.use_empty())
4101 if (!isa<Constant>(V)) {
4102 if (
auto *
I = dyn_cast<Instruction>(&V))
4103 if (!
A.isRunOn(*
I->getFunction()))
4105 bool UsedAssumedInformation =
false;
4106 std::optional<Constant *>
C =
4107 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4112 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4117 return A.checkForAllUses(UsePred, *
this, V,
false,
4118 DepClassTy::REQUIRED,
4127 auto *CB = dyn_cast<CallBase>(
I);
4128 if (!CB || isa<IntrinsicInst>(CB))
4133 bool IsKnownNoUnwind;
4134 if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4135 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4143struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4145 : AAIsDeadValueImpl(IRP,
A) {}
4149 AAIsDeadValueImpl::initialize(
A);
4151 if (isa<UndefValue>(getAssociatedValue())) {
4152 indicatePessimisticFixpoint();
4156 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4157 if (!isAssumedSideEffectFree(
A,
I)) {
4158 if (!isa_and_nonnull<StoreInst>(
I) && !isa_and_nonnull<FenceInst>(
I))
4159 indicatePessimisticFixpoint();
4161 removeAssumedBits(HAS_NO_EFFECT);
4168 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4170 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4177 if (
SI.isVolatile())
4183 bool UsedAssumedInformation =
false;
4184 if (!AssumeOnlyInst) {
4185 PotentialCopies.clear();
4187 UsedAssumedInformation)) {
4190 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4194 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4195 <<
" potential copies.\n");
4200 UsedAssumedInformation))
4202 if (
auto *LI = dyn_cast<LoadInst>(V)) {
4204 auto &UserI = cast<Instruction>(*U.getUser());
4205 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4207 AssumeOnlyInst->insert(&UserI);
4210 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4216 <<
" is assumed live!\n");
4222 const std::string getAsStr(
Attributor *
A)
const override {
4223 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4224 if (isa_and_nonnull<StoreInst>(
I))
4226 return "assumed-dead-store";
4227 if (isa_and_nonnull<FenceInst>(
I))
4229 return "assumed-dead-fence";
4230 return AAIsDeadValueImpl::getAsStr(
A);
4235 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
4236 if (
auto *SI = dyn_cast_or_null<StoreInst>(
I)) {
4237 if (!isDeadStore(
A, *SI))
4238 return indicatePessimisticFixpoint();
4239 }
else if (
auto *FI = dyn_cast_or_null<FenceInst>(
I)) {
4240 if (!isDeadFence(
A, *FI))
4241 return indicatePessimisticFixpoint();
4243 if (!isAssumedSideEffectFree(
A,
I))
4244 return indicatePessimisticFixpoint();
4245 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4246 return indicatePessimisticFixpoint();
4251 bool isRemovableStore()
const override {
4252 return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
4257 Value &
V = getAssociatedValue();
4258 if (
auto *
I = dyn_cast<Instruction>(&V)) {
4263 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
4265 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4268 A.deleteAfterManifest(*
I);
4269 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4271 for (
auto *Usr : AOI->
users())
4272 AssumeOnlyInst.
insert(cast<Instruction>(Usr));
4273 A.deleteAfterManifest(*AOI);
4277 if (
auto *FI = dyn_cast<FenceInst>(
I)) {
4279 A.deleteAfterManifest(*FI);
4282 if (isAssumedSideEffectFree(
A,
I) && !isa<InvokeInst>(
I)) {
4283 A.deleteAfterManifest(*
I);
4291 void trackStatistics()
const override {
4300struct AAIsDeadArgument :
public AAIsDeadFloating {
4302 : AAIsDeadFloating(IRP,
A) {}
4306 Argument &Arg = *getAssociatedArgument();
4307 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4308 if (
A.registerFunctionSignatureRewrite(
4312 return ChangeStatus::CHANGED;
4314 return ChangeStatus::UNCHANGED;
4321struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4323 : AAIsDeadValueImpl(IRP,
A) {}
4327 AAIsDeadValueImpl::initialize(
A);
4328 if (isa<UndefValue>(getAssociatedValue()))
4329 indicatePessimisticFixpoint();
4338 Argument *Arg = getAssociatedArgument();
4340 return indicatePessimisticFixpoint();
4342 auto *ArgAA =
A.getAAFor<
AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4344 return indicatePessimisticFixpoint();
4350 CallBase &CB = cast<CallBase>(getAnchorValue());
4352 assert(!isa<UndefValue>(
U.get()) &&
4353 "Expected undef values to be filtered out!");
4355 if (
A.changeUseAfterManifest(U, UV))
4356 return ChangeStatus::CHANGED;
4357 return ChangeStatus::UNCHANGED;
4364struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4366 : AAIsDeadFloating(IRP,
A) {}
4369 bool isAssumedDead()
const override {
4370 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4375 AAIsDeadFloating::initialize(
A);
4376 if (isa<UndefValue>(getAssociatedValue())) {
4377 indicatePessimisticFixpoint();
4382 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4388 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4389 IsAssumedSideEffectFree =
false;
4390 Changed = ChangeStatus::CHANGED;
4392 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4393 return indicatePessimisticFixpoint();
4398 void trackStatistics()
const override {
4399 if (IsAssumedSideEffectFree)
4406 const std::string getAsStr(
Attributor *
A)
const override {
4407 return isAssumedDead()
4409 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4413 bool IsAssumedSideEffectFree =
true;
4416struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4418 : AAIsDeadValueImpl(IRP,
A) {}
4423 bool UsedAssumedInformation =
false;
4424 A.checkForAllInstructions([](
Instruction &) {
return true; }, *
this,
4425 {Instruction::Ret}, UsedAssumedInformation);
4428 if (ACS.isCallbackCall() || !ACS.getInstruction())
4430 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4433 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4434 UsedAssumedInformation))
4435 return indicatePessimisticFixpoint();
4437 return ChangeStatus::UNCHANGED;
4443 bool AnyChange =
false;
4451 bool UsedAssumedInformation =
false;
4452 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4453 UsedAssumedInformation);
4454 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4461struct AAIsDeadFunction :
public AAIsDead {
4467 assert(
F &&
"Did expect an anchor function");
4468 if (!isAssumedDeadInternalFunction(
A)) {
4469 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4470 assumeLive(
A,
F->getEntryBlock());
4474 bool isAssumedDeadInternalFunction(
Attributor &
A) {
4475 if (!getAnchorScope()->hasLocalLinkage())
4477 bool UsedAssumedInformation =
false;
4479 true, UsedAssumedInformation);
4483 const std::string getAsStr(
Attributor *
A)
const override {
4484 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4485 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4486 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4487 std::to_string(KnownDeadEnds.size()) +
"]";
4492 assert(getState().isValidState() &&
4493 "Attempted to manifest an invalid state!");
4498 if (AssumedLiveBlocks.empty()) {
4499 A.deleteAfterManifest(
F);
4500 return ChangeStatus::CHANGED;
4506 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4508 KnownDeadEnds.set_union(ToBeExploredFrom);
4509 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4510 auto *CB = dyn_cast<CallBase>(DeadEndI);
4513 bool IsKnownNoReturn;
4514 bool MayReturn = !AA::hasAssumedIRAttr<Attribute::NoReturn>(
4517 if (MayReturn && (!Invoke2CallAllowed || !isa<InvokeInst>(CB)))
4520 if (
auto *
II = dyn_cast<InvokeInst>(DeadEndI))
4521 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4523 A.changeToUnreachableAfterManifest(
4524 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4525 HasChanged = ChangeStatus::CHANGED;
4530 if (!AssumedLiveBlocks.count(&BB)) {
4531 A.deleteAfterManifest(BB);
4533 HasChanged = ChangeStatus::CHANGED;
4543 assert(
From->getParent() == getAnchorScope() &&
4545 "Used AAIsDead of the wrong function");
4546 return isValidState() && !AssumedLiveEdges.count(std::make_pair(
From, To));
4550 void trackStatistics()
const override {}
4553 bool isAssumedDead()
const override {
return false; }
4556 bool isKnownDead()
const override {
return false; }
4559 bool isAssumedDead(
const BasicBlock *BB)
const override {
4561 "BB must be in the same anchor scope function.");
4565 return !AssumedLiveBlocks.count(BB);
4569 bool isKnownDead(
const BasicBlock *BB)
const override {
4570 return getKnown() && isAssumedDead(BB);
4574 bool isAssumedDead(
const Instruction *
I)
const override {
4575 assert(
I->getParent()->getParent() == getAnchorScope() &&
4576 "Instruction must be in the same anchor scope function.");
4583 if (!AssumedLiveBlocks.count(
I->getParent()))
4589 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4597 bool isKnownDead(
const Instruction *
I)
const override {
4598 return getKnown() && isAssumedDead(
I);
4604 if (!AssumedLiveBlocks.insert(&BB).second)
4612 if (
const auto *CB = dyn_cast<CallBase>(&
I))
4614 if (
F->hasLocalLinkage())
4615 A.markLiveInternalFunction(*
F);
4639 bool IsKnownNoReturn;
4640 if (AA::hasAssumedIRAttr<Attribute::NoReturn>(
4642 return !IsKnownNoReturn;
4654 bool UsedAssumedInformation =
4655 identifyAliveSuccessors(
A, cast<CallBase>(
II), AA, AliveSuccessors);
4660 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4661 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4665 bool IsKnownNoUnwind;
4666 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
4668 UsedAssumedInformation |= !IsKnownNoUnwind;
4670 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4673 return UsedAssumedInformation;
4680 bool UsedAssumedInformation =
false;
4684 std::optional<Constant *>
C =
4685 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4686 if (!
C || isa_and_nonnull<UndefValue>(*
C)) {
4688 }
else if (isa_and_nonnull<ConstantInt>(*
C)) {
4690 BI.
getSuccessor(1 - cast<ConstantInt>(*C)->getValue().getZExtValue());
4695 UsedAssumedInformation =
false;
4698 return UsedAssumedInformation;
4705 bool UsedAssumedInformation =
false;
4709 UsedAssumedInformation)) {
4716 if (Values.
empty() ||
4717 (Values.
size() == 1 &&
4718 isa_and_nonnull<UndefValue>(Values.
front().getValue()))) {
4720 return UsedAssumedInformation;
4723 Type &Ty = *
SI.getCondition()->getType();
4725 auto CheckForConstantInt = [&](
Value *
V) {
4726 if (
auto *CI = dyn_cast_if_present<ConstantInt>(
AA::getWithType(*V, Ty))) {
4734 return CheckForConstantInt(
VAC.getValue());
4738 return UsedAssumedInformation;
4741 unsigned MatchedCases = 0;
4742 for (
const auto &CaseIt :
SI.cases()) {
4743 if (
Constants.count(CaseIt.getCaseValue())) {
4745 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4752 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4753 return UsedAssumedInformation;
4759 if (AssumedLiveBlocks.empty()) {
4760 if (isAssumedDeadInternalFunction(
A))
4764 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4765 assumeLive(
A,
F->getEntryBlock());
4769 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4770 << getAnchorScope()->
size() <<
"] BBs and "
4771 << ToBeExploredFrom.size() <<
" exploration points and "
4772 << KnownDeadEnds.size() <<
" known dead ends\n");
4777 ToBeExploredFrom.end());
4778 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4781 while (!Worklist.
empty()) {
4787 while (!
I->isTerminator() && !isa<CallBase>(
I))
4788 I =
I->getNextNode();
4790 AliveSuccessors.
clear();
4792 bool UsedAssumedInformation =
false;
4793 switch (
I->getOpcode()) {
4797 "Expected non-terminators to be handled already!");
4801 case Instruction::Call:
4802 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<CallInst>(*
I),
4803 *
this, AliveSuccessors);
4805 case Instruction::Invoke:
4806 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<InvokeInst>(*
I),
4807 *
this, AliveSuccessors);
4809 case Instruction::Br:
4810 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<BranchInst>(*
I),
4811 *
this, AliveSuccessors);
4813 case Instruction::Switch:
4814 UsedAssumedInformation = identifyAliveSuccessors(
A, cast<SwitchInst>(*
I),
4815 *
this, AliveSuccessors);
4819 if (UsedAssumedInformation) {
4820 NewToBeExploredFrom.insert(
I);
4821 }
else if (AliveSuccessors.
empty() ||
4822 (
I->isTerminator() &&
4823 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4824 if (KnownDeadEnds.insert(
I))
4829 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4830 << UsedAssumedInformation <<
"\n");
4832 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4833 if (!
I->isTerminator()) {
4834 assert(AliveSuccessors.size() == 1 &&
4835 "Non-terminator expected to have a single successor!");
4839 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4840 if (AssumedLiveEdges.insert(Edge).second)
4842 if (assumeLive(
A, *AliveSuccessor->getParent()))
4849 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4851 return !ToBeExploredFrom.count(I);
4854 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4863 if (ToBeExploredFrom.empty() &&
4864 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4866 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4868 return indicatePessimisticFixpoint();
4873struct AAIsDeadCallSite final : AAIsDeadFunction {
4875 : AAIsDeadFunction(IRP,
A) {}
4884 "supported for call sites yet!");
4889 return indicatePessimisticFixpoint();
4893 void trackStatistics()
const override {}
4907 Value &
V = *getAssociatedValue().stripPointerCasts();
4909 A.getAttrs(getIRPosition(),
4910 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4913 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4916 bool IsKnownNonNull;
4917 AA::hasAssumedIRAttr<Attribute::NonNull>(
4918 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4920 bool CanBeNull, CanBeFreed;
4921 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4922 A.getDataLayout(), CanBeNull, CanBeFreed));
4925 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4930 StateType &getState()
override {
return *
this; }
4931 const StateType &getState()
const override {
return *
this; }
4937 const Value *UseV =
U->get();
4942 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4947 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4948 if (
Base &&
Base == &getAssociatedValue())
4949 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4955 bool IsNonNull =
false;
4956 bool TrackUse =
false;
4957 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4958 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4959 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4960 <<
" for instruction " << *
I <<
"\n");
4962 addAccessedBytesForUse(
A, U,
I, State);
4963 State.takeKnownDerefBytesMaximum(DerefBytes);
4970 bool IsKnownNonNull;
4971 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4972 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4973 if (IsAssumedNonNull &&
4974 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
4975 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
4976 return ChangeStatus::CHANGED;
4984 bool IsKnownNonNull;
4985 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
4986 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4987 if (IsAssumedNonNull)
4989 Ctx, getAssumedDereferenceableBytes()));
4992 Ctx, getAssumedDereferenceableBytes()));
4996 const std::string getAsStr(
Attributor *
A)
const override {
4997 if (!getAssumedDereferenceableBytes())
4998 return "unknown-dereferenceable";
4999 bool IsKnownNonNull;
5000 bool IsAssumedNonNull =
false;
5002 IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
5003 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5004 return std::string(
"dereferenceable") +
5005 (IsAssumedNonNull ?
"" :
"_or_null") +
5006 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5007 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5008 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5009 (!
A ?
" [non-null is unknown]" :
"");
5014struct AADereferenceableFloating : AADereferenceableImpl {
5016 : AADereferenceableImpl(IRP,
A) {}
5021 bool UsedAssumedInformation =
false;
5023 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5025 Values.
push_back({getAssociatedValue(), getCtxI()});
5028 Stripped = Values.
size() != 1 ||
5029 Values.
front().getValue() != &getAssociatedValue();
5035 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5037 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5045 int64_t DerefBytes = 0;
5046 if (!AA || (!Stripped &&
this == AA)) {
5049 bool CanBeNull, CanBeFreed;
5051 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5052 T.GlobalState.indicatePessimisticFixpoint();
5055 DerefBytes =
DS.DerefBytesState.getAssumed();
5056 T.GlobalState &=
DS.GlobalState;
5062 int64_t OffsetSExt =
Offset.getSExtValue();
5066 T.takeAssumedDerefBytesMinimum(
5067 std::max(int64_t(0), DerefBytes - OffsetSExt));
5072 T.takeKnownDerefBytesMaximum(
5073 std::max(int64_t(0), DerefBytes - OffsetSExt));
5074 T.indicatePessimisticFixpoint();
5075 }
else if (OffsetSExt > 0) {
5081 T.indicatePessimisticFixpoint();
5085 return T.isValidState();
5088 for (
const auto &VAC : Values)
5089 if (!VisitValueCB(*
VAC.getValue()))
5090 return indicatePessimisticFixpoint();
5096 void trackStatistics()
const override {
5102struct AADereferenceableReturned final
5103 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5105 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5110 void trackStatistics()
const override {
5116struct AADereferenceableArgument final
5117 : AAArgumentFromCallSiteArguments<AADereferenceable,
5118 AADereferenceableImpl> {
5120 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5125 void trackStatistics()
const override {
5131struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5133 : AADereferenceableFloating(IRP,
A) {}
5136 void trackStatistics()
const override {
5142struct AADereferenceableCallSiteReturned final
5143 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5144 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5149 void trackStatistics()
const override {
5159 Value &AssociatedValue,
const Use *U,
5163 if (isa<CastInst>(
I)) {
5165 TrackUse = !isa<PtrToIntInst>(
I);
5168 if (
auto *
GEP = dyn_cast<GetElementPtrInst>(
I)) {
5169 if (
GEP->hasAllConstantIndices())
5175 if (
const auto *CB = dyn_cast<CallBase>(
I)) {
5189 const Value *UseV =
U->get();
5190 if (
auto *SI = dyn_cast<StoreInst>(
I)) {
5191 if (
SI->getPointerOperand() == UseV)
5192 MA =
SI->getAlign();
5193 }
else if (
auto *LI = dyn_cast<LoadInst>(
I)) {
5194 if (LI->getPointerOperand() == UseV)
5195 MA = LI->getAlign();
5196 }
else if (
auto *AI = dyn_cast<AtomicRMWInst>(
I)) {
5197 if (AI->getPointerOperand() == UseV)
5198 MA = AI->getAlign();
5199 }
else if (
auto *AI = dyn_cast<AtomicCmpXchgInst>(
I)) {
5200 if (AI->getPointerOperand() == UseV)
5201 MA = AI->getAlign();
5207 unsigned Alignment = MA->value();
5211 if (
Base == &AssociatedValue) {
5230 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5232 takeKnownMaximum(Attr.getValueAsInt());
5234 Value &
V = *getAssociatedValue().stripPointerCasts();
5235 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5238 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5243 ChangeStatus LoadStoreChanged = ChangeStatus::UNCHANGED;
5246 Value &AssociatedValue = getAssociatedValue();
5247 for (
const Use &U : AssociatedValue.
uses()) {
5248 if (
auto *SI = dyn_cast<StoreInst>(
U.getUser())) {
5249 if (
SI->getPointerOperand() == &AssociatedValue)
5250 if (
SI->getAlign() < getAssumedAlign()) {
5252 "Number of times alignment added to a store");
5253 SI->setAlignment(getAssumedAlign());
5254 LoadStoreChanged = ChangeStatus::CHANGED;
5256 }
else if (
auto *LI = dyn_cast<LoadInst>(
U.getUser())) {
5257 if (LI->getPointerOperand() == &AssociatedValue)
5258 if (LI->getAlign() < getAssumedAlign()) {
5259 LI->setAlignment(getAssumedAlign());
5261 "Number of times alignment added to a load");
5262 LoadStoreChanged = ChangeStatus::CHANGED;
5269 Align InheritAlign =
5270 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5271 if (InheritAlign >= getAssumedAlign())
5272 return LoadStoreChanged;
5273 return Changed | LoadStoreChanged;
5283 if (getAssumedAlign() > 1)
5291 bool TrackUse =
false;
5293 unsigned int KnownAlign =
5294 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5295 State.takeKnownMaximum(KnownAlign);
5301 const std::string getAsStr(
Attributor *
A)
const override {
5302 return "align<" + std::to_string(getKnownAlign().
value()) +
"-" +
5303 std::to_string(getAssumedAlign().
value()) +
">";
5308struct AAAlignFloating : AAAlignImpl {
5316 bool UsedAssumedInformation =
false;
5318 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5320 Values.
push_back({getAssociatedValue(), getCtxI()});
5323 Stripped = Values.
size() != 1 ||
5324 Values.
front().getValue() != &getAssociatedValue();
5328 auto VisitValueCB = [&](
Value &
V) ->
bool {
5329 if (isa<UndefValue>(V) || isa<ConstantPointerNull>(V))
5332 DepClassTy::REQUIRED);
5333 if (!AA || (!Stripped &&
this == AA)) {
5335 unsigned Alignment = 1;
5348 Alignment =
V.getPointerAlignment(
DL).value();
5351 T.takeKnownMaximum(Alignment);
5352 T.indicatePessimisticFixpoint();
5358 return T.isValidState();
5361 for (
const auto &VAC : Values) {
5362 if (!VisitValueCB(*
VAC.getValue()))
5363 return indicatePessimisticFixpoint();
5376struct AAAlignReturned final
5377 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5378 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5386struct AAAlignArgument final
5387 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5388 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5396 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5397 return ChangeStatus::UNCHANGED;
5398 return Base::manifest(
A);
5405struct AAAlignCallSiteArgument final : AAAlignFloating {
5407 : AAAlignFloating(IRP,
A) {}
5414 if (
Argument *Arg = getAssociatedArgument())
5415 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5416 return ChangeStatus::UNCHANGED;
5418 Align InheritAlign =
5419 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5420 if (InheritAlign >= getAssumedAlign())
5421 Changed = ChangeStatus::UNCHANGED;
5428 if (
Argument *Arg = getAssociatedArgument()) {
5431 const auto *ArgAlignAA =
A.getAAFor<
AAAlign>(
5434 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5444struct AAAlignCallSiteReturned final
5445 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5446 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5463 assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
5464 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5469 const std::string getAsStr(
Attributor *
A)
const override {
5470 return getAssumed() ?
"noreturn" :
"may-return";
5475 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5476 bool UsedAssumedInformation =
false;
5477 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5478 {(unsigned)Instruction::Ret},
5479 UsedAssumedInformation))
5480 return indicatePessimisticFixpoint();
5481 return ChangeStatus::UNCHANGED;
5485struct AANoReturnFunction final : AANoReturnImpl {
5487 : AANoReturnImpl(IRP,
A) {}
5494struct AANoReturnCallSite final
5495 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5497 : AACalleeToCallSite<
AANoReturn, AANoReturnImpl>(IRP,
A) {}
5514 Value &
V = getAssociatedValue();
5515 if (
auto *
C = dyn_cast<Constant>(&V)) {
5516 if (
C->isThreadDependent())
5517 indicatePessimisticFixpoint();
5519 indicateOptimisticFixpoint();
5522 if (
auto *CB = dyn_cast<CallBase>(&V))
5525 indicateOptimisticFixpoint();
5528 if (
auto *
I = dyn_cast<Instruction>(&V)) {
5533 indicatePessimisticFixpoint();
5543 Value &
V = getAssociatedValue();
5545 if (
auto *
I = dyn_cast<Instruction>(&V))
5546 Scope =
I->getFunction();
5547 if (
auto *
A = dyn_cast<Argument>(&V)) {
5549 if (!
Scope->hasLocalLinkage())
5553 return indicateOptimisticFixpoint();
5555 bool IsKnownNoRecurse;
5556 if (AA::hasAssumedIRAttr<Attribute::NoRecurse>(
5561 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5562 const Instruction *UserI = dyn_cast<Instruction>(
U.getUser());
5563 if (!UserI || isa<GetElementPtrInst>(UserI) || isa<CastInst>(UserI) ||
5564 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
5568 if (isa<LoadInst>(UserI) || isa<CmpInst>(UserI) ||
5569 (isa<StoreInst>(UserI) &&
5570 cast<StoreInst>(UserI)->getValueOperand() !=
U.get()))
5572 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
5576 if (!Callee || !
Callee->hasLocalLinkage())
5582 DepClassTy::OPTIONAL);
5583 if (!ArgInstanceInfoAA ||
5584 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5589 A, *CB, *Scope, *
this,
nullptr,
5597 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5598 if (
auto *SI = dyn_cast<StoreInst>(OldU.
getUser())) {
5599 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5607 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5608 DepClassTy::OPTIONAL,
5609 true, EquivalentUseCB))
5610 return indicatePessimisticFixpoint();
5616 const std::string getAsStr(
Attributor *
A)
const override {
5617 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5621 void trackStatistics()
const override {}
5625struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5627 : AAInstanceInfoImpl(IRP,
A) {}
5631struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5633 : AAInstanceInfoFloating(IRP,
A) {}
5637struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5639 : AAInstanceInfoImpl(IRP,
A) {}
5647 Argument *Arg = getAssociatedArgument();
5649 return indicatePessimisticFixpoint();
5654 return indicatePessimisticFixpoint();
5660struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5662 : AAInstanceInfoImpl(IRP,
A) {
5678struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5680 : AAInstanceInfoFloating(IRP,
A) {}
5687 bool IgnoreSubsumingPositions) {
5688 assert(ImpliedAttributeKind == Attribute::NoCapture &&
5689 "Unexpected attribute kind");
5692 return V.use_empty();
5698 if (isa<UndefValue>(V) || (isa<ConstantPointerNull>(V) &&
5699 V.getType()->getPointerAddressSpace() == 0)) {
5703 if (
A.hasAttr(IRP, {Attribute::NoCapture},
5704 true, Attribute::NoCapture))
5710 {Attribute::NoCapture, Attribute::ByVal},
5712 A.manifestAttrs(IRP,
5720 determineFunctionCaptureCapabilities(IRP, *
F, State);
5722 A.manifestAttrs(IRP,
5741 bool ReadOnly =
F.onlyReadsMemory();
5742 bool NoThrow =
F.doesNotThrow();
5743 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5744 if (ReadOnly && NoThrow && IsVoidReturn) {
5757 if (NoThrow && IsVoidReturn)
5762 if (!NoThrow || ArgNo < 0 ||
5763 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5766 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5767 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5768 if (U ==
unsigned(ArgNo))
5786 assert(!AA::hasAssumedIRAttr<Attribute::NoCapture>(
5787 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5797 if (!isAssumedNoCaptureMaybeReturned())
5800 if (isArgumentPosition()) {
5801 if (isAssumedNoCapture())
5809 const std::string getAsStr(
Attributor *
A)
const override {
5810 if (isKnownNoCapture())
5811 return "known not-captured";
5812 if (isAssumedNoCapture())
5813 return "assumed not-captured";
5814 if (isKnownNoCaptureMaybeReturned())
5815 return "known not-captured-maybe-returned";
5816 if (isAssumedNoCaptureMaybeReturned())
5817 return "assumed not-captured-maybe-returned";
5818 return "assumed-captured";
5826 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5830 if (isa<PtrToIntInst>(UInst)) {
5832 return isCapturedIn(State,
true,
true,
5838 if (isa<StoreInst>(UInst))
5839 return isCapturedIn(State,
true,
true,
5843 if (isa<ReturnInst>(UInst)) {
5845 return isCapturedIn(State,
false,
false,
5847 return isCapturedIn(State,
true,
true,
5853 auto *CB = dyn_cast<CallBase>(UInst);
5855 return isCapturedIn(State,
true,
true,
5862 bool IsKnownNoCapture;
5864 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
5865 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5867 if (IsAssumedNoCapture)
5868 return isCapturedIn(State,
false,
false,
5872 return isCapturedIn(State,
false,
false,
5877 return isCapturedIn(State,
true,
true,
5885 bool CapturedInInt,
bool CapturedInRet) {
5886 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
5887 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
5903 return indicatePessimisticFixpoint();
5910 return indicatePessimisticFixpoint();
5918 T.addKnownBits(NOT_CAPTURED_IN_MEM);
5920 addKnownBits(NOT_CAPTURED_IN_MEM);
5927 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
5931 UsedAssumedInformation))
5933 bool SeenConstant =
false;
5935 if (isa<Constant>(
VAC.getValue())) {
5938 SeenConstant =
true;
5939 }
else if (!isa<Argument>(
VAC.getValue()) ||
5940 VAC.getValue() == getAssociatedArgument())
5946 bool IsKnownNoUnwind;
5947 if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
5949 bool IsVoidTy =
F->getReturnType()->isVoidTy();
5950 bool UsedAssumedInformation =
false;
5951 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
5952 T.addKnownBits(NOT_CAPTURED_IN_RET);
5953 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
5955 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
5956 addKnownBits(NOT_CAPTURED_IN_RET);
5957 if (isKnown(NOT_CAPTURED_IN_MEM))
5958 return indicateOptimisticFixpoint();
5969 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
5974 return checkUse(
A,
T, U, Follow);
5982 if (!
A.checkForAllUses(UseCheck, *
this, *V))
5983 return indicatePessimisticFixpoint();
5986 auto Assumed = S.getAssumed();
5987 S.intersectAssumedBits(
T.getAssumed());
5988 if (!isAssumedNoCaptureMaybeReturned())
5989 return indicatePessimisticFixpoint();
5995struct AANoCaptureArgument final : AANoCaptureImpl {
5997 : AANoCaptureImpl(IRP,
A) {}
6004struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6006 : AANoCaptureImpl(IRP,
A) {}
6014 Argument *Arg = getAssociatedArgument();
6016 return indicatePessimisticFixpoint();
6018 bool IsKnownNoCapture;
6020 if (AA::hasAssumedIRAttr<Attribute::NoCapture>(
6021 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6023 return ChangeStatus::UNCHANGED;
6025 return indicatePessimisticFixpoint();
6034struct AANoCaptureFloating final : AANoCaptureImpl {
6036 : AANoCaptureImpl(IRP,
A) {}
6039 void trackStatistics()
const override {
6045struct AANoCaptureReturned final : AANoCaptureImpl {
6047 : AANoCaptureImpl(IRP,
A) {
6062 void trackStatistics()
const override {}
6066struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6068 : AANoCaptureImpl(IRP,
A) {}
6074 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6078 void trackStatistics()
const override {
6089 SimplifiedAssociatedValue,
Other, Ty);
6090 if (SimplifiedAssociatedValue == std::optional<Value *>(
nullptr))
6094 if (SimplifiedAssociatedValue)
6095 dbgs() <<
"[ValueSimplify] is assumed to be "
6096 << **SimplifiedAssociatedValue <<
"\n";
6098 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6110 if (getAssociatedValue().
getType()->isVoidTy())
6111 indicatePessimisticFixpoint();
6112 if (
A.hasSimplificationCallback(getIRPosition()))
6113 indicatePessimisticFixpoint();
6117 const std::string getAsStr(
Attributor *
A)
const override {
6119 dbgs() <<
"SAV: " << (
bool)SimplifiedAssociatedValue <<
" ";
6120 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6121 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6123 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6128 void trackStatistics()
const override {}
6131 std::optional<Value *>
6132 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6133 return SimplifiedAssociatedValue;
6144 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6146 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6159 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6160 if (
Check && (
I.mayReadFromMemory() ||
6165 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6167 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6193 if (
const auto &NewV = VMap.
lookup(&V))
6195 bool UsedAssumedInformation =
false;
6196 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6198 if (!SimpleV.has_value())
6202 EffectiveV = *SimpleV;
6203 if (
auto *
C = dyn_cast<Constant>(EffectiveV))
6207 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6208 if (
auto *
I = dyn_cast<Instruction>(EffectiveV))
6209 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6210 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6217 Value *NewV = SimplifiedAssociatedValue
6218 ? *SimplifiedAssociatedValue
6220 if (NewV && NewV != &getAssociatedValue()) {
6224 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6226 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6235 const IRPosition &IRP,
bool Simplify =
true) {
6236 bool UsedAssumedInformation =
false;
6239 QueryingValueSimplified =
A.getAssumedSimplified(
6241 return unionAssumed(QueryingValueSimplified);
6245 template <
typename AAType>
bool askSimplifiedValueFor(
Attributor &
A) {
6246 if (!getAssociatedValue().
getType()->isIntegerTy())
6251 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6255 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6258 SimplifiedAssociatedValue = std::nullopt;
6259 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6262 if (
auto *
C = *COpt) {
6263 SimplifiedAssociatedValue =
C;
6264 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6270 bool askSimplifiedValueForOtherAAs(
Attributor &
A) {
6271 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6273 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6281 for (
auto &U : getAssociatedValue().
uses()) {
6285 if (
auto *
PHI = dyn_cast_or_null<PHINode>(IP))
6286 IP =
PHI->getIncomingBlock(U)->getTerminator();
6287 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6289 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6290 if (
A.changeUseAfterManifest(U, *NewV))
6291 Changed = ChangeStatus::CHANGED;
6295 return Changed | AAValueSimplify::manifest(
A);
6300 SimplifiedAssociatedValue = &getAssociatedValue();
6301 return AAValueSimplify::indicatePessimisticFixpoint();
6305struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6307 : AAValueSimplifyImpl(IRP,
A) {}
6310 AAValueSimplifyImpl::initialize(
A);
6311 if (
A.hasAttr(getIRPosition(),
6312 {Attribute::InAlloca, Attribute::Preallocated,
6313 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6315 indicatePessimisticFixpoint();
6322 Argument *Arg = getAssociatedArgument();
6328 return indicatePessimisticFixpoint();
6331 auto Before = SimplifiedAssociatedValue;
6345 bool UsedAssumedInformation =
false;
6346 std::optional<Constant *> SimpleArgOp =
6347 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6354 return unionAssumed(*SimpleArgOp);
6359 bool UsedAssumedInformation =
false;
6360 if (hasCallBaseContext() &&
6361 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6365 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6366 UsedAssumedInformation);
6369 if (!askSimplifiedValueForOtherAAs(
A))
6370 return indicatePessimisticFixpoint();
6373 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6374 : ChangeStatus ::CHANGED;
6378 void trackStatistics()
const override {
6383struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6385 : AAValueSimplifyImpl(IRP,
A) {}
6388 std::optional<Value *>
6389 getAssumedSimplifiedValue(
Attributor &
A)
const override {
6390 if (!isValidState())
6392 return SimplifiedAssociatedValue;
6397 auto Before = SimplifiedAssociatedValue;
6400 auto &RI = cast<ReturnInst>(
I);
6401 return checkAndUpdate(
6406 bool UsedAssumedInformation =
false;
6407 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6408 UsedAssumedInformation))
6409 if (!askSimplifiedValueForOtherAAs(
A))
6410 return indicatePessimisticFixpoint();
6413 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6414 : ChangeStatus ::CHANGED;
6420 return ChangeStatus::UNCHANGED;
6424 void trackStatistics()
const override {
6429struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6431 : AAValueSimplifyImpl(IRP,
A) {}
6435 AAValueSimplifyImpl::initialize(
A);
6436 Value &
V = getAnchorValue();
6439 if (isa<Constant>(V))
6440 indicatePessimisticFixpoint();
6445 auto Before = SimplifiedAssociatedValue;
6446 if (!askSimplifiedValueForOtherAAs(
A))
6447 return indicatePessimisticFixpoint();
6450 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6451 : ChangeStatus ::CHANGED;
6455 void trackStatistics()
const override {
6460struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6462 : AAValueSimplifyImpl(IRP,
A) {}
6466 SimplifiedAssociatedValue =
nullptr;
6467 indicateOptimisticFixpoint();
6472 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6475 void trackStatistics()
const override {
6480struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6482 : AAValueSimplifyFunction(IRP,
A) {}
6484 void trackStatistics()
const override {
6489struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6491 : AAValueSimplifyImpl(IRP,
A) {}
6494 AAValueSimplifyImpl::initialize(
A);
6495 Function *Fn = getAssociatedFunction();
6496 assert(Fn &&
"Did expect an associted function");
6502 checkAndUpdate(
A, *
this, IRP))
6503 indicateOptimisticFixpoint();
6505 indicatePessimisticFixpoint();
6513 return indicatePessimisticFixpoint();
6516 void trackStatistics()
const override {
6521struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6523 : AAValueSimplifyFloating(IRP,
A) {}
6531 if (FloatAA && FloatAA->getState().isValidState())
6534 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6535 Use &
U = cast<CallBase>(&getAnchorValue())
6536 ->getArgOperandUse(getCallSiteArgNo());
6537 if (
A.changeUseAfterManifest(U, *NewV))
6538 Changed = ChangeStatus::CHANGED;
6541 return Changed | AAValueSimplify::manifest(
A);
6544 void trackStatistics()
const override {
6554 struct AllocationInfo {
6566 }
Status = STACK_DUE_TO_USE;
6570 bool HasPotentiallyFreeingUnknownUses =
false;
6574 bool MoveAllocaIntoEntry =
true;
6580 struct DeallocationInfo {
6588 bool MightFreeUnknownObjects =
false;
6597 ~AAHeapToStackFunction() {
6600 for (
auto &It : AllocationInfos)
6601 It.second->~AllocationInfo();
6602 for (
auto &It : DeallocationInfos)
6603 It.second->~DeallocationInfo();
6607 AAHeapToStack::initialize(
A);
6610 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6617 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6626 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6627 AllocationInfos[CB] = AI;
6629 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6635 bool UsedAssumedInformation =
false;
6636 bool Success =
A.checkForAllCallLikeInstructions(
6637 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6641 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6645 bool &) -> std::optional<Value *> {
return nullptr; };
6646 for (
const auto &It : AllocationInfos)
6649 for (
const auto &It : DeallocationInfos)
6654 const std::string getAsStr(
Attributor *
A)
const override {
6655 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6656 for (
const auto &It : AllocationInfos) {
6657 if (It.second->Status == AllocationInfo::INVALID)
6658 ++NumInvalidMallocs;
6662 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6663 std::to_string(NumInvalidMallocs);
6667 void trackStatistics()
const override {
6670 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6671 for (
const auto &It : AllocationInfos)
6672 if (It.second->Status != AllocationInfo::INVALID)
6676 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6678 if (AllocationInfo *AI =
6679 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6680 return AI->Status != AllocationInfo::INVALID;
6684 bool isAssumedHeapToStackRemovedFree(
CallBase &CB)
const override {
6685 if (!isValidState())
6688 for (
const auto &It : AllocationInfos) {
6689 AllocationInfo &AI = *It.second;
6690 if (AI.Status == AllocationInfo::INVALID)
6693 if (AI.PotentialFreeCalls.count(&CB))
6701 assert(getState().isValidState() &&
6702 "Attempted to manifest an invalid state!");
6706 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6708 for (
auto &It : AllocationInfos) {
6709 AllocationInfo &AI = *It.second;
6710 if (AI.Status == AllocationInfo::INVALID)
6713 for (
CallBase *FreeCall : AI.PotentialFreeCalls) {
6714 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6715 A.deleteAfterManifest(*FreeCall);
6716 HasChanged = ChangeStatus::CHANGED;
6719 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6724 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6725 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6726 return OR <<
"Moving globalized variable to the stack.";
6727 return OR <<
"Moving memory allocation from the heap to the stack.";
6729 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6736 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6738 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6745 cast<ConstantInt>(SizeOffsetPair.
Offset)->isZero());
6750 ?
F->getEntryBlock().begin()
6751 : AI.CB->getIterator();
6754 if (
MaybeAlign RetAlign = AI.CB->getRetAlign())
6755 Alignment = std::max(Alignment, *RetAlign);
6757 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *
Align);
6758 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6759 "Expected an alignment during manifest!");
6761 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6765 unsigned AS =
DL.getAllocaAddrSpace();
6768 AI.CB->getName() +
".h2s", IP);
6770 if (Alloca->
getType() != AI.CB->getType())
6771 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6772 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6777 "Must be able to materialize initial memory state of allocation");
6781 if (
auto *
II = dyn_cast<InvokeInst>(AI.CB)) {
6782 auto *NBB =
II->getNormalDest();
6784 A.deleteAfterManifest(*AI.CB);
6786 A.deleteAfterManifest(*AI.CB);
6792 if (!isa<UndefValue>(InitVal)) {
6795 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6797 HasChanged = ChangeStatus::CHANGED;
6805 bool UsedAssumedInformation =
false;
6806 std::optional<Constant *> SimpleV =
6807 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6809 return APInt(64, 0);
6810 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
6811 return CI->getValue();
6812 return std::nullopt;
6816 AllocationInfo &AI) {
6817 auto Mapper = [&](
const Value *
V) ->
const Value * {
6818 bool UsedAssumedInformation =
false;
6819 if (std::optional<Constant *> SimpleV =
6820 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6827 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6845 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6847 const auto *LivenessAA =
6851 A.getInfoCache().getMustBeExecutedContextExplorer();
6853 bool StackIsAccessibleByOtherThreads =
6854 A.getInfoCache().stackIsAccessibleByOtherThreads();
6857 A.getInfoCache().getAnalysisResultForFunction<
LoopAnalysis>(*F);
6858 std::optional<bool> MayContainIrreducibleControl;
6860 if (&
F->getEntryBlock() == &BB)
6862 if (!MayContainIrreducibleControl.has_value())
6864 if (*MayContainIrreducibleControl)
6873 bool HasUpdatedFrees =
false;
6875 auto UpdateFrees = [&]() {
6876 HasUpdatedFrees =
true;
6878 for (
auto &It : DeallocationInfos) {
6879 DeallocationInfo &DI = *It.second;
6882 if (DI.MightFreeUnknownObjects)
6886 bool UsedAssumedInformation =
false;
6887 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
6894 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
6895 DI.MightFreeUnknownObjects =
true;
6901 if (isa<ConstantPointerNull>(Obj) || isa<UndefValue>(Obj))
6904 CallBase *ObjCB = dyn_cast<CallBase>(Obj);
6908 DI.MightFreeUnknownObjects =
true;
6912 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
6914 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
6916 DI.MightFreeUnknownObjects =
true;
6920 DI.PotentialAllocationCalls.insert(ObjCB);
6924 auto FreeCheck = [&](AllocationInfo &AI) {
6928 if (!StackIsAccessibleByOtherThreads) {
6930 if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
6933 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
6934 "other threads and function is not nosync:\n");
6938 if (!HasUpdatedFrees)
6942 if (AI.PotentialFreeCalls.size() != 1) {
6944 << AI.PotentialFreeCalls.size() <<
"\n");
6947 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
6948 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
6951 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
6952 << *UniqueFree <<
"\n");
6955 if (DI->MightFreeUnknownObjects) {
6957 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
6960 if (DI->PotentialAllocationCalls.empty())
6962 if (DI->PotentialAllocationCalls.size() > 1) {
6964 << DI->PotentialAllocationCalls.size()
6965 <<
" different allocations\n");
6968 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
6971 <<
"[H2S] unique free call not known to free this allocation but "
6972 << **DI->PotentialAllocationCalls.begin() <<
"\n");
6977 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
6979 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
6980 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
6981 "with the allocation "
6982 << *UniqueFree <<
"\n");
6989 auto UsesCheck = [&](AllocationInfo &AI) {
6990 bool ValidUsesOnly =
true;
6992 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
6994 if (isa<LoadInst>(UserI))
6996 if (
auto *SI = dyn_cast<StoreInst>(UserI)) {
6997 if (
SI->getValueOperand() ==
U.get()) {
6999 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7000 ValidUsesOnly =
false;
7006 if (
auto *CB = dyn_cast<CallBase>(UserI)) {
7009 if (DeallocationInfos.count(CB)) {
7010 AI.PotentialFreeCalls.insert(CB);
7017 bool IsKnownNoCapture;
7018 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7023 bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
7026 if (!IsAssumedNoCapture ||
7027 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7028 !IsAssumedNoFree)) {
7029 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7034 <<
"Could not move globalized variable to the stack. "
7035 "Variable is potentially captured in call. Mark "
7036 "parameter as `__attribute__((noescape))` to override.";
7039 if (ValidUsesOnly &&
7040 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7044 ValidUsesOnly =
false;
7049 if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
7050 isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
7057 ValidUsesOnly =
false;
7060 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7062 [&](
const Use &OldU,
const Use &NewU) {
7063 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7064 return !SI || StackIsAccessibleByOtherThreads ||
7065 AA::isAssumedThreadLocalObject(
7066 A, *SI->getPointerOperand(), *this);
7069 return ValidUsesOnly;
7074 for (
auto &It : AllocationInfos) {
7075 AllocationInfo &AI = *It.second;
7076 if (AI.Status == AllocationInfo::INVALID)
7080 std::optional<APInt> APAlign = getAPInt(
A, *
this, *
Align);
7084 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7086 AI.Status = AllocationInfo::INVALID;
7091 !APAlign->isPowerOf2()) {
7092 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7094 AI.Status = AllocationInfo::INVALID;
7101 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7106 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7108 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7112 AI.Status = AllocationInfo::INVALID;
7118 switch (AI.Status) {
7119 case AllocationInfo::STACK_DUE_TO_USE:
7122 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7124 case AllocationInfo::STACK_DUE_TO_FREE:
7127 AI.Status = AllocationInfo::INVALID;
7130 case AllocationInfo::INVALID:
7137 bool IsGlobalizedLocal =
7138 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7139 if (AI.MoveAllocaIntoEntry &&
7140 (!
Size.has_value() ||
7141 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7142 AI.MoveAllocaIntoEntry =
false;
7156 AAPrivatizablePtr::indicatePessimisticFixpoint();
7157 PrivatizableType =
nullptr;
7158 return ChangeStatus::CHANGED;
7164 virtual std::optional<Type *> identifyPrivatizableType(
Attributor &
A) = 0;
7168 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7169 std::optional<Type *>
T1) {
7179 std::optional<Type *> getPrivatizableType()
const override {
7180 return PrivatizableType;
7183 const std::string getAsStr(
Attributor *
A)
const override {
7184 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7188 std::optional<Type *> PrivatizableType;
7193struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7195 : AAPrivatizablePtrImpl(IRP,
A) {}
7198 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7201 bool UsedAssumedInformation =
false;
7203 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7205 if (!
Attrs.empty() &&
7207 true, UsedAssumedInformation))
7208 return Attrs[0].getValueAsType();
7210 std::optional<Type *> Ty;
7211 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7234 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7238 dbgs() <<
"<nullptr>";
7243 Ty = combineTypes(Ty, CSTy);
7246 dbgs() <<
" : New Type: ";
7248 (*Ty)->print(
dbgs());
7250 dbgs() <<
"<nullptr>";
7259 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7260 UsedAssumedInformation))
7267 PrivatizableType = identifyPrivatizableType(
A);
7268 if (!PrivatizableType)
7269 return ChangeStatus::UNCHANGED;
7270 if (!*PrivatizableType)
7271 return indicatePessimisticFixpoint();
7276 DepClassTy::OPTIONAL);
7279 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7282 return indicatePessimisticFixpoint();
7288 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7292 Function &Fn = *getIRPosition().getAnchorScope();
7296 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7298 return indicatePessimisticFixpoint();
7308 bool UsedAssumedInformation =
false;
7309 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7310 UsedAssumedInformation)) {
7312 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7314 return indicatePessimisticFixpoint();
7318 Argument *Arg = getAssociatedArgument();
7319 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7321 return indicatePessimisticFixpoint();
7328 auto IsCompatiblePrivArgOfCallback = [&](
CallBase &CB) {
7331 for (
const Use *U : CallbackUses) {
7333 assert(CBACS && CBACS.isCallbackCall());
7334 for (
Argument &CBArg : CBACS.getCalledFunction()->args()) {
7335 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7339 <<
"[AAPrivatizablePtr] Argument " << *Arg
7340 <<
"check if can be privatized in the context of its parent ("
7342 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7344 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7345 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7346 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7348 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7349 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7352 if (CBArgNo !=
int(ArgNo))
7356 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7360 if (*CBArgPrivTy == PrivatizableType)
7365 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7366 <<
" cannot be privatized in the context of its parent ("
7368 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7370 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7371 <<
").\n[AAPrivatizablePtr] for which the argument "
7372 "privatization is not compatible.\n";
7386 "Expected a direct call operand for callback call operand");
7391 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7392 <<
" check if be privatized in the context of its parent ("
7394 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7396 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7399 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7402 DepClassTy::REQUIRED);
7403 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7407 if (*DCArgPrivTy == PrivatizableType)
7413 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7414 <<
" cannot be privatized in the context of its parent ("
7416 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7419 <<
").\n[AAPrivatizablePtr] for which the argument "
7420 "privatization is not compatible.\n";
7432 return IsCompatiblePrivArgOfDirectCS(ACS);
7436 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7437 UsedAssumedInformation))
7438 return indicatePessimisticFixpoint();
7440 return ChangeStatus::UNCHANGED;
7446 identifyReplacementTypes(
Type *PrivType,
7450 assert(PrivType &&
"Expected privatizable type!");
7453 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7454 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7455 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7456 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7457 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7458 PrivArrayType->getElementType());
7469 assert(PrivType &&
"Expected privatizable type!");
7475 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7476 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7477 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7482 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7483 Type *PointeeTy = PrivArrayType->getElementType();
7484 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7485 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7496 void createReplacementValues(
Align Alignment,
Type *PrivType,
7500 assert(PrivType &&
"Expected privatizable type!");
7507 if (
auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
7508 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7509 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7510 Type *PointeeTy = PrivStructType->getElementType(u);
7514 L->setAlignment(Alignment);
7517 }
else if (
auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
7518 Type *PointeeTy = PrivArrayType->getElementType();
7519 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7520 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7523 L->setAlignment(Alignment);
7528 L->setAlignment(Alignment);
7535 if (!PrivatizableType)
7536 return ChangeStatus::UNCHANGED;
7537 assert(*PrivatizableType &&
"Expected privatizable type!");
7543 bool UsedAssumedInformation =
false;
7544 if (!
A.checkForAllInstructions(
7546 CallInst &CI = cast<CallInst>(I);
7547 if (CI.isTailCall())
7548 TailCalls.push_back(&CI);
7551 *
this, {Instruction::Call}, UsedAssumedInformation))
7552 return ChangeStatus::UNCHANGED;
7554 Argument *Arg = getAssociatedArgument();
7557 const auto *AlignAA =
7566 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7569 unsigned AS =
DL.getAllocaAddrSpace();
7571 Arg->
getName() +
".priv", IP);
7572 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7573 ArgIt->getArgNo(), IP);
7576 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7581 CI->setTailCall(
false);
7592 createReplacementValues(
7593 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7594 *PrivatizableType, ACS,
7602 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7605 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7606 std::move(FnRepairCB),
7607 std::move(ACSRepairCB)))
7608 return ChangeStatus::CHANGED;
7609 return ChangeStatus::UNCHANGED;
7613 void trackStatistics()
const override {
7618struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7620 : AAPrivatizablePtrImpl(IRP,
A) {}
7625 indicatePessimisticFixpoint();
7630 "updateImpl will not be called");
7634 std::optional<Type *> identifyPrivatizableType(
Attributor &
A)
override {
7637 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7641 if (
auto *AI = dyn_cast<AllocaInst>(Obj))
7642 if (
auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
7644 return AI->getAllocatedType();
7645 if (
auto *Arg = dyn_cast<Argument>(Obj)) {
7648 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7652 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7653 "alloca nor privatizable argument: "
7659 void trackStatistics()
const override {
7664struct AAPrivatizablePtrCallSiteArgument final
7665 :
public AAPrivatizablePtrFloating {
7667 : AAPrivatizablePtrFloating(IRP,
A) {}
7671 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7672 indicateOptimisticFixpoint();
7677 PrivatizableType = identifyPrivatizableType(
A);
7678 if (!PrivatizableType)
7679 return ChangeStatus::UNCHANGED;
7680 if (!*PrivatizableType)
7681 return indicatePessimisticFixpoint();
7684 bool IsKnownNoCapture;
7685 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
7686 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7687 if (!IsAssumedNoCapture) {
7688 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7689 return indicatePessimisticFixpoint();
7692 bool IsKnownNoAlias;
7693 if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
7694 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7695 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7696 return indicatePessimisticFixpoint();
7701 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7702 return indicatePessimisticFixpoint();
7705 return ChangeStatus::UNCHANGED;
7709 void trackStatistics()
const override {
7714struct AAPrivatizablePtrCallSiteReturned final
7715 :
public AAPrivatizablePtrFloating {
7717 : AAPrivatizablePtrFloating(IRP,
A) {}
7722 indicatePessimisticFixpoint();
7726 void trackStatistics()
const override {
7731struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7733 : AAPrivatizablePtrFloating(IRP,
A) {}
7738 indicatePessimisticFixpoint();
7742 void trackStatistics()
const override {
7758 intersectAssumedBits(BEST_STATE);
7759 getKnownStateFromValue(
A, getIRPosition(), getState());
7760 AAMemoryBehavior::initialize(
A);
7766 bool IgnoreSubsumingPositions =
false) {
7768 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7770 switch (Attr.getKindAsEnum()) {
7771 case Attribute::ReadNone:
7774 case Attribute::ReadOnly:
7777 case Attribute::WriteOnly:
7786 if (!
I->mayReadFromMemory())
7788 if (!
I->mayWriteToMemory())
7801 else if (isAssumedWriteOnly())
7810 if (
A.hasAttr(IRP, Attribute::ReadNone,
7812 return ChangeStatus::UNCHANGED;
7821 return ChangeStatus::UNCHANGED;
7824 A.removeAttrs(IRP, AttrKinds);
7827 A.removeAttrs(IRP, Attribute::Writable);
7834 const std::string getAsStr(
Attributor *
A)
const override {
7839 if (isAssumedWriteOnly())
7841 return "may-read/write";
7849 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7852struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7854 : AAMemoryBehaviorImpl(IRP,
A) {}
7860 void trackStatistics()
const override {
7865 else if (isAssumedWriteOnly())
7880struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7882 : AAMemoryBehaviorFloating(IRP,
A) {}
7886 intersectAssumedBits(BEST_STATE);
7891 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
7893 getKnownStateFromValue(
A, IRP, getState(),
7900 return ChangeStatus::UNCHANGED;
7904 if (
A.hasAttr(getIRPosition(),
7905 {Attribute::InAlloca, Attribute::Preallocated})) {
7906 removeKnownBits(NO_WRITES);
7907 removeAssumedBits(NO_WRITES);
7909 A.removeAttrs(getIRPosition(), AttrKinds);
7910 return AAMemoryBehaviorFloating::manifest(
A);
7914 void trackStatistics()
const override {
7919 else if (isAssumedWriteOnly())
7924struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
7926 : AAMemoryBehaviorArgument(IRP,
A) {}
7932 Argument *Arg = getAssociatedArgument();
7934 indicatePessimisticFixpoint();
7938 addKnownBits(NO_WRITES);
7939 removeKnownBits(NO_READS);
7940 removeAssumedBits(NO_READS);
7942 AAMemoryBehaviorArgument::initialize(
A);
7943 if (getAssociatedFunction()->isDeclaration())
7944 indicatePessimisticFixpoint();
7953 Argument *Arg = getAssociatedArgument();
7958 return indicatePessimisticFixpoint();
7963 void trackStatistics()
const override {
7968 else if (isAssumedWriteOnly())
7974struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
7976 : AAMemoryBehaviorFloating(IRP,
A) {}
7980 AAMemoryBehaviorImpl::initialize(
A);
7985 return ChangeStatus::UNCHANGED;
7989 void trackStatistics()
const override {}
7993struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
7995 : AAMemoryBehaviorImpl(IRP,
A) {}
8005 Function &
F = cast<Function>(getAnchorValue());
8011 else if (isAssumedWriteOnly())
8014 A.removeAttrs(getIRPosition(), AttrKinds);
8019 return A.manifestAttrs(getIRPosition(),
8024 void trackStatistics()
const override {
8029 else if (isAssumedWriteOnly())
8035struct AAMemoryBehaviorCallSite final
8036 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8043 CallBase &CB = cast<CallBase>(getAnchorValue());
8049 else if (isAssumedWriteOnly())
8052 A.removeAttrs(getIRPosition(), AttrKinds);
8057 Attribute::Writable);
8058 return A.manifestAttrs(
8063 void trackStatistics()
const override {
8068 else if (isAssumedWriteOnly())
8076 auto AssumedState = getAssumed();
8082 if (
const auto *CB = dyn_cast<CallBase>(&
I)) {
8085 if (MemBehaviorAA) {
8086 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8087 return !isAtFixpoint();
8092 if (
I.mayReadFromMemory())
8093 removeAssumedBits(NO_READS);
8094 if (
I.mayWriteToMemory())
8095 removeAssumedBits(NO_WRITES);
8096 return !isAtFixpoint();
8099 bool UsedAssumedInformation =
false;
8100 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8101 UsedAssumedInformation))
8102 return indicatePessimisticFixpoint();
8121 const auto *FnMemAA =
8125 S.addKnownBits(FnMemAA->getKnown());
8126 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8132 auto AssumedState = S.getAssumed();
8138 bool IsKnownNoCapture;
8140 bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
8144 if (!IsAssumedNoCapture &&
8146 S.intersectAssumedBits(FnMemAssumedState);
8152 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8154 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8162 Follow = followUsersOfUseIn(
A, U, UserI);
8166 analyzeUseIn(
A, U, UserI);
8168 return !isAtFixpoint();
8171 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8172 return indicatePessimisticFixpoint();
8178bool AAMemoryBehaviorFloating::followUsersOfUseIn(
Attributor &
A,
const Use &U,
8182 if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
8187 const auto *CB = dyn_cast<CallBase>(UserI);
8197 if (
U.get()->getType()->isPointerTy()) {
8199 bool IsKnownNoCapture;
8200 return !AA::hasAssumedIRAttr<Attribute::NoCapture>(
8208void AAMemoryBehaviorFloating::analyzeUseIn(
Attributor &
A,
const Use &U,
8216 case Instruction::Load:
8218 removeAssumedBits(NO_READS);
8221 case Instruction::Store:
8226 removeAssumedBits(NO_WRITES);
8228 indicatePessimisticFixpoint();
8231 case Instruction::Call:
8232 case Instruction::CallBr:
8233 case Instruction::Invoke: {
8236 const auto *CB = cast<CallBase>(UserI);
8240 indicatePessimisticFixpoint();
8247 removeAssumedBits(NO_READS);
8254 if (
U.get()->getType()->isPointerTy())
8258 const auto *MemBehaviorAA =
8264 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8272 removeAssumedBits(NO_READS);
8274 removeAssumedBits(NO_WRITES);
8286 return "all memory";
8289 std::string S =
"memory:";
8295 S +=
"internal global,";
8297 S +=
"external global,";
8301 S +=
"inaccessible,";
8315 AccessKind2Accesses.fill(
nullptr);
8318 ~AAMemoryLocationImpl() {
8321 for (AccessSet *AS : AccessKind2Accesses)
8328 intersectAssumedBits(BEST_STATE);
8329 getKnownStateFromValue(
A, getIRPosition(), getState());
8330 AAMemoryLocation::initialize(
A);
8336 bool IgnoreSubsumingPositions =
false) {
8345 bool UseArgMemOnly =
true;
8347 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8351 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8360 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8365 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8369 A.manifestAttrs(IRP,
8379 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8383 A.manifestAttrs(IRP,
8402 else if (isAssumedInaccessibleMemOnly())
8405 else if (isAssumedArgMemOnly())
8408 else if (isAssumedInaccessibleOrArgMemOnly())
8423 if (DeducedAttrs.
size() != 1)
8424 return ChangeStatus::UNCHANGED;
8432 bool checkForAllAccessesToMemoryKind(
8434 MemoryLocationsKind)>
8436 MemoryLocationsKind RequestedMLK)
const override {
8437 if (!isValidState())
8440 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8441 if (AssumedMLK == NO_LOCATIONS)
8445 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8446 CurMLK *= 2, ++
Idx) {
8447 if (CurMLK & RequestedMLK)
8450 if (
const AccessSet *Accesses = AccessKind2Accesses[
Idx])
8451 for (
const AccessInfo &AI : *Accesses)
8452 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8464 bool Changed =
false;
8465 MemoryLocationsKind KnownMLK = getKnown();
8466 Instruction *
I = dyn_cast<Instruction>(&getAssociatedValue());
8467 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8468 if (!(CurMLK & KnownMLK))
8469 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr, Changed,
8470 getAccessKindFromInst(
I));
8471 return AAMemoryLocation::indicatePessimisticFixpoint();
8491 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8495 return LHS.Ptr <
RHS.Ptr;
8496 if (
LHS.Kind !=
RHS.Kind)
8497 return LHS.Kind <
RHS.Kind;
8505 std::array<AccessSet *, llvm::CTLog2<VALID_STATE>()> AccessKind2Accesses;
8522 AK =
I->mayReadFromMemory() ? READ :
NONE;
8540 Changed |= Accesses->insert(AccessInfo{
I,
Ptr, AK}).second;
8541 if (MLK == NO_UNKOWN_MEM)
8543 State.removeAssumedBits(MLK);
8550 unsigned AccessAS = 0);
8556void AAMemoryLocationImpl::categorizePtrValue(
8559 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8561 << getMemoryLocationsAsStr(State.getAssumed()) <<
"]\n");
8563 auto Pred = [&](
Value &Obj) {
8566 MemoryLocationsKind MLK = NO_LOCATIONS;
8576 if (isa<UndefValue>(&Obj))
8578 if (isa<Argument>(&Obj)) {
8585 MLK = NO_ARGUMENT_MEM;
8586 }
else if (
auto *GV = dyn_cast<GlobalValue>(&Obj)) {
8590 if (
auto *GVar = dyn_cast<GlobalVariable>(GV))
8591 if (GVar->isConstant())
8594 if (GV->hasLocalLinkage())
8595 MLK = NO_GLOBAL_INTERNAL_MEM;
8597 MLK = NO_GLOBAL_EXTERNAL_MEM;
8598 }
else if (isa<ConstantPointerNull>(&Obj) &&
8602 }
else if (isa<AllocaInst>(&Obj)) {
8604 }
else if (
const auto *CB = dyn_cast<CallBase>(&Obj)) {
8605 bool IsKnownNoAlias;
8606 if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
8609 MLK = NO_MALLOCED_MEM;
8611 MLK = NO_UNKOWN_MEM;
8613 MLK = NO_UNKOWN_MEM;
8616 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8617 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8618 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8619 updateStateAndAccessesMap(State, MLK, &
I, &Obj, Changed,
8620 getAccessKindFromInst(&
I));
8629 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8630 updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8631 getAccessKindFromInst(&
I));
8636 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8637 << getMemoryLocationsAsStr(State.getAssumed()) <<
"\n");
8640void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8643 for (
unsigned ArgNo = 0, E = CB.
arg_size(); ArgNo < E; ++ArgNo) {
8652 const auto *ArgOpMemLocationAA =
8655 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8660 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs, Changed);
8667 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8671 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8673 if (
auto *CB = dyn_cast<CallBase>(&
I)) {
8679 <<
" [" << CBMemLocationAA <<
"]\n");
8680 if (!CBMemLocationAA) {
8681 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8682 Changed, getAccessKindFromInst(&
I));
8683 return NO_UNKOWN_MEM;
8686 if (CBMemLocationAA->isAssumedReadNone())
8687 return NO_LOCATIONS;
8689 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8690 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8691 Changed, getAccessKindFromInst(&
I));
8692 return AccessedLocs.getAssumed();
8695 uint32_t CBAssumedNotAccessedLocs =
8696 CBMemLocationAA->getAssumedNotAccessedLocation();
8699 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8700 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8702 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8703 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8705 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr, Changed,
8706 getAccessKindFromInst(&
I));
8711 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8712 if (HasGlobalAccesses) {
8715 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr, Changed,
8716 getAccessKindFromInst(&
I));
8719 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8720 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8721 return AccessedLocs.getWorstState();
8725 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8726 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8729 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8731 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs, Changed);
8734 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8735 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8737 return AccessedLocs.getAssumed();
8742 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8743 <<
I <<
" [" << *
Ptr <<
"]\n");
8744 categorizePtrValue(
A,
I, *
Ptr, AccessedLocs, Changed,
8745 Ptr->getType()->getPointerAddressSpace());
8746 return AccessedLocs.getAssumed();
8749 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8751 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr, Changed,
8752 getAccessKindFromInst(&
I));
8753 return AccessedLocs.getAssumed();
8757struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8759 : AAMemoryLocationImpl(IRP,
A) {}
8764 const auto *MemBehaviorAA =
8768 return indicateOptimisticFixpoint();
8770 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8771 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8772 return ChangeStatus::UNCHANGED;
8776 auto AssumedState = getAssumed();
8777 bool Changed =
false;
8780 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I, Changed);
8781 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8782 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8783 removeAssumedBits(inverseLocation(MLK,
false,
false));
8786 return getAssumedNotAccessedLocation() != VALID_STATE;
8789 bool UsedAssumedInformation =
false;
8790 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8791 UsedAssumedInformation))
8792 return indicatePessimisticFixpoint();
8794 Changed |= AssumedState != getAssumed();
8795 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8799 void trackStatistics()
const override {
8802 else if (isAssumedArgMemOnly())
8804 else if (isAssumedInaccessibleMemOnly())
8806 else if (isAssumedInaccessibleOrArgMemOnly())
8812struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8814 : AAMemoryLocationImpl(IRP,
A) {}
8827 return indicatePessimisticFixpoint();
8828 bool Changed =
false;
8831 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr, Changed,
8832 getAccessKindFromInst(
I));
8835 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8836 return indicatePessimisticFixpoint();
8837 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8841 void trackStatistics()
const override {
8855 const std::string getAsStr(
Attributor *
A)
const override {
8856 std::string Str(
"AADenormalFPMath[");
8859 DenormalState Known = getKnown();
8860 if (Known.Mode.isValid())
8861 OS <<
"denormal-fp-math=" << Known.Mode;
8865 if (Known.ModeF32.isValid())
8866 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
8872struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
8874 : AADenormalFPMathImpl(IRP,
A) {}
8886 Known = DenormalState{
Mode, ModeF32};
8897 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
8905 CallerInfo->getState());
8909 bool AllCallSitesKnown =
true;
8910 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
8911 return indicatePessimisticFixpoint();
8913 if (Change == ChangeStatus::CHANGED && isModeFixed())
8919 LLVMContext &Ctx = getAssociatedFunction()->getContext();
8924 AttrToRemove.
push_back(
"denormal-fp-math");
8930 if (Known.ModeF32 != Known.Mode) {
8932 Attribute::get(Ctx,
"denormal-fp-math-f32", Known.ModeF32.str()));
8934 AttrToRemove.
push_back(
"denormal-fp-math-f32");
8937 auto &IRP = getIRPosition();
8940 return A.removeAttrs(IRP, AttrToRemove) |
8941 A.manifestAttrs(IRP, AttrToAdd,
true);
8944 void trackStatistics()
const override {
8960 if (
A.hasSimplificationCallback(getIRPosition())) {
8961 indicatePessimisticFixpoint();
8966 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
8969 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
8973 const std::string getAsStr(
Attributor *
A)
const override {
8977 getKnown().print(
OS);
8979 getAssumed().print(
OS);
8987 if (!getAnchorScope())
9000 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9011 if (!getAnchorScope())
9018 const SCEV *S = getSCEV(
A,
I);
9030 if (!getAnchorScope())
9049 bool isValidCtxInstructionForOutsideAnalysis(
Attributor &
A,
9051 bool AllowAACtxI)
const {
9052 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9063 if (
auto *
I = dyn_cast<Instruction>(&getAssociatedValue())) {
9077 const Instruction *CtxI =
nullptr)
const override {
9078 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9084 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9090 const Instruction *CtxI =
nullptr)
const override {
9095 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9097 return getAssumed();
9101 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9109 Ty, AssumedConstantRange.
getLower())),
9111 Ty, AssumedConstantRange.
getUpper()))};
9133 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(0));
9135 mdconst::extract<ConstantInt>(KnownRanges->
getOperand(1));
9138 return Known.contains(Assumed) && Known != Assumed;
9145 auto *OldRangeMD =
I->getMetadata(LLVMContext::MD_range);
9146 if (isBetterRange(AssumedConstantRange, OldRangeMD)) {
9148 I->setMetadata(LLVMContext::MD_range,
9149 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9150 AssumedConstantRange));
9163 auto &
V = getAssociatedValue();
9167 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9168 "not the context instruction");
9169 if (isa<CallInst>(
I) || isa<LoadInst>(
I))
9170 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9171 Changed = ChangeStatus::CHANGED;
9179struct AAValueConstantRangeArgument final
9180 : AAArgumentFromCallSiteArguments<
9181 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9183 using Base = AAArgumentFromCallSiteArguments<
9190 void trackStatistics()
const override {
9195struct AAValueConstantRangeReturned
9196 : AAReturnedFromReturnedValues<AAValueConstantRange,
9197 AAValueConstantRangeImpl,
9198 AAValueConstantRangeImpl::StateType,
9202 AAValueConstantRangeImpl,
9210 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9211 indicatePessimisticFixpoint();
9215 void trackStatistics()
const override {
9220struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9222 : AAValueConstantRangeImpl(IRP,
A) {}
9226 AAValueConstantRangeImpl::initialize(
A);
9230 Value &
V = getAssociatedValue();
9232 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9234 indicateOptimisticFixpoint();
9238 if (isa<UndefValue>(&V)) {
9241 indicateOptimisticFixpoint();
9245 if (isa<CallBase>(&V))
9248 if (isa<BinaryOperator>(&V) || isa<CmpInst>(&V) || isa<CastInst>(&V))
9252 if (
LoadInst *LI = dyn_cast<LoadInst>(&V))
9253 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9260 if (isa<SelectInst>(V) || isa<PHINode>(V))
9264 indicatePessimisticFixpoint();
9267 << getAssociatedValue() <<
"\n");
9270 bool calculateBinaryOperator(
9278 bool UsedAssumedInformation =
false;
9279 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9282 if (!SimplifiedLHS.has_value())
9284 if (!*SimplifiedLHS)
9286 LHS = *SimplifiedLHS;
9288 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9291 if (!SimplifiedRHS.has_value())
9293 if (!*SimplifiedRHS)
9295 RHS = *SimplifiedRHS;
9303 DepClassTy::REQUIRED);
9307 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9311 DepClassTy::REQUIRED);
9315 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9317 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9319 T.unionAssumed(AssumedRange);
9323 return T.isValidState();
9326 bool calculateCastInst(
9335 bool UsedAssumedInformation =
false;
9336 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9339 if (!SimplifiedOpV.has_value())
9341 if (!*SimplifiedOpV)
9343 OpV = *SimplifiedOpV;
9350 DepClassTy::REQUIRED);
9354 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9356 return T.isValidState();
9367 bool UsedAssumedInformation =
false;
9368 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9371 if (!SimplifiedLHS.has_value())
9373 if (!*SimplifiedLHS)
9375 LHS = *SimplifiedLHS;
9377 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9380 if (!SimplifiedRHS.has_value())
9382 if (!*SimplifiedRHS)
9384 RHS = *SimplifiedRHS;
9392 DepClassTy::REQUIRED);
9398 DepClassTy::REQUIRED);
9402 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9403 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9406 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9409 bool MustTrue =
false, MustFalse =
false;
9411 auto AllowedRegion =
9414 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9420 assert((!MustTrue || !MustFalse) &&
9421 "Either MustTrue or MustFalse should be false!");
9430 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9431 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9432 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9436 return T.isValidState();
9445 if (!
I || isa<CallBase>(
I)) {
9448 bool UsedAssumedInformation =
false;
9449 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9452 if (!SimplifiedOpV.has_value())
9454 if (!*SimplifiedOpV)
9456 Value *VPtr = *SimplifiedOpV;
9461 DepClassTy::REQUIRED);
9465 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9469 return T.isValidState();
9473 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I)) {
9474 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9476 }
else if (
auto *CmpI = dyn_cast<CmpInst>(
I)) {
9477 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9479 }
else if (
auto *CastI = dyn_cast<CastInst>(
I)) {
9480 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9486 T.indicatePessimisticFixpoint();
9494 if (QueriedAA !=
this)
9497 if (
T.getAssumed() == getState().getAssumed())
9499 T.indicatePessimisticFixpoint();
9502 return T.isValidState();
9505 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9506 return indicatePessimisticFixpoint();
9511 return ChangeStatus::UNCHANGED;
9512 if (++NumChanges > MaxNumChanges) {
9513 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9514 <<
" but only " << MaxNumChanges
9515 <<
" are allowed to avoid cyclic reasoning.");
9516 return indicatePessimisticFixpoint();
9518 return ChangeStatus::CHANGED;
9522 void trackStatistics()
const override {
9531 static constexpr int MaxNumChanges = 5;
9534struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9536 : AAValueConstantRangeImpl(IRP,
A) {}
9540 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9548struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9550 : AAValueConstantRangeFunction(IRP,
A) {}
9556struct AAValueConstantRangeCallSiteReturned
9557 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9558 AAValueConstantRangeImpl::StateType,
9562 AAValueConstantRangeImpl::StateType,
9568 if (
CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue()))
9569 if (
auto *RangeMD = CI->getMetadata(LLVMContext::MD_range))
9572 AAValueConstantRangeImpl::initialize(
A);
9576 void trackStatistics()
const override {
9580struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9582 : AAValueConstantRangeFloating(IRP,
A) {}
9586 return ChangeStatus::UNCHANGED;
9590 void trackStatistics()
const override {
9607 if (
A.hasSimplificationCallback(getIRPosition()))
9608 indicatePessimisticFixpoint();
9610 AAPotentialConstantValues::initialize(
A);
9614 bool &ContainsUndef,
bool ForSelf) {
9616 bool UsedAssumedInformation =
false;
9618 UsedAssumedInformation)) {
9626 *
this, IRP, DepClassTy::REQUIRED);
9627 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9629 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9630 S = PotentialValuesAA->getState().getAssumedSet();
9637 ContainsUndef =
false;
9638 for (
auto &It : Values) {
9639 if (isa<UndefValue>(It.getValue())) {
9640 ContainsUndef =
true;
9643 auto *CI = dyn_cast<ConstantInt>(It.getValue());
9646 S.insert(CI->getValue());
9648 ContainsUndef &= S.empty();
9654 const std::string getAsStr(
Attributor *
A)
const override {
9663 return indicatePessimisticFixpoint();
9667struct AAPotentialConstantValuesArgument final
9668 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9669 AAPotentialConstantValuesImpl,
9670 PotentialConstantIntValuesState> {
9672 AAPotentialConstantValuesImpl,
9678 void trackStatistics()
const override {
9683struct AAPotentialConstantValuesReturned
9684 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9685 AAPotentialConstantValuesImpl> {
9687 AAPotentialConstantValuesImpl>;
9692 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9693 indicatePessimisticFixpoint();
9694 Base::initialize(
A);
9698 void trackStatistics()
const override {
9703struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9705 : AAPotentialConstantValuesImpl(IRP,
A) {}
9709 AAPotentialConstantValuesImpl::initialize(
A);
9713 Value &
V = getAssociatedValue();
9715 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
9716 unionAssumed(
C->getValue());
9717 indicateOptimisticFixpoint();
9721 if (isa<UndefValue>(&V)) {
9722 unionAssumedWithUndef();
9723 indicateOptimisticFixpoint();
9727 if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
9730 if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
9733 indicatePessimisticFixpoint();
9736 << getAssociatedValue() <<
"\n");
9750 case Instruction::Trunc:
9751 return Src.trunc(ResultBitWidth);
9752 case Instruction::SExt:
9753 return Src.sext(ResultBitWidth);
9754 case Instruction::ZExt:
9755 return Src.zext(ResultBitWidth);
9756 case Instruction::BitCast:
9763 bool &SkipOperation,
bool &Unsupported) {
9770 switch (BinOpcode) {
9774 case Instruction::Add:
9776 case Instruction::Sub:
9778 case Instruction::Mul:
9780 case Instruction::UDiv:
9782 SkipOperation =
true;
9786 case Instruction::SDiv:
9788 SkipOperation =
true;
9792 case Instruction::URem:
9794 SkipOperation =
true;
9798 case Instruction::SRem:
9800 SkipOperation =
true;
9804 case Instruction::Shl:
9806 case Instruction::LShr:
9808 case Instruction::AShr:
9810 case Instruction::And:
9812 case Instruction::Or:
9814 case Instruction::Xor:
9819 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9821 bool SkipOperation =
false;
9824 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9829 unionAssumed(Result);
9830 return isValidState();
9834 auto AssumedBefore = getAssumed();
9838 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9839 SetTy LHSAAPVS, RHSAAPVS;
9841 LHSContainsUndef,
false) ||
9843 RHSContainsUndef,
false))
9844 return indicatePessimisticFixpoint();
9847 bool MaybeTrue =
false, MaybeFalse =
false;
9849 if (LHSContainsUndef && RHSContainsUndef) {
9852 unionAssumedWithUndef();
9853 }
else if (LHSContainsUndef) {
9854 for (
const APInt &R : RHSAAPVS) {
9855 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9856 MaybeTrue |= CmpResult;
9857 MaybeFalse |= !CmpResult;
9858 if (MaybeTrue & MaybeFalse)
9859 return indicatePessimisticFixpoint();
9861 }
else if (RHSContainsUndef) {
9862 for (
const APInt &L : LHSAAPVS) {
9863 bool CmpResult = calculateICmpInst(ICI, L, Zero);
9864 MaybeTrue |= CmpResult;
9865 MaybeFalse |= !CmpResult;
9866 if (MaybeTrue & MaybeFalse)
9867 return indicatePessimisticFixpoint();
9870 for (
const APInt &L : LHSAAPVS) {
9871 for (
const APInt &R : RHSAAPVS) {
9872 bool CmpResult = calculateICmpInst(ICI, L, R);
9873 MaybeTrue |= CmpResult;
9874 MaybeFalse |= !CmpResult;
9875 if (MaybeTrue & MaybeFalse)
9876 return indicatePessimisticFixpoint();
9881 unionAssumed(
APInt( 1, 1));
9883 unionAssumed(
APInt( 1, 0));
9884 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9885 : ChangeStatus::CHANGED;
9889 auto AssumedBefore = getAssumed();
9893 bool UsedAssumedInformation =
false;
9894 std::optional<Constant *>
C =
A.getAssumedConstant(
9895 *
SI->getCondition(), *
this, UsedAssumedInformation);
9898 bool OnlyLeft =
false, OnlyRight =
false;
9899 if (
C && *
C && (*C)->isOneValue())
9901 else if (
C && *
C && (*C)->isZeroValue())
9904 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9905 SetTy LHSAAPVS, RHSAAPVS;
9908 LHSContainsUndef,
false))
9909 return indicatePessimisticFixpoint();
9913 RHSContainsUndef,
false))
9914 return indicatePessimisticFixpoint();
9916 if (OnlyLeft || OnlyRight) {
9918 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
9919 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
9922 unionAssumedWithUndef();
9924 for (
const auto &It : *OpAA)
9928 }
else if (LHSContainsUndef && RHSContainsUndef) {
9930 unionAssumedWithUndef();
9932 for (
const auto &It : LHSAAPVS)
9934 for (
const auto &It : RHSAAPVS)
9937 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9938 : ChangeStatus::CHANGED;
9942 auto AssumedBefore = getAssumed();
9944 return indicatePessimisticFixpoint();
9949 bool SrcContainsUndef =
false;
9952 SrcContainsUndef,
false))
9953 return indicatePessimisticFixpoint();
9955 if (SrcContainsUndef)
9956 unionAssumedWithUndef();
9958 for (
const APInt &S : SrcPVS) {
9959 APInt T = calculateCastInst(CI, S, ResultBitWidth);
9963 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
9964 : ChangeStatus::CHANGED;
9968 auto AssumedBefore = getAssumed();
9972 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9973 SetTy LHSAAPVS, RHSAAPVS;
9975 LHSContainsUndef,
false) ||
9977 RHSContainsUndef,
false))
9978 return indicatePessimisticFixpoint();
9983 if (LHSContainsUndef && RHSContainsUndef) {
9984 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
9985 return indicatePessimisticFixpoint();
9986 }
else if (LHSContainsUndef) {
9987 for (
const APInt &R : RHSAAPVS) {
9988 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
9989 return indicatePessimisticFixpoint();
9991 }
else if (RHSContainsUndef) {
9992 for (
const APInt &L : LHSAAPVS) {
9993 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
9994 return indicatePessimisticFixpoint();
9997 for (
const APInt &L : LHSAAPVS) {
9998 for (
const APInt &R : RHSAAPVS) {
9999 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10000 return indicatePessimisticFixpoint();
10004 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10005 : ChangeStatus::CHANGED;
10009 auto AssumedBefore = getAssumed();
10011 bool ContainsUndef;
10013 ContainsUndef,
true))
10014 return indicatePessimisticFixpoint();
10015 if (ContainsUndef) {
10016 unionAssumedWithUndef();
10021 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10022 : ChangeStatus::CHANGED;
10027 Value &
V = getAssociatedValue();
10030 if (
auto *ICI = dyn_cast<ICmpInst>(
I))
10031 return updateWithICmpInst(
A, ICI);
10033 if (
auto *SI = dyn_cast<SelectInst>(
I))
10034 return updateWithSelectInst(
A, SI);
10036 if (
auto *CI = dyn_cast<CastInst>(
I))
10037 return updateWithCastInst(
A, CI);
10039 if (
auto *BinOp = dyn_cast<BinaryOperator>(
I))
10040 return updateWithBinaryOperator(
A, BinOp);
10042 if (isa<PHINode>(
I) || isa<LoadInst>(
I))
10043 return updateWithInstruction(
A,
I);
10045 return indicatePessimisticFixpoint();
10049 void trackStatistics()
const override {
10054struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10056 : AAPotentialConstantValuesImpl(IRP,
A) {}
10061 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10066 void trackStatistics()
const override {
10071struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10073 : AAPotentialConstantValuesFunction(IRP,
A) {}
10076 void trackStatistics()
const override {
10081struct AAPotentialConstantValuesCallSiteReturned
10082 : AACalleeToCallSite<AAPotentialConstantValues,
10083 AAPotentialConstantValuesImpl> {
10084 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10087 AAPotentialConstantValuesImpl>(IRP,
A) {}
10090 void trackStatistics()
const override {
10095struct AAPotentialConstantValuesCallSiteArgument
10096 : AAPotentialConstantValuesFloating {
10097 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10099 : AAPotentialConstantValuesFloating(IRP,
A) {}
10103 AAPotentialConstantValuesImpl::initialize(
A);
10104 if (isAtFixpoint())
10107 Value &
V = getAssociatedValue();
10109 if (
auto *
C = dyn_cast<ConstantInt>(&V)) {
10110 unionAssumed(
C->getValue());
10111 indicateOptimisticFixpoint();
10115 if (isa<UndefValue>(&V)) {
10116 unionAssumedWithUndef();
10117 indicateOptimisticFixpoint();
10124 Value &
V = getAssociatedValue();
10125 auto AssumedBefore = getAssumed();
10129 return indicatePessimisticFixpoint();
10130 const auto &S = AA->getAssumed();
10132 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10133 : ChangeStatus::CHANGED;
10137 void trackStatistics()
const override {
10146 bool IgnoreSubsumingPositions) {
10147 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10148 "Unexpected attribute kind");
10149 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10150 Attribute::NoUndef))
10170 Value &V = getAssociatedValue();
10171 if (isa<UndefValue>(V))
10172 indicatePessimisticFixpoint();
10173 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10179 const Value *UseV =
U->get();
10188 bool TrackUse =
false;
10191 if (isa<CastInst>(*
I) || isa<GetElementPtrInst>(*
I))
10197 const std::string getAsStr(
Attributor *
A)
const override {
10198 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10205 bool UsedAssumedInformation =
false;
10206 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10207 UsedAssumedInformation))
10208 return ChangeStatus::UNCHANGED;
10212 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10215 return ChangeStatus::UNCHANGED;
10216 return AANoUndef::manifest(
A);
10220struct AANoUndefFloating :
public AANoUndefImpl {
10222 : AANoUndefImpl(IRP,
A) {}
10226 AANoUndefImpl::initialize(
A);
10227 if (!getState().isAtFixpoint() && getAnchorScope() &&
10228 !getAnchorScope()->isDeclaration())
10230 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10235 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10236 bool IsKnownNoUndef;
10237 return AA::hasAssumedIRAttr<Attribute::NoUndef>(
10238 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10242 bool UsedAssumedInformation =
false;
10243 Value *AssociatedValue = &getAssociatedValue();
10245 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10250 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10258 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10259 return indicatePessimisticFixpoint();
10260 return ChangeStatus::UNCHANGED;
10263 for (
const auto &VAC : Values)
10265 return indicatePessimisticFixpoint();
10267 return ChangeStatus::UNCHANGED;
10274struct AANoUndefReturned final
10275 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10277 : AAReturnedFromReturnedValues<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10283struct AANoUndefArgument final
10284 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10286 : AAArgumentFromCallSiteArguments<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10292struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10294 : AANoUndefFloating(IRP,
A) {}
10300struct AANoUndefCallSiteReturned final
10301 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10303 : AACalleeToCallSite<
AANoUndef, AANoUndefImpl>(IRP,
A) {}
10318 if (isa<UndefValue>(V)) {
10319 indicateOptimisticFixpoint();
10324 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10325 for (
const auto &Attr : Attrs) {
10336 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10343 auto *CB = dyn_cast<CallBase>(
I);
10352 if (
auto *NoFPAA =
A.getAAFor<
AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10353 State.addKnownBits(NoFPAA->getState().getKnown());
10357 const std::string getAsStr(
Attributor *
A)
const override {
10358 std::string
Result =
"nofpclass";
10360 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10370struct AANoFPClassFloating :
public AANoFPClassImpl {
10372 : AANoFPClassImpl(IRP,
A) {}
10377 bool UsedAssumedInformation =
false;
10378 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10380 Values.
push_back({getAssociatedValue(), getCtxI()});
10386 DepClassTy::REQUIRED);
10387 if (!AA ||
this == AA) {
10388 T.indicatePessimisticFixpoint();
10394 return T.isValidState();
10397 for (
const auto &VAC : Values)
10398 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10399 return indicatePessimisticFixpoint();
10405 void trackStatistics()
const override {
10410struct AANoFPClassReturned final
10411 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10412 AANoFPClassImpl::StateType, false,
10413 Attribute::None, false> {
10415 : AAReturnedFromReturnedValues<
AANoFPClass, AANoFPClassImpl,
10416 AANoFPClassImpl::StateType,
false,
10420 void trackStatistics()
const override {
10425struct AANoFPClassArgument final
10426 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10428 : AAArgumentFromCallSiteArguments<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10434struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10436 : AANoFPClassFloating(IRP,
A) {}
10439 void trackStatistics()
const override {
10444struct AANoFPClassCallSiteReturned final
10445 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10447 : AACalleeToCallSite<
AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10450 void trackStatistics()
const override {
10459 return CalledFunctions;
10462 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10464 bool hasNonAsmUnknownCallee()
const override {
10465 return HasUnknownCalleeNonAsm;
10468 const std::string getAsStr(
Attributor *
A)
const override {
10469 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10470 std::to_string(CalledFunctions.size()) +
"]";
10473 void trackStatistics()
const override {}
10477 if (CalledFunctions.insert(Fn)) {
10478 Change = ChangeStatus::CHANGED;
10484 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10485 if (!HasUnknownCallee)
10486 Change = ChangeStatus::CHANGED;
10487 if (NonAsm && !HasUnknownCalleeNonAsm)
10488 Change = ChangeStatus::CHANGED;
10489 HasUnknownCalleeNonAsm |= NonAsm;
10490 HasUnknownCallee =
true;
10498 bool HasUnknownCallee =
false;
10501 bool HasUnknownCalleeNonAsm =
false;
10504struct AACallEdgesCallSite :
public AACallEdgesImpl {
10506 : AACallEdgesImpl(IRP,
A) {}
10512 if (
Function *Fn = dyn_cast<Function>(&V)) {
10513 addCalledFunction(Fn, Change);
10515 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10516 setHasUnknownCallee(
true, Change);
10526 if (isa<Constant>(V)) {
10527 VisitValue(*V, CtxI);
10531 bool UsedAssumedInformation =
false;
10537 for (
auto &VAC : Values)
10538 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10541 CallBase *CB = cast<CallBase>(getCtxI());
10544 if (
IA->hasSideEffects() &&
10547 setHasUnknownCallee(
false, Change);
10554 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10555 if (IndirectCallAA->foreachCallee(
10556 [&](
Function *Fn) { return VisitValue(*Fn, CB); }))
10565 for (
const Use *U : CallbackUses)
10566 ProcessCalledOperand(
U->get(), CB);
10572struct AACallEdgesFunction :
public AACallEdgesImpl {
10574 : AACallEdgesImpl(IRP,
A) {}
10581 CallBase &CB = cast<CallBase>(Inst);
10587 if (CBEdges->hasNonAsmUnknownCallee())
10588 setHasUnknownCallee(
true, Change);
10589 if (CBEdges->hasUnknownCallee())
10590 setHasUnknownCallee(
false, Change);
10592 for (
Function *
F : CBEdges->getOptimisticEdges())
10593 addCalledFunction(
F, Change);
10599 bool UsedAssumedInformation =
false;
10600 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10601 UsedAssumedInformation,
10605 setHasUnknownCallee(
true, Change);
10614struct AAInterFnReachabilityFunction
10615 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10616 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10620 bool instructionCanReach(
10623 assert(
From.getFunction() == getAnchorScope() &&
"Queried the wrong AA!");
10624 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10626 RQITy StackRQI(
A,
From, To, ExclusionSet,
false);
10627 typename RQITy::Reachable
Result;
10628 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10629 return NonConstThis->isReachableImpl(
A, StackRQI,
10631 return Result == RQITy::Reachable::Yes;
10635 bool IsTemporaryRQI)
override {
10638 if (EntryI != RQI.From &&
10639 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10640 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10643 auto CheckReachableCallBase = [&](
CallBase *CB) {
10646 if (!CBEdges || !CBEdges->getState().isValidState())
10649 if (CBEdges->hasUnknownCallee())
10652 for (
Function *Fn : CBEdges->getOptimisticEdges()) {
10663 if (Fn == getAnchorScope()) {
10664 if (EntryI == RQI.From)
10671 DepClassTy::OPTIONAL);
10674 if (!InterFnReachability ||
10684 DepClassTy::OPTIONAL);
10690 if (CheckReachableCallBase(cast<CallBase>(&CBInst)))
10693 A, *RQI.From, CBInst, RQI.ExclusionSet);
10696 bool UsedExclusionSet =
true;
10697 bool UsedAssumedInformation =
false;
10698 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10699 UsedAssumedInformation,
10701 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10704 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10708 void trackStatistics()
const override {}
10712template <
typename AAType>
10713static std::optional<Constant *>
10724 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
10726 if (!COpt.has_value()) {
10728 return std::nullopt;
10730 if (
auto *
C = *COpt) {
10741 std::optional<Value *> V;
10742 for (
auto &It : Values) {
10744 if (V.has_value() && !*V)
10747 if (!V.has_value())
10761 if (
A.hasSimplificationCallback(getIRPosition())) {
10762 indicatePessimisticFixpoint();
10765 Value *Stripped = getAssociatedValue().stripPointerCasts();
10766 if (isa<Constant>(Stripped) && !isa<ConstantExpr>(Stripped)) {
10767 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10769 indicateOptimisticFixpoint();
10772 AAPotentialValues::initialize(
A);
10776 const std::string getAsStr(
Attributor *
A)
const override {
10783 template <
typename AAType>
10784 static std::optional<Value *> askOtherAA(
Attributor &
A,
10789 std::optional<Constant *>
C = askForAssumedConstant<AAType>(
A, AA, IRP, Ty);
10791 return std::nullopt;
10803 if (
auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
10804 for (
const auto &U : CB->
args()) {
10814 Type &Ty = *getAssociatedType();
10815 std::optional<Value *> SimpleV =
10816 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10817 if (SimpleV.has_value() && !*SimpleV) {
10819 *
this, ValIRP, DepClassTy::OPTIONAL);
10820 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10821 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10822 State.
unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10823 if (PotentialConstantsAA->undefIsContained())
10828 if (!SimpleV.has_value())
10835 if (isa<ConstantInt>(VPtr))
10840 State.unionAssumed({{*VPtr, CtxI}, S});
10850 return II.I ==
I &&
II.S == S;
10865 bool UsedAssumedInformation =
false;
10867 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
10868 UsedAssumedInformation))
10871 for (
auto &It : Values)
10872 ValueScopeMap[It] += CS;
10874 for (
auto &It : ValueScopeMap)
10875 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
10882 auto NewS = StateType::getBestState(getState());
10883 for (
const auto &It : getAssumedSet()) {
10886 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
10889 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10897 getState() = StateType::getBestState(getState());
10898 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
10905 return indicatePessimisticFixpoint();
10913 if (!getAssumedSimplifiedValues(
A, Values, S))
10915 Value &OldV = getAssociatedValue();
10916 if (isa<UndefValue>(OldV))
10918 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
10919 if (!NewV || NewV == &OldV)
10924 if (
A.changeAfterManifest(getIRPosition(), *NewV))
10930 bool getAssumedSimplifiedValues(
10932 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
10933 if (!isValidState())
10935 bool UsedAssumedInformation =
false;
10936 for (
const auto &It : getAssumedSet())
10937 if (It.second & S) {
10938 if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
10939 isa<SelectInst>(It.first.getValue()))) {
10940 if (
A.getAssumedSimplifiedValues(
10942 this, Values, S, UsedAssumedInformation))
10947 assert(!undefIsContained() &&
"Undef should be an explicit value!");
10952struct AAPotentialValuesFloating : AAPotentialValuesImpl {
10954 : AAPotentialValuesImpl(IRP,
A) {}
10958 auto AssumedBefore = getAssumed();
10960 genericValueTraversal(
A, &getAssociatedValue());
10962 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
10963 : ChangeStatus::CHANGED;
10967 struct LivenessInfo {
10968 const AAIsDead *LivenessAA =
nullptr;
10969 bool AnyDead =
false;
10982 bool UsedAssumedInformation =
false;
10984 auto GetSimplifiedValues = [&](
Value &
V,
10986 if (!
A.getAssumedSimplifiedValues(
10992 return Values.
empty();
10994 if (GetSimplifiedValues(*
LHS, LHSValues))
10996 if (GetSimplifiedValues(*
RHS, RHSValues))
11008 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11016 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11017 if (isa<UndefValue>(LHSV) || isa<UndefValue>(RHSV)) {
11019 nullptr,
II.S, getAnchorScope());
11025 if (&LHSV == &RHSV &&
11029 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11036 if (TypedLHS && TypedRHS) {
11038 if (NewV && NewV != &Cmp) {
11039 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11049 bool LHSIsNull = isa<ConstantPointerNull>(LHSV);
11050 bool RHSIsNull = isa<ConstantPointerNull>(RHSV);
11051 if (!LHSIsNull && !RHSIsNull)
11057 assert((LHSIsNull || RHSIsNull) &&
11058 "Expected nullptr versus non-nullptr comparison at this point");
11061 unsigned PtrIdx = LHSIsNull;
11062 bool IsKnownNonNull;
11063 bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
11065 DepClassTy::REQUIRED, IsKnownNonNull);
11066 if (!IsAssumedNonNull)
11072 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11077 for (
auto &LHSValue : LHSValues)
11078 for (
auto &RHSValue : RHSValues)
11079 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11087 bool UsedAssumedInformation =
false;
11089 std::optional<Constant *>
C =
11090 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11091 bool NoValueYet = !
C.has_value();
11092 if (NoValueYet || isa_and_nonnull<UndefValue>(*
C))
11094 if (
auto *CI = dyn_cast_or_null<ConstantInt>(*
C)) {
11099 }
else if (&SI == &getAssociatedValue()) {
11104 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11106 if (!SimpleV.has_value())
11109 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11121 bool UsedAssumedInformation =
false;
11123 PotentialValueOrigins, *
this,
11124 UsedAssumedInformation,
11126 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11127 "loaded values for load instruction "
11138 if (!
I || isa<AssumeInst>(
I))
11140 if (
auto *SI = dyn_cast<StoreInst>(
I))
11141 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11143 UsedAssumedInformation,
11145 return A.isAssumedDead(*
I,
this,
nullptr,
11146 UsedAssumedInformation,
11149 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11150 "and we cannot delete all the stores: "
11161 bool AllLocal = ScopeIsLocal;
11166 if (!DynamicallyUnique) {
11167 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11168 "values are dynamically unique: "
11173 for (
auto *PotentialCopy : PotentialCopies) {
11175 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11180 if (!AllLocal && ScopeIsLocal)
11185 bool handlePHINode(
11189 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11190 LivenessInfo &LI = LivenessAAs[&
F];
11191 if (!LI.LivenessAA)
11197 if (&
PHI == &getAssociatedValue()) {
11198 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11201 *
PHI.getFunction());
11205 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11207 if (LI.LivenessAA &&
11208 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11218 if (CyclePHI && isa<Instruction>(V) &&
11219 (!
C ||
C->contains(cast<Instruction>(V)->getParent())))
11227 bool UsedAssumedInformation =
false;
11228 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11230 if (!SimpleV.has_value())
11234 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11243 bool SomeSimplified =
false;
11244 bool UsedAssumedInformation =
false;
11249 const auto &SimplifiedOp =
A.getAssumedSimplified(
11254 if (!SimplifiedOp.has_value())
11258 NewOps[
Idx] = *SimplifiedOp;
11262 SomeSimplified |= (NewOps[
Idx] !=
Op);
11268 if (!SomeSimplified)
11275 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11281 if (!NewV || NewV == &
I)
11284 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11294 if (
auto *CI = dyn_cast<CmpInst>(&
I))
11296 CI->getPredicate(),
II, Worklist);
11298 switch (
I.getOpcode()) {
11299 case Instruction::Select:
11300 return handleSelectInst(
A, cast<SelectInst>(
I),
II, Worklist);
11301 case Instruction::PHI:
11302 return handlePHINode(
A, cast<PHINode>(
I),
II, Worklist, LivenessAAs);
11303 case Instruction::Load:
11304 return handleLoadInst(
A, cast<LoadInst>(
I),
II, Worklist);
11306 return handleGenericInst(
A,
I,
II, Worklist);
11333 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11334 << Iteration <<
"!\n");
11335 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11341 Value *NewV =
nullptr;
11342 if (
V->getType()->isPointerTy()) {
11345 if (
auto *CB = dyn_cast<CallBase>(V))
11355 if (NewV && NewV != V) {
11356 Worklist.
push_back({{*NewV, CtxI}, S});
11360 if (
auto *
I = dyn_cast<Instruction>(V)) {
11365 if (V != InitialV || isa<Argument>(V))
11370 if (V == InitialV && CtxI == getCtxI()) {
11371 indicatePessimisticFixpoint();
11375 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11376 }
while (!Worklist.
empty());
11380 for (
auto &It : LivenessAAs)
11381 if (It.second.AnyDead)
11382 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11386 void trackStatistics()
const override {
11391struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11392 using Base = AAPotentialValuesImpl;
11398 auto &Arg = cast<Argument>(getAssociatedValue());
11400 indicatePessimisticFixpoint();
11405 auto AssumedBefore = getAssumed();
11407 unsigned ArgNo = getCalleeArgNo();
11409 bool UsedAssumedInformation =
false;
11413 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11416 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11418 UsedAssumedInformation))
11421 return isValidState();
11424 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11426 UsedAssumedInformation))
11427 return indicatePessimisticFixpoint();
11429 Function *Fn = getAssociatedFunction();
11430 bool AnyNonLocal =
false;
11431 for (
auto &It : Values) {
11432 if (isa<Constant>(It.getValue())) {
11433 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11438 return indicatePessimisticFixpoint();
11440 if (
auto *Arg = dyn_cast<Argument>(It.getValue()))
11442 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11448 AnyNonLocal =
true;
11450 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11452 giveUpOnIntraprocedural(
A);
11454 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11455 : ChangeStatus::CHANGED;
11459 void trackStatistics()
const override {
11464struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11465 using Base = AAPotentialValuesFloating;
11472 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11473 indicatePessimisticFixpoint();
11480 ReturnedArg = &Arg;
11483 if (!
A.isFunctionIPOAmendable(*
F) ||
11484 A.hasSimplificationCallback(getIRPosition())) {
11486 indicatePessimisticFixpoint();
11488 indicateOptimisticFixpoint();
11494 auto AssumedBefore = getAssumed();
11495 bool UsedAssumedInformation =
false;
11498 Function *AnchorScope = getAnchorScope();
11504 UsedAssumedInformation,
11510 addValue(
A, getState(), *
VAC.getValue(),
11511 VAC.getCtxI() ?
VAC.getCtxI() : CtxI, S, AnchorScope);
11517 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11520 bool AddValues =
true;
11521 if (isa<PHINode>(RetI.getOperand(0)) ||
11522 isa<SelectInst>(RetI.getOperand(0))) {
11523 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11527 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11530 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11531 UsedAssumedInformation,
11533 return indicatePessimisticFixpoint();
11536 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11537 : ChangeStatus::CHANGED;
11542 Function *AnchorScope)
const override {
11544 if (
auto *CB = dyn_cast<CallBase>(&V))
11547 Base::addValue(
A, State, V, CtxI, S, AnchorScope);
11552 return ChangeStatus::UNCHANGED;
11554 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11556 return ChangeStatus::UNCHANGED;
11557 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11559 return ChangeStatus::UNCHANGED;
11562 if (
auto *Arg = dyn_cast<Argument>(NewVal)) {
11564 "Number of function with unique return");
11565 Changed |=
A.manifestAttrs(
11572 Value *RetOp = RetI.getOperand(0);
11573 if (isa<UndefValue>(RetOp) || RetOp == NewVal)
11576 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11577 Changed = ChangeStatus::CHANGED;
11580 bool UsedAssumedInformation =
false;
11581 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11582 UsedAssumedInformation,
11592 void trackStatistics()
const override{
11599struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11601 : AAPotentialValuesImpl(IRP,
A) {}
11610 void trackStatistics()
const override {
11615struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11617 : AAPotentialValuesFunction(IRP,
A) {}
11620 void trackStatistics()
const override {
11625struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11627 : AAPotentialValuesImpl(IRP,
A) {}
11631 auto AssumedBefore = getAssumed();
11635 return indicatePessimisticFixpoint();
11637 bool UsedAssumedInformation =
false;
11638 auto *CB = cast<CallBase>(getCtxI());
11641 UsedAssumedInformation))
11642 return indicatePessimisticFixpoint();
11647 UsedAssumedInformation))
11648 return indicatePessimisticFixpoint();
11652 bool AnyNonLocal =
false;
11653 for (
auto &It : Values) {
11654 Value *
V = It.getValue();
11655 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11656 V, *CB, *
this, UsedAssumedInformation);
11657 if (!CallerV.has_value()) {
11661 V = *CallerV ? *CallerV :
V;
11667 if (
auto *Arg = dyn_cast<Argument>(V))
11673 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11675 AnyNonLocal =
true;
11683 UsedAssumedInformation))
11684 return indicatePessimisticFixpoint();
11685 AnyNonLocal =
false;
11687 for (
auto &It : Values) {
11688 Value *
V = It.getValue();
11690 return indicatePessimisticFixpoint();
11692 addValue(
A, getState(), *V, CB,
AA::AnyScope, getAnchorScope());
11694 AnyNonLocal =
true;
11700 giveUpOnIntraprocedural(
A);
11702 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11703 : ChangeStatus::CHANGED;
11707 return AAPotentialValues::indicatePessimisticFixpoint();
11711 void trackStatistics()
const override {
11716struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11718 : AAPotentialValuesFloating(IRP,
A) {}
11721 void trackStatistics()
const override {
11737 if (getKnown().isUniversal())
11738 return ChangeStatus::UNCHANGED;
11742 getAssumed().getSet().
end());
11744 return A.manifestAttrs(IRP,
11747 llvm::join(Set,
",")),
11752 return isValidState() && setContains(Assumption);
11756 const std::string getAsStr(
Attributor *
A)
const override {
11757 const SetContents &Known = getKnown();
11758 const SetContents &Assumed = getAssumed();
11762 const std::string KnownStr = llvm::join(Set,
",");
11764 std::string AssumedStr =
"Universal";
11765 if (!Assumed.isUniversal()) {
11766 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11767 AssumedStr = llvm::join(Set,
",");
11769 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11784struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11786 : AAAssumptionInfoImpl(IRP,
A,
11791 bool Changed =
false;
11796 DepClassTy::REQUIRED);
11800 Changed |= getIntersection(AssumptionAA->getAssumed());
11801 return !getAssumed().empty() || !getKnown().empty();
11804 bool UsedAssumedInformation =
false;
11809 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11810 UsedAssumedInformation))
11811 return indicatePessimisticFixpoint();
11813 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11816 void trackStatistics()
const override {}
11820struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11823 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11834 auto *AssumptionAA =
11837 return indicatePessimisticFixpoint();
11838 bool Changed = getIntersection(AssumptionAA->getAssumed());
11839 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11843 void trackStatistics()
const override {}
11855 return Assumptions;
11870struct AAUnderlyingObjectsImpl
11876 const std::string getAsStr(
Attributor *
A)
const override {
11877 return std::string(
"UnderlyingObjects ") +
11879 ? (std::string(
"inter #") +
11880 std::to_string(InterAssumedUnderlyingObjects.size()) +
11881 " objs" + std::string(
", intra #") +
11882 std::to_string(IntraAssumedUnderlyingObjects.size()) +
11888 void trackStatistics()
const override {}
11892 auto &
Ptr = getAssociatedValue();
11896 bool UsedAssumedInformation =
false;
11901 Scope, UsedAssumedInformation))
11904 bool Changed =
false;
11906 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
11907 auto &
VAC = Values[
I];
11908 auto *Obj =
VAC.getValue();
11910 if (UO && UO !=
VAC.getValue() && SeenObjects.
insert(UO).second) {
11913 auto Pred = [&Values](
Value &
V) {
11918 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
11920 "The forall call should not return false at this position");
11925 if (isa<SelectInst>(Obj)) {
11926 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope);
11929 if (
auto *
PHI = dyn_cast<PHINode>(Obj)) {
11932 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
11933 Changed |= handleIndirect(
A, *
PHI->getIncomingValue(u),
11934 UnderlyingObjects, Scope);
11939 Changed |= UnderlyingObjects.
insert(Obj);
11945 bool Changed =
false;
11949 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11952 bool forallUnderlyingObjects(
11955 if (!isValidState())
11956 return Pred(getAssociatedValue());
11959 ? IntraAssumedUnderlyingObjects
11960 : InterAssumedUnderlyingObjects;
11961 for (
Value *Obj : AssumedUnderlyingObjects)
11974 bool Changed =
false;
11977 auto Pred = [&](
Value &
V) {
11978 Changed |= UnderlyingObjects.
insert(&V);
11981 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
11983 "The forall call should not return false at this position");
11993struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
11995 : AAUnderlyingObjectsImpl(IRP,
A) {}
11998struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12000 : AAUnderlyingObjectsImpl(IRP,
A) {}
12003struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12005 : AAUnderlyingObjectsImpl(IRP,
A) {}
12008struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12010 : AAUnderlyingObjectsImpl(IRP,
A) {}
12013struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12015 : AAUnderlyingObjectsImpl(IRP,
A) {}
12018struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12020 : AAUnderlyingObjectsImpl(IRP,
A) {}
12023struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12025 : AAUnderlyingObjectsImpl(IRP,
A) {}
12040 Instruction *UInst = dyn_cast<Instruction>(
U.getUser());
12046 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12047 << *UInst <<
"\n");
12049 if (
auto *Cmp = dyn_cast<ICmpInst>(
U.getUser())) {
12050 int Idx = &
Cmp->getOperandUse(0) == &
U;
12051 if (isa<Constant>(
Cmp->getOperand(
Idx)))
12053 return U == &getAnchorValue();
12057 if (isa<ReturnInst>(UInst)) {
12059 Worklist.
push_back(ACS.getInstruction());
12062 bool UsedAssumedInformation =
false;
12064 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12066 UsedAssumedInformation))
12073 auto *CB = dyn_cast<CallBase>(UInst);
12084 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12093 unsigned NumUsesBefore =
Uses.size();
12099 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12102 case UseCaptureKind::NO_CAPTURE:
12103 return checkUse(
A, U, Follow, Worklist);
12104 case UseCaptureKind::MAY_CAPTURE:
12105 return checkUse(
A, U, Follow, Worklist);
12106 case UseCaptureKind::PASSTHROUGH:
12112 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12113 Uses.insert(&OldU);
12117 while (!Worklist.
empty()) {
12119 if (!Visited.
insert(V).second)
12121 if (!
A.checkForAllUses(UsePred, *
this, *V,
12123 DepClassTy::OPTIONAL,
12124 true, EquivalentUseCB)) {
12125 return indicatePessimisticFixpoint();
12129 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12130 : ChangeStatus::CHANGED;
12133 bool isPotentialUse(
const Use &U)
const override {
12134 return !isValidState() ||
Uses.contains(&U);
12139 return ChangeStatus::UNCHANGED;
12143 const std::string getAsStr(
Attributor *
A)
const override {
12144 return "[" + std::to_string(
Uses.size()) +
" uses]";
12147 void trackStatistics()
const override {
12165 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12166 if (!MD && !
A.isClosedWorldModule())
12170 for (
const auto &
Op : MD->operands())
12171 if (
Function *Callee = mdconst::dyn_extract_or_null<Function>(
Op))
12172 PotentialCallees.insert(Callee);
12173 }
else if (
A.isClosedWorldModule()) {
12175 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12176 PotentialCallees.insert(IndirectlyCallableFunctions.
begin(),
12177 IndirectlyCallableFunctions.
end());
12180 if (PotentialCallees.empty())
12181 indicateOptimisticFixpoint();
12185 CallBase *CB = cast<CallBase>(getCtxI());
12190 bool AllCalleesKnownNow = AllCalleesKnown;
12192 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12193 bool &UsedAssumedInformation) {
12196 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12198 UsedAssumedInformation = !GIAA->isAtFixpoint();
12202 auto AddPotentialCallees = [&]() {
12203 for (
auto *PotentialCallee : PotentialCallees) {
12204 bool UsedAssumedInformation =
false;
12205 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12206 AssumedCalleesNow.
insert(PotentialCallee);
12212 bool UsedAssumedInformation =
false;
12215 AA::ValueScope::AnyScope,
12216 UsedAssumedInformation)) {
12217 if (PotentialCallees.empty())
12218 return indicatePessimisticFixpoint();
12219 AddPotentialCallees();
12224 auto CheckPotentialCallee = [&](
Function &Fn) {
12225 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12228 auto &CachedResult = FilterResults[&Fn];
12229 if (CachedResult.has_value())
12230 return CachedResult.value();
12232 bool UsedAssumedInformation =
false;
12233 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12234 if (!UsedAssumedInformation)
12235 CachedResult =
false;
12244 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12245 bool IsKnown =
false;
12246 if (AA::hasAssumedIRAttr<Attribute::NoUndef>(
12248 DepClassTy::OPTIONAL, IsKnown)) {
12250 CachedResult =
false;
12255 CachedResult =
true;
12261 for (
auto &VAC : Values) {
12262 if (isa<UndefValue>(
VAC.getValue()))
12264 if (isa<ConstantPointerNull>(
VAC.getValue()) &&
12265 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12268 if (
auto *VACFn = dyn_cast<Function>(
VAC.getValue())) {
12269 if (CheckPotentialCallee(*VACFn))
12270 AssumedCalleesNow.
insert(VACFn);
12273 if (!PotentialCallees.empty()) {
12274 AddPotentialCallees();
12277 AllCalleesKnownNow =
false;
12280 if (AssumedCalleesNow == AssumedCallees &&
12281 AllCalleesKnown == AllCalleesKnownNow)
12282 return ChangeStatus::UNCHANGED;
12284 std::swap(AssumedCallees, AssumedCalleesNow);
12285 AllCalleesKnown = AllCalleesKnownNow;
12286 return ChangeStatus::CHANGED;
12292 if (!AllCalleesKnown && AssumedCallees.empty())
12293 return ChangeStatus::UNCHANGED;
12295 CallBase *CB = cast<CallBase>(getCtxI());
12296 bool UsedAssumedInformation =
false;
12297 if (
A.isAssumedDead(*CB,
this,
nullptr,
12298 UsedAssumedInformation))
12299 return ChangeStatus::UNCHANGED;
12303 if (
FP->getType()->getPointerAddressSpace())
12314 if (AssumedCallees.empty()) {
12315 assert(AllCalleesKnown &&
12316 "Expected all callees to be known if there are none.");
12317 A.changeToUnreachableAfterManifest(CB);
12318 return ChangeStatus::CHANGED;
12322 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12323 auto *NewCallee = AssumedCallees.front();
12326 return ChangeStatus::CHANGED;
12333 A.deleteAfterManifest(*CB);
12334 return ChangeStatus::CHANGED;
12344 bool SpecializedForAnyCallees =
false;
12345 bool SpecializedForAllCallees = AllCalleesKnown;
12349 for (
Function *NewCallee : AssumedCallees) {
12350 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee)) {
12351 SkippedAssumedCallees.
push_back(NewCallee);
12352 SpecializedForAllCallees =
false;
12355 SpecializedForAnyCallees =
true;
12361 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12362 A.registerManifestAddedBasicBlock(*IP->getParent());
12363 auto *SplitTI = cast<BranchInst>(LastCmp->
getNextNode());
12368 A.registerManifestAddedBasicBlock(*ElseBB);
12370 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12378 auto *CBClone = cast<CallBase>(CB->
clone());
12379 CBClone->insertBefore(ThenTI);
12380 NewCall = &cast<CallInst>(
promoteCall(*CBClone, NewCallee, &RetBC));
12388 auto AttachCalleeMetadata = [&](
CallBase &IndirectCB) {
12389 if (!AllCalleesKnown)
12390 return ChangeStatus::UNCHANGED;
12391 MDBuilder MDB(IndirectCB.getContext());
12392 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12393 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12394 return ChangeStatus::CHANGED;
12397 if (!SpecializedForAnyCallees)
12398 return AttachCalleeMetadata(*CB);
12401 if (SpecializedForAllCallees) {
12405 IP->eraseFromParent();
12407 auto *CBClone = cast<CallInst>(CB->
clone());
12408 CBClone->setName(CB->
getName());
12409 CBClone->insertBefore(*IP->getParent(), IP);
12410 NewCalls.
push_back({CBClone,
nullptr});
12411 AttachCalleeMetadata(*CBClone);
12418 CB->
getParent()->getFirstInsertionPt());
12419 for (
auto &It : NewCalls) {
12421 Instruction *CallRet = It.second ? It.second : It.first;
12433 A.deleteAfterManifest(*CB);
12434 Changed = ChangeStatus::CHANGED;
12440 const std::string getAsStr(
Attributor *
A)
const override {
12441 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12442 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12446 void trackStatistics()
const override {
12447 if (AllCalleesKnown) {
12449 Eliminated, CallSites,
12450 "Number of indirect call sites eliminated via specialization")
12453 "Number of indirect call sites specialized")
12458 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12475 bool AllCalleesKnown =
true;
12486 assert(isValidState() &&
"the AA is invalid");
12487 return AssumedAddressSpace;
12492 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12493 "Associated value is not a pointer");
12497 int32_t OldAddressSpace = AssumedAddressSpace;
12499 DepClassTy::REQUIRED);
12500 auto Pred = [&](
Value &Obj) {
12501 if (isa<UndefValue>(&Obj))
12506 if (!AUO->forallUnderlyingObjects(Pred))
12507 return indicatePessimisticFixpoint();
12509 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
12510 : ChangeStatus::CHANGED;
12515 Value *AssociatedValue = &getAssociatedValue();
12516 Value *OriginalValue = peelAddrspacecast(AssociatedValue);
12520 return ChangeStatus::UNCHANGED;
12522 Type *NewPtrTy = PointerType::get(getAssociatedType()->getContext(),
12524 bool UseOriginalValue =
12528 bool Changed =
false;
12532 if (UseOriginalValue) {
12533 A.changeUseAfterManifest(U, *OriginalValue);
12538 A.changeUseAfterManifest(U, *
CastInst);
12541 auto Pred = [&](
const Use &
U,
bool &) {
12542 if (
U.get() != AssociatedValue)
12544 auto *Inst = dyn_cast<Instruction>(
U.getUser());
12551 if (isa<LoadInst>(Inst))
12552 MakeChange(Inst,
const_cast<Use &
>(U));
12553 if (isa<StoreInst>(Inst)) {
12555 if (
U.getOperandNo() == 1)
12556 MakeChange(Inst,
const_cast<Use &
>(U));
12563 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
12566 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12570 const std::string getAsStr(
Attributor *
A)
const override {
12571 if (!isValidState())
12572 return "addrspace(<invalid>)";
12573 return "addrspace(" +
12574 (AssumedAddressSpace == NoAddressSpace
12576 : std::to_string(AssumedAddressSpace)) +
12581 int32_t AssumedAddressSpace = NoAddressSpace;
12583 bool takeAddressSpace(int32_t AS) {
12584 if (AssumedAddressSpace == NoAddressSpace) {
12585 AssumedAddressSpace = AS;
12588 return AssumedAddressSpace == AS;
12592 if (
auto *
I = dyn_cast<AddrSpaceCastInst>(V))
12593 return peelAddrspacecast(
I->getPointerOperand());
12594 if (
auto *
C = dyn_cast<ConstantExpr>(V))
12595 if (
C->getOpcode() == Instruction::AddrSpaceCast)
12596 return peelAddrspacecast(
C->getOperand(0));
12601struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
12603 : AAAddressSpaceImpl(IRP,
A) {}
12605 void trackStatistics()
const override {
12610struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
12612 : AAAddressSpaceImpl(IRP,
A) {}
12618 (void)indicatePessimisticFixpoint();
12621 void trackStatistics()
const override {
12626struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
12628 : AAAddressSpaceImpl(IRP,
A) {}
12630 void trackStatistics()
const override {
12635struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
12637 : AAAddressSpaceImpl(IRP,
A) {}
12642struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
12644 : AAAddressSpaceImpl(IRP,
A) {}
12650 (void)indicatePessimisticFixpoint();
12653 void trackStatistics()
const override {
12665 std::optional<TypeSize> getAllocatedSize()
const override {
12666 assert(isValidState() &&
"the AA is invalid");
12667 return AssumedAllocatedSize;
12670 std::optional<TypeSize> findInitialAllocationSize(
Instruction *
I,
12674 switch (
I->getOpcode()) {
12675 case Instruction::Alloca: {
12680 return std::nullopt;
12690 if (!isa<AllocaInst>(
I))
12691 return indicatePessimisticFixpoint();
12693 bool IsKnownNoCapture;
12694 if (!AA::hasAssumedIRAttr<Attribute::NoCapture>(
12695 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
12696 return indicatePessimisticFixpoint();
12699 A.getOrCreateAAFor<
AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
12702 return indicatePessimisticFixpoint();
12705 return indicatePessimisticFixpoint();
12708 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
12711 if (!AllocationSize)
12712 return indicatePessimisticFixpoint();
12716 if (*AllocationSize == 0)
12717 return indicatePessimisticFixpoint();
12723 return indicatePessimisticFixpoint();
12725 if (BinSize == 0) {
12726 auto NewAllocationSize = std::optional<TypeSize>(
TypeSize(0,
false));
12727 if (!changeAllocationSize(NewAllocationSize))
12728 return ChangeStatus::UNCHANGED;
12729 return ChangeStatus::CHANGED;
12733 const auto &It = PI->
begin();
12736 if (It->first.Offset != 0)
12737 return indicatePessimisticFixpoint();
12739 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
12741 if (SizeOfBin >= *AllocationSize)
12742 return indicatePessimisticFixpoint();
12744 auto NewAllocationSize =
12745 std::optional<TypeSize>(
TypeSize(SizeOfBin * 8,
false));
12747 if (!changeAllocationSize(NewAllocationSize))
12748 return ChangeStatus::UNCHANGED;
12750 return ChangeStatus::CHANGED;
12756 assert(isValidState() &&
12757 "Manifest should only be called if the state is valid.");
12761 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
12763 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
12765 switch (
I->getOpcode()) {
12767 case Instruction::Alloca: {
12773 auto *NumBytesToValue =
12774 ConstantInt::get(
I->getContext(),
APInt(32, NumBytesToAllocate));
12777 insertPt = std::next(insertPt);
12783 return ChangeStatus::CHANGED;
12791 return ChangeStatus::UNCHANGED;
12795 const std::string getAsStr(
Attributor *
A)
const override {
12796 if (!isValidState())
12797 return "allocationinfo(<invalid>)";
12798 return "allocationinfo(" +
12799 (AssumedAllocatedSize == HasNoAllocationSize
12801 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
12806 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
12810 bool changeAllocationSize(std::optional<TypeSize>
Size) {
12811 if (AssumedAllocatedSize == HasNoAllocationSize ||
12812 AssumedAllocatedSize !=
Size) {
12813 AssumedAllocatedSize =
Size;
12820struct AAAllocationInfoFloating : AAAllocationInfoImpl {
12822 : AAAllocationInfoImpl(IRP,
A) {}
12824 void trackStatistics()
const override {
12829struct AAAllocationInfoReturned : AAAllocationInfoImpl {
12831 : AAAllocationInfoImpl(IRP,
A) {}
12837 (void)indicatePessimisticFixpoint();
12840 void trackStatistics()
const override {
12845struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
12847 : AAAllocationInfoImpl(IRP,
A) {}
12849 void trackStatistics()
const override {
12854struct AAAllocationInfoArgument : AAAllocationInfoImpl {
12856 : AAAllocationInfoImpl(IRP,
A) {}
12858 void trackStatistics()
const override {
12863struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
12865 : AAAllocationInfoImpl(IRP,
A) {}
12870 (void)indicatePessimisticFixpoint();
12873 void trackStatistics()
const override {
12920#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
12921 case IRPosition::PK: \
12922 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
12924#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
12925 case IRPosition::PK: \
12926 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
12930#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12931 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12932 CLASS *AA = nullptr; \
12933 switch (IRP.getPositionKind()) { \
12934 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12935 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
12936 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
12937 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
12938 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
12939 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
12940 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12941 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12946#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12947 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12948 CLASS *AA = nullptr; \
12949 switch (IRP.getPositionKind()) { \
12950 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12951 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
12952 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
12953 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12954 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12955 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
12956 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
12957 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
12962#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
12963 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12964 CLASS *AA = nullptr; \
12965 switch (IRP.getPositionKind()) { \
12966 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
12968 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
12974#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12975 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12976 CLASS *AA = nullptr; \
12977 switch (IRP.getPositionKind()) { \
12978 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12979 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
12980 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
12981 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
12982 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
12983 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
12984 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
12985 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
12990#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
12991 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
12992 CLASS *AA = nullptr; \
12993 switch (IRP.getPositionKind()) { \
12994 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
12995 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
12996 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
12997 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
12998 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
12999 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13000 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13001 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13006#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13007 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13008 CLASS *AA = nullptr; \
13009 switch (IRP.getPositionKind()) { \
13010 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13011 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13012 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13013 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13014 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13015 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13016 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13017 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13067#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13068#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13069#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13070#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13071#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13072#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13073#undef SWITCH_PK_CREATE
13074#undef SWITCH_PK_INV
amdgpu AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefAnalysis InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static const Value * getPointerOperand(const Instruction *I, bool AllowVolatile)
Get pointer operand of memory accessing instruction.
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
BlockVerifier::State From
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
Given that RA is a live propagate it s liveness to any other values it uses(according to Uses). void DeadArgumentEliminationPass
Performs the initial survey of the specified function
Given that RA is a live value
This file defines DenseMapInfo traits for DenseMap.
Rewrite Partial Register Uses
static LoopDeletionResult merge(LoopDeletionResult A, LoopDeletionResult B)
This file implements a map that provides insertion order iteration.
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysis::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysis::AdvisorMode::Development, "development", "for training")))
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
AACallGraphNode * operator*() const
A manager for alias analyses.
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
This class represents a conversion between pointers from one address space to another.
an instruction to allocate memory on the stack
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
static Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
static Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
MemoryEffects getMemoryEffects() const
Returns memory effects.
static Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
static Attribute getWithNoFPClass(LLVMContext &Context, FPClassTest Mask)
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static bool isEnumAttrKind(AttrKind Kind)
static Attribute getWithMemoryEffects(LLVMContext &Context, MemoryEffects ME)
static Attribute getWithAlignment(LLVMContext &Context, Align Alignment)
Return a uniquified Attribute object that has the specific alignment set.
LLVM Basic Block Representation.
const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Instruction & front() const
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Function * getParent() const
Return the enclosing method, or null if none.
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Allocate memory in an ever growing pool, as if by bump-pointer.
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
User::op_iterator arg_begin()
Return the iterator pointing to the beginning of the argument list.
bool isMustTailCall() const
Tests if this call site must be tail call optimized.
bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
Value * getArgOperand(unsigned i) const
User::op_iterator arg_end()
Return the iterator pointing to the end of the argument list.
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
Function * getCaller()
Helper to get the caller (the parent function).
This class represents a function call, abstracting a target machine's calling convention.
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
This is the base class for all instructions that perform data casts.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
This class is the base class for the comparison instructions.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
A constant value that is initialized with an expression using other constant values.
static Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
bool isEmptySet() const
Return true if this set contains no members.
APInt getSignedMin() const
Return the smallest signed value contained in the ConstantRange.
bool isSingleElement() const
Return true if this set contains exactly one member.
static ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
APInt getSignedMax() const
Return the largest signed value contained in the ConstantRange.
This is an important base class in LLVM.
Analysis pass which computes a CycleInfo.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
An instruction for ordering other memory operations.
A handy container for a FunctionType+Callee-pointer pair, which can be passed around as a single enti...
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
A possibly irreducible generalization of a Loop.
bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
This instruction compares its operands according to the predicate given to the constructor.
static bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
void insertBefore(Instruction *InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified instruction.
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
const Function * getFunction() const
Return the function this instruction belongs to.
BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
const Instruction * getNextNonDebugInstruction(bool SkipPseudoOp=false) const
Return a pointer to the next non-debug instruction in the same basic block as 'this',...
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
Analysis to compute lazy value information.
This pass computes, caches, and vends lazy value constraint information.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
An instruction for reading from memory.
Analysis pass that exposes the LoopInfo for a function.
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
const MDOperand & getOperand(unsigned I) const
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
unsigned getNumOperands() const
Return number of MDNode operands.
This class implements a map that also provides access to all stored values in a deterministic order.
static MemoryEffectsBase readOnly()
Create MemoryEffectsBase that can read any memory.
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access argument memory.
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible memory.
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
Create MemoryEffectsBase that can write any memory.
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
Create MemoryEffectsBase that can only access inaccessible or argument memory.
static MemoryEffectsBase none()
Create MemoryEffectsBase that cannot read or write any memory.
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
Create MemoryEffectsBase that can read and write any memory.
static std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
A Module instance is used to store all the information related to an LLVM module.
Evaluate the size and offset of an object pointed to by a Value*.
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Return a value (possibly void), from a function.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
This class represents an analyzed expression in the program.
Analysis pass that exposes the ScalarEvolution for a function.
The main scalar evolution driver.
const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
unsigned getSmallConstantMaxTripCount(const Loop *L)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
This class represents the LLVM 'select' instruction.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
A SetVector that performs no allocations if smaller than a certain size.
SmallSet - This maintains a set of unique values, optimizing for the case when the set is small (less...
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
StringRef - Represent a constant reference to a string, i.e.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Analysis pass providing the TargetTransformInfo.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
static IntegerType * getInt1Ty(LLVMContext &C)
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isSized(SmallPtrSetImpl< Type * > *Visited=nullptr) const
Return true if it makes sense to take the size of this type.
static IntegerType * getInt8Ty(LLVMContext &C)
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
static IntegerType * getInt32Ty(LLVMContext &C)
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
'undef' values are things that do not have specified contents.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr) const
Accumulate the constant offset this value has compared to a base pointer.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
An efficient, type-erasing, non-owning reference to a callable.
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
Enumerate the SCCs of a directed graph in reverse topological order of the SCC DAG.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
AddressSpace getAddressSpace(T *V)
@ C
The default llvm calling convention, compatible with C.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
const_iterator begin(StringRef path, Style style=Style::native)
Get begin iterator over path.
const_iterator end(StringRef path)
Get end iterator over path.
This is an optimization pass for GlobalISel generic memory operations.
pred_iterator pred_end(BasicBlock *BB)
bool operator<(int64_t V1, const APSInt &V2)
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
unsigned getPointerAddressSpace(const Type *T)
auto successors(const MachineBasicBlock *BB)
bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
bool operator!=(uint64_t V1, const APInt &V2)
UseCaptureKind DetermineUseCaptureKind(const Use &U, llvm::function_ref< bool(Value *, const DataLayout &)> IsDereferenceableOrNull)
Determine what kind of capture behaviour U may exhibit.
Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=6)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
auto unique(Range &&R, Predicate P)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
pred_iterator pred_begin(BasicBlock *BB)
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
void sort(IteratorTy Start, IteratorTy End)
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
Value * simplifyCmpInst(unsigned Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
PotentialValuesState< APInt > PotentialConstantIntValuesState
DWARFExpression::Operation Op
bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, unsigned Depth, const SimplifyQuery &SQ)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
Implement std::hash so that hash_code can be used in STL containers.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
bool forallInterferingAccesses(Instruction &I, function_ref< bool(const AAPointerInfo::Access &, bool)> CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
bool forallInterferingAccesses(AA::RangeTy Range, function_ref< bool(const AAPointerInfo::Access &, bool)> CB) const
See AAPointerInfo::forallInterferingAccesses.
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
---------------------- AAIntraFnReachability Attribute ----------------------
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
unsigned computeHashValue() const
An abstract interface for address space information.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
An abstract attribute for getting assumption information.
static const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
uint32_t getKnownDereferenceableBytes() const
Return known dereferenceable bytes.
uint32_t getAssumedDereferenceableBytes() const
Return assumed dereferenceable bytes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static const char ID
Unique ID (due to the unique address)
virtual bool isAssumedReachable(Attributor &A, const Instruction &From, const Instruction &To, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Returns true if 'From' instruction is assumed to reach 'To' instruction.
An abstract interface for liveness abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
static const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static const char ID
Unique ID (due to the unique address)
bool isAssumedNotConvergent() const
Return true if "non-convergent" is assumed.
An abstract interface for all nonnull attributes.
static const char ID
Unique ID (due to the unique address)
static bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual const_bin_iterator begin() const =0
static const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static const char ID
Unique ID (due to the unique address)
static const char ID
Unique ID (due to the unique address)
static Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
virtual std::optional< Type * > getPrivatizableType() const =0
Return the type we can choose for a private copy of the underlying value.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
static const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
virtual ChangeStatus indicateOptimisticFixpoint()=0
Indicate that the abstract state should converge to the optimistic state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
static Access getTombstoneKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Helper struct used in the communication between an abstract attribute (AA) that wants to change the s...
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >(const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & removeAssumedBits(base_t BitsEncoding)
Remove the bits in BitsEncoding from the "assumed bits" if not known.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
Represent subnormal handling kind for floating point instruction inputs and outputs.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
Incoming for lane mask phi as machine instruction, incoming register Reg and incoming block Block are...
State for an integer range.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint()
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...)
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Return the worst possible representable state.
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Helper that allows to insert a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
This struct is a compact representation of a valid (power of two) or undefined (0) alignment.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return an universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
Various options to control the behavior of getObjectSize.
static unsigned MaxPotentialValues
Maximum number of potential values to be tracked.
void unionAssumed(const MemberTy &C)
Union assumed set with the passed value.
static PotentialValuesState getBestState()
Return empty set as the best state of potential values.
const SetTy & getAssumedSet() const
Return this set.
Represent one information held inside an operand bundle of an llvm.assume.
A MapVector that performs no allocations if smaller than a certain size.
Helper to tie a abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.