54#include "llvm/IR/IntrinsicsAMDGPU.h"
55#include "llvm/IR/IntrinsicsNVPTX.h"
82#define DEBUG_TYPE "attributor"
86 cl::desc(
"Manifest Attributor internal string attributes."),
99 cl::desc(
"Maximum number of potential values to be "
100 "tracked for each position."),
105 "attributor-max-potential-values-iterations",
cl::Hidden,
107 "Maximum number of iterations we keep dismantling potential values."),
110STATISTIC(NumAAs,
"Number of abstract attributes created");
111STATISTIC(NumIndirectCallsPromoted,
"Number of indirect calls promoted");
126#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
127 ("Number of " #TYPE " marked '" #NAME "'")
128#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
129#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
130#define STATS_DECL(NAME, TYPE, MSG) \
131 STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
132#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
133#define STATS_DECLTRACK(NAME, TYPE, MSG) \
134 {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
135#define STATS_DECLTRACK_ARG_ATTR(NAME) \
136 STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
137#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
138 STATS_DECLTRACK(NAME, CSArguments, \
139 BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
140#define STATS_DECLTRACK_FN_ATTR(NAME) \
141 STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
142#define STATS_DECLTRACK_CS_ATTR(NAME) \
143 STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
144#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
145 STATS_DECLTRACK(NAME, FunctionReturn, \
146 BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
147#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
148 STATS_DECLTRACK(NAME, CSReturn, \
149 BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
150#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
151 STATS_DECLTRACK(NAME, Floating, \
152 ("Number of floating values known to be '" #NAME "'"))
157#define PIPE_OPERATOR(CLASS) \
158 raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
159 return OS << static_cast<const AbstractAttribute &>(AA); \
216 bool HeaderOnly,
Cycle **CPtr =
nullptr) {
219 auto *BB =
I->getParent();
225 return !HeaderOnly || BB ==
C->getHeader();
236 if (
DL.getTypeSizeInBits(Ty) !=
DL.getTypeAllocSizeInBits(Ty))
261 StartPos +=
DL.getTypeAllocSizeInBits(ElTy);
271 bool AllowVolatile) {
272 if (!AllowVolatile &&
I->isVolatile())
276 return LI->getPointerOperand();
280 return SI->getPointerOperand();
284 return CXI->getPointerOperand();
288 return RMWI->getPointerOperand();
310 bool GetMinOffset,
bool AllowNonInbounds,
311 bool UseAssumed =
false) {
313 auto AttributorAnalysis = [&](
Value &V,
APInt &ROffset) ->
bool {
320 if (!ValueConstantRangeAA)
324 if (
Range.isFullSet())
330 ROffset =
Range.getSignedMin();
332 ROffset =
Range.getSignedMax();
343 const Value *Ptr, int64_t &BytesOffset,
348 true, AllowNonInbounds);
356template <
typename AAType,
typename StateType =
typename AAType::StateType,
358 bool RecurseForSelectAndPHI =
true>
360 Attributor &
A,
const AAType &QueryingAA, StateType &S,
362 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp return value states for "
363 << QueryingAA <<
" into " << S <<
"\n");
365 assert((QueryingAA.getIRPosition().getPositionKind() ==
367 QueryingAA.getIRPosition().getPositionKind() ==
369 "Can only clamp returned value states for a function returned or call "
370 "site returned position!");
374 std::optional<StateType>
T;
377 auto CheckReturnValue = [&](
Value &RV) ->
bool {
391 <<
" AA: " <<
AA->getAsStr(&
A) <<
" @ " << RVPos <<
"\n");
392 const StateType &AAS =
AA->getState();
394 T = StateType::getBestState(AAS);
396 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" RV State: " <<
T
398 return T->isValidState();
401 if (!
A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
403 RecurseForSelectAndPHI))
404 S.indicatePessimisticFixpoint();
411template <
typename AAType,
typename BaseType,
412 typename StateType =
typename BaseType::StateType,
413 bool PropagateCallBaseContext =
false,
415 bool RecurseForSelectAndPHI =
true>
416struct AAReturnedFromReturnedValues :
public BaseType {
417 AAReturnedFromReturnedValues(
const IRPosition &IRP, Attributor &
A)
422 StateType S(StateType::getBestState(this->getState()));
424 RecurseForSelectAndPHI>(
426 PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
435template <
typename AAType,
typename StateType =
typename AAType::StateType,
437static void clampCallSiteArgumentStates(
Attributor &
A,
const AAType &QueryingAA,
439 LLVM_DEBUG(
dbgs() <<
"[Attributor] Clamp call site argument states for "
440 << QueryingAA <<
" into " << S <<
"\n");
442 assert(QueryingAA.getIRPosition().getPositionKind() ==
444 "Can only clamp call site argument states for an argument position!");
448 std::optional<StateType>
T;
451 unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();
471 LLVM_DEBUG(
dbgs() <<
"[Attributor] ACS: " << *ACS.getInstruction()
472 <<
" AA: " <<
AA->getAsStr(&
A) <<
" @" << ACSArgPos
474 const StateType &AAS =
AA->getState();
476 T = StateType::getBestState(AAS);
478 LLVM_DEBUG(
dbgs() <<
"[Attributor] AA State: " << AAS <<
" CSA State: " <<
T
480 return T->isValidState();
483 bool UsedAssumedInformation =
false;
484 if (!
A.checkForAllCallSites(CallSiteCheck, QueryingAA,
true,
485 UsedAssumedInformation))
486 S.indicatePessimisticFixpoint();
493template <
typename AAType,
typename BaseType,
494 typename StateType =
typename AAType::StateType,
496bool getArgumentStateFromCallBaseContext(
Attributor &
A,
500 "Expected an 'argument' position !");
506 assert(ArgNo >= 0 &&
"Invalid Arg No!");
520 const StateType &CBArgumentState =
521 static_cast<const StateType &
>(
AA->getState());
523 LLVM_DEBUG(
dbgs() <<
"[Attributor] Briding Call site context to argument"
524 <<
"Position:" << Pos <<
"CB Arg state:" << CBArgumentState
528 State ^= CBArgumentState;
533template <
typename AAType,
typename BaseType,
534 typename StateType =
typename AAType::StateType,
535 bool BridgeCallBaseContext =
false,
537struct AAArgumentFromCallSiteArguments :
public BaseType {
538 AAArgumentFromCallSiteArguments(
const IRPosition &IRP, Attributor &
A)
543 StateType S = StateType::getBestState(this->getState());
545 if (BridgeCallBaseContext) {
547 getArgumentStateFromCallBaseContext<AAType,
BaseType, StateType,
549 A, *
this, this->getIRPosition(), S);
553 clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(
A, *
this,
563template <
typename AAType,
typename BaseType,
564 typename StateType =
typename BaseType::StateType,
565 bool IntroduceCallBaseContext =
false,
567struct AACalleeToCallSite :
public BaseType {
568 AACalleeToCallSite(
const IRPosition &IRP, Attributor &
A) :
BaseType(IRP,
A) {}
572 auto IRPKind = this->getIRPosition().getPositionKind();
575 "Can only wrap function returned positions for call site "
576 "returned positions!");
577 auto &S = this->getState();
580 if (IntroduceCallBaseContext)
581 LLVM_DEBUG(
dbgs() <<
"[Attributor] Introducing call base context:" << CB
586 for (
const Function *Callee : Callees) {
590 IntroduceCallBaseContext ? &CB :
nullptr)
592 *
Callee, IntroduceCallBaseContext ? &CB : nullptr);
594 if (Attribute::isEnumAttrKind(IRAttributeKind)) {
597 A,
this, FnPos, DepClassTy::REQUIRED, IsKnown))
603 A.getAAFor<AAType>(*
this, FnPos, DepClassTy::REQUIRED);
607 if (S.isAtFixpoint())
608 return S.isValidState();
612 if (!
A.checkForAllCallees(CalleePred, *
this, CB))
613 return S.indicatePessimisticFixpoint();
619template <
class AAType,
typename StateType =
typename AAType::StateType>
625 auto EIt = Explorer.
begin(CtxI), EEnd = Explorer.
end(CtxI);
626 for (
unsigned u = 0;
u <
Uses.size(); ++
u) {
630 if (Found &&
AA.followUseInMBEC(
A, U, UserI, State))
645template <
class AAType,
typename StateType =
typename AAType::StateType>
646static void followUsesInMBEC(AAType &
AA,
Attributor &
A, StateType &S,
648 const Value &Val =
AA.getIRPosition().getAssociatedValue();
653 A.getInfoCache().getMustBeExecutedContextExplorer();
659 for (
const Use &U : Val.
uses())
662 followUsesInContext<AAType>(
AA,
A, *Explorer, &CtxI,
Uses, S);
664 if (S.isAtFixpoint())
670 if (Br->isConditional())
709 StateType ParentState;
713 ParentState.indicateOptimisticFixpoint();
715 for (
const BasicBlock *BB : Br->successors()) {
716 StateType ChildState;
718 size_t BeforeSize =
Uses.size();
719 followUsesInContext(
AA,
A, *Explorer, &BB->front(),
Uses, ChildState);
722 for (
auto It =
Uses.begin() + BeforeSize; It !=
Uses.end();)
725 ParentState &= ChildState;
799 R.indicatePessimisticFixpoint();
816 BS.indicateOptimisticFixpoint();
822 BS.indicatePessimisticFixpoint();
892 template <
typename F>
899 if (!
Range.mayOverlap(ItRange))
901 bool IsExact =
Range == ItRange && !
Range.offsetOrSizeAreUnknown();
902 for (
auto Index : It.getSecond()) {
912 template <
typename F>
923 for (
unsigned Index : LocalList->getSecond()) {
926 if (
Range.offsetAndSizeAreUnknown())
942 RemoteI = RemoteI ? RemoteI : &
I;
946 bool AccExists =
false;
948 for (
auto Index : LocalList) {
950 if (
A.getLocalInst() == &
I) {
959 <<
"[AAPointerInfo] Inserting access in new offset bins\n";);
961 for (
auto Key : ToAdd) {
968 AccessList.emplace_back(&
I, RemoteI, Ranges, Content, Kind, Ty);
970 "New Access should have been at AccIndex");
971 LocalList.push_back(AccIndex);
980 auto Before = Current;
982 if (Current == Before)
985 auto &ExistingRanges = Before.getRanges();
986 auto &NewRanges = Current.getRanges();
993 <<
"[AAPointerInfo] Removing access from old offset bins\n";);
1000 "Expected bin to actually contain the Access.");
1001 Bin.erase(AccIndex);
1022struct AAPointerInfoImpl
1023 :
public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {
1028 const std::string getAsStr(
Attributor *
A)
const override {
1029 return std::string(
"PointerInfo ") +
1030 (isValidState() ? (std::string(
"#") +
1031 std::to_string(OffsetBins.size()) +
" bins")
1036 [](int64_t O) {
return std::to_string(O); }),
1044 return AAPointerInfo::manifest(
A);
1047 const_bin_iterator
begin()
const override {
return State::begin(); }
1048 const_bin_iterator
end()
const override {
return State::end(); }
1049 int64_t numOffsetBins()
const override {
return State::numOffsetBins(); }
1050 bool reachesReturn()
const override {
1051 return !ReturnedOffsets.isUnassigned();
1053 void addReturnedOffsetsTo(OffsetInfo &OI)
const override {
1054 if (ReturnedOffsets.isUnknown()) {
1059 OffsetInfo MergedOI;
1060 for (
auto Offset : ReturnedOffsets) {
1061 OffsetInfo TmpOI = OI;
1063 MergedOI.merge(TmpOI);
1065 OI = std::move(MergedOI);
1068 ChangeStatus setReachesReturn(
const OffsetInfo &ReachedReturnedOffsets) {
1069 if (ReturnedOffsets.isUnknown())
1070 return ChangeStatus::UNCHANGED;
1071 if (ReachedReturnedOffsets.isUnknown()) {
1072 ReturnedOffsets.setUnknown();
1073 return ChangeStatus::CHANGED;
1075 if (ReturnedOffsets.merge(ReachedReturnedOffsets))
1076 return ChangeStatus::CHANGED;
1077 return ChangeStatus::UNCHANGED;
1080 bool forallInterferingAccesses(
1082 function_ref<
bool(
const AAPointerInfo::Access &,
bool)> CB)
1084 return State::forallInterferingAccesses(
Range, CB);
1087 bool forallInterferingAccesses(
1088 Attributor &
A,
const AbstractAttribute &QueryingAA, Instruction &
I,
1089 bool FindInterferingWrites,
bool FindInterferingReads,
1090 function_ref<
bool(
const Access &,
bool)> UserCB,
bool &HasBeenWrittenTo,
1092 function_ref<
bool(
const Access &)> SkipCB)
const override {
1093 HasBeenWrittenTo =
false;
1095 SmallPtrSet<const Access *, 8> DominatingWrites;
1103 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
1105 bool AllInSameNoSyncFn = IsAssumedNoSync;
1106 bool InstIsExecutedByInitialThreadOnly =
1107 ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(
I);
1114 bool InstIsExecutedInAlignedRegion =
1115 FindInterferingReads && ExecDomainAA &&
1116 ExecDomainAA->isExecutedInAlignedRegion(
A,
I);
1118 if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
1119 A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1121 InformationCache &InfoCache =
A.getInfoCache();
1122 bool IsThreadLocalObj =
1131 auto CanIgnoreThreadingForInst = [&](
const Instruction &
I) ->
bool {
1132 if (IsThreadLocalObj || AllInSameNoSyncFn)
1134 const auto *FnExecDomainAA =
1135 I.getFunction() == &
Scope
1137 :
A.lookupAAFor<AAExecutionDomain>(
1140 if (!FnExecDomainAA)
1142 if (InstIsExecutedInAlignedRegion ||
1143 (FindInterferingWrites &&
1144 FnExecDomainAA->isExecutedInAlignedRegion(
A,
I))) {
1145 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1148 if (InstIsExecutedByInitialThreadOnly &&
1149 FnExecDomainAA->isExecutedByInitialThreadOnly(
I)) {
1150 A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
1159 auto CanIgnoreThreading = [&](
const Access &Acc) ->
bool {
1160 return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
1161 (Acc.getRemoteInst() != Acc.getLocalInst() &&
1162 CanIgnoreThreadingForInst(*Acc.getLocalInst()));
1166 bool IsKnownNoRecurse;
1174 bool InstInKernel =
A.getInfoCache().isKernel(Scope);
1175 bool ObjHasKernelLifetime =
false;
1176 const bool UseDominanceReasoning =
1177 FindInterferingWrites && IsKnownNoRecurse;
1178 const DominatorTree *DT =
1188 case AA::GPUAddressSpace::Shared:
1189 case AA::GPUAddressSpace::Constant:
1190 case AA::GPUAddressSpace::Local:
1202 std::function<bool(
const Function &)> IsLiveInCalleeCB;
1207 const Function *AIFn = AI->getFunction();
1208 ObjHasKernelLifetime =
A.getInfoCache().isKernel(*AIFn);
1209 bool IsKnownNoRecurse;
1212 IsKnownNoRecurse)) {
1213 IsLiveInCalleeCB = [AIFn](
const Function &Fn) {
return AIFn != &Fn; };
1218 ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
1219 if (ObjHasKernelLifetime)
1220 IsLiveInCalleeCB = [&
A](
const Function &Fn) {
1221 return !
A.getInfoCache().isKernel(Fn);
1229 auto AccessCB = [&](
const Access &Acc,
bool Exact) {
1230 Function *AccScope = Acc.getRemoteInst()->getFunction();
1231 bool AccInSameScope = AccScope == &
Scope;
1235 if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
1236 A.getInfoCache().isKernel(*AccScope))
1239 if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &
I) {
1240 if (Acc.isWrite() || (
isa<LoadInst>(
I) && Acc.isWriteOrAssumption()))
1241 ExclusionSet.
insert(Acc.getRemoteInst());
1244 if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
1245 (!FindInterferingReads || !Acc.isRead()))
1248 bool Dominates = FindInterferingWrites && DT && Exact &&
1249 Acc.isMustAccess() && AccInSameScope &&
1252 DominatingWrites.
insert(&Acc);
1256 AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &
Scope;
1258 InterferingAccesses.
push_back({&Acc, Exact});
1261 if (!State::forallInterferingAccesses(
I, AccessCB,
Range))
1264 HasBeenWrittenTo = !DominatingWrites.
empty();
1268 for (
const Access *Acc : DominatingWrites) {
1269 if (!LeastDominatingWriteInst) {
1270 LeastDominatingWriteInst = Acc->getRemoteInst();
1271 }
else if (DT->
dominates(LeastDominatingWriteInst,
1272 Acc->getRemoteInst())) {
1273 LeastDominatingWriteInst = Acc->getRemoteInst();
1278 auto CanSkipAccess = [&](
const Access &Acc,
bool Exact) {
1279 if (SkipCB && SkipCB(Acc))
1281 if (!CanIgnoreThreading(Acc))
1287 bool ReadChecked = !FindInterferingReads;
1288 bool WriteChecked = !FindInterferingWrites;
1294 &ExclusionSet, IsLiveInCalleeCB))
1299 if (!WriteChecked) {
1301 &ExclusionSet, IsLiveInCalleeCB))
1302 WriteChecked =
true;
1316 if (!WriteChecked && HasBeenWrittenTo &&
1317 Acc.getRemoteInst()->getFunction() != &Scope) {
1319 const auto *FnReachabilityAA =
A.getAAFor<AAInterFnReachability>(
1321 if (FnReachabilityAA) {
1327 if (!FnReachabilityAA->instructionCanReach(
1328 A, *LeastDominatingWriteInst,
1329 *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
1330 WriteChecked =
true;
1337 if (ReadChecked && WriteChecked)
1340 if (!DT || !UseDominanceReasoning)
1342 if (!DominatingWrites.count(&Acc))
1344 return LeastDominatingWriteInst != Acc.getRemoteInst();
1349 for (
auto &It : InterferingAccesses) {
1350 if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
1351 !CanSkipAccess(*It.first, It.second)) {
1352 if (!UserCB(*It.first, It.second))
1360 const AAPointerInfo &OtherAA,
1362 using namespace AA::PointerInfo;
1364 return indicatePessimisticFixpoint();
1367 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1368 bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
1369 Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);
1372 const auto &State = OtherAAImpl.getState();
1373 for (
const auto &It : State) {
1374 for (
auto Index : It.getSecond()) {
1375 const auto &RAcc = State.getAccess(Index);
1376 if (IsByval && !RAcc.isRead())
1378 bool UsedAssumedInformation =
false;
1380 auto Content =
A.translateArgumentToCallSiteContent(
1381 RAcc.getContent(), CB, *
this, UsedAssumedInformation);
1382 AK =
AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
1383 AK =
AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
1385 Changed |= addAccess(
A, RAcc.getRanges(), CB, Content, AK,
1386 RAcc.getType(), RAcc.getRemoteInst());
1392 ChangeStatus translateAndAddState(Attributor &
A,
const AAPointerInfo &OtherAA,
1393 const OffsetInfo &Offsets, CallBase &CB,
1395 using namespace AA::PointerInfo;
1397 return indicatePessimisticFixpoint();
1399 const auto &OtherAAImpl =
static_cast<const AAPointerInfoImpl &
>(OtherAA);
1403 const auto &State = OtherAAImpl.getState();
1404 for (
const auto &It : State) {
1405 for (
auto Index : It.getSecond()) {
1406 const auto &RAcc = State.getAccess(Index);
1407 if (!IsMustAcc && RAcc.isAssumption())
1409 for (
auto Offset : Offsets) {
1413 if (!NewRanges.isUnknown()) {
1414 NewRanges.addToAllOffsets(Offset);
1419 Changed |= addAccess(
A, NewRanges, CB, RAcc.getContent(), AK,
1420 RAcc.getType(), RAcc.getRemoteInst());
1429 void trackPointerInfoStatistics(
const IRPosition &IRP)
const {}
1432 void dumpState(raw_ostream &O) {
1433 for (
auto &It : OffsetBins) {
1434 O <<
"[" << It.first.Offset <<
"-" << It.first.Offset + It.first.Size
1435 <<
"] : " << It.getSecond().size() <<
"\n";
1436 for (
auto AccIndex : It.getSecond()) {
1437 auto &Acc = AccessList[AccIndex];
1438 O <<
" - " << Acc.getKind() <<
" - " << *Acc.getLocalInst() <<
"\n";
1439 if (Acc.getLocalInst() != Acc.getRemoteInst())
1440 O <<
" --> " << *Acc.getRemoteInst()
1442 if (!Acc.isWrittenValueYetUndetermined()) {
1444 O <<
" - c: func " << Acc.getWrittenValue()->getName()
1446 else if (Acc.getWrittenValue())
1447 O <<
" - c: " << *Acc.getWrittenValue() <<
"\n";
1449 O <<
" - c: <unknown>\n";
1456struct AAPointerInfoFloating :
public AAPointerInfoImpl {
1458 AAPointerInfoFloating(
const IRPosition &IRP, Attributor &
A)
1459 : AAPointerInfoImpl(IRP,
A) {}
1462 bool handleAccess(Attributor &
A, Instruction &
I,
1463 std::optional<Value *> Content,
AccessKind Kind,
1466 using namespace AA::PointerInfo;
1468 const DataLayout &
DL =
A.getDataLayout();
1469 TypeSize AccessSize =
DL.getTypeStoreSize(&Ty);
1478 if (!VT || VT->getElementCount().isScalable() ||
1480 (*Content)->getType() != VT ||
1481 DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
1492 int64_t ElementSize =
DL.getTypeStoreSize(ElementType).getFixedValue();
1497 for (
int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
1499 ConstContent, ConstantInt::get(
Int32Ty, i));
1506 for (
auto &ElementOffset : ElementOffsets)
1507 ElementOffset += ElementSize;
1520 bool collectConstantsForGEP(Attributor &
A,
const DataLayout &
DL,
1521 OffsetInfo &UsrOI,
const OffsetInfo &PtrOI,
1522 const GEPOperator *
GEP);
1525 void trackStatistics()
const override {
1526 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1530bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &
A,
1531 const DataLayout &
DL,
1533 const OffsetInfo &PtrOI,
1534 const GEPOperator *
GEP) {
1535 unsigned BitWidth =
DL.getIndexTypeSizeInBits(
GEP->getType());
1536 SmallMapVector<Value *, APInt, 4> VariableOffsets;
1539 assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
1540 "Don't look for constant values if the offset has already been "
1541 "determined to be unknown.");
1543 if (!
GEP->collectOffset(
DL,
BitWidth, VariableOffsets, ConstantOffset)) {
1549 << (VariableOffsets.
empty() ?
"" :
"not") <<
" constant "
1553 Union.addToAll(ConstantOffset.getSExtValue());
1558 for (
const auto &VI : VariableOffsets) {
1559 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
1561 if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
1567 if (PotentialConstantsAA->undefIsContained())
1574 auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
1575 if (AssumedSet.empty())
1579 for (
const auto &ConstOffset : AssumedSet) {
1580 auto CopyPerOffset =
Union;
1581 CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
1582 VI.second.getZExtValue());
1583 Product.merge(CopyPerOffset);
1588 UsrOI = std::move(Union);
1592ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &
A) {
1593 using namespace AA::PointerInfo;
1595 const DataLayout &
DL =
A.getDataLayout();
1596 Value &AssociatedValue = getAssociatedValue();
1598 DenseMap<Value *, OffsetInfo> OffsetInfoMap;
1599 OffsetInfoMap[&AssociatedValue].
insert(0);
1601 auto HandlePassthroughUser = [&](
Value *Usr,
Value *CurPtr,
bool &Follow) {
1612 "CurPtr does not exist in the map!");
1614 auto &UsrOI = OffsetInfoMap[Usr];
1615 auto &PtrOI = OffsetInfoMap[CurPtr];
1616 assert(!PtrOI.isUnassigned() &&
1617 "Cannot pass through if the input Ptr was not visited!");
1623 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
1625 User *Usr =
U.getUser();
1626 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Analyze " << *CurPtr <<
" in " << *Usr
1629 "The current pointer offset should have been seeded!");
1630 assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
1631 "Current pointer should be assigned");
1635 return HandlePassthroughUser(Usr, CurPtr, Follow);
1637 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled constant user " << *CE
1645 auto &UsrOI = OffsetInfoMap[Usr];
1646 auto &PtrOI = OffsetInfoMap[CurPtr];
1648 if (UsrOI.isUnknown())
1651 if (PtrOI.isUnknown()) {
1657 Follow = collectConstantsForGEP(
A,
DL, UsrOI, PtrOI,
GEP);
1663 return HandlePassthroughUser(Usr, CurPtr, Follow);
1668 if (RI->getFunction() == getAssociatedFunction()) {
1669 auto &PtrOI = OffsetInfoMap[CurPtr];
1670 Changed |= setReachesReturn(PtrOI);
1683 auto &UsrOI = PhiIt->second;
1684 auto &PtrOI = OffsetInfoMap[CurPtr];
1688 if (PtrOI.isUnknown()) {
1689 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand offset unknown "
1690 << *CurPtr <<
" in " << *
PHI <<
"\n");
1691 Follow = !UsrOI.isUnknown();
1697 if (UsrOI == PtrOI) {
1698 assert(!PtrOI.isUnassigned() &&
1699 "Cannot assign if the current Ptr was not visited!");
1700 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant (so far)");
1710 auto It = OffsetInfoMap.
find(CurPtrBase);
1711 if (It == OffsetInfoMap.
end()) {
1712 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI operand is too complex "
1713 << *CurPtr <<
" in " << *
PHI
1714 <<
" (base: " << *CurPtrBase <<
")\n");
1728 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
1729 *
PHI->getFunction());
1731 auto BaseOI = It->getSecond();
1732 BaseOI.addToAll(
Offset.getZExtValue());
1733 if (IsFirstPHIUser || BaseOI == UsrOI) {
1734 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] PHI is invariant " << *CurPtr
1735 <<
" in " << *Usr <<
"\n");
1736 return HandlePassthroughUser(Usr, CurPtr, Follow);
1740 dbgs() <<
"[AAPointerInfo] PHI operand pointer offset mismatch "
1741 << *CurPtr <<
" in " << *
PHI <<
"\n");
1760 if (!handleAccess(
A, *LoadI,
nullptr, AK,
1761 OffsetInfoMap[CurPtr].Offsets,
Changed,
1767 return II->isAssumeLikeIntrinsic();
1778 }
while (FromI && FromI != ToI);
1783 auto IsValidAssume = [&](IntrinsicInst &IntrI) {
1784 if (IntrI.getIntrinsicID() != Intrinsic::assume)
1787 if (IntrI.getParent() == BB) {
1788 if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
1794 if ((*PredIt) != BB)
1799 if (SuccBB == IntrBB)
1805 if (IsImpactedInRange(LoadI->getNextNode(), BB->
getTerminator()))
1807 if (IsImpactedInRange(&IntrBB->
front(), &IntrI))
1813 std::pair<Value *, IntrinsicInst *> Assumption;
1814 for (
const Use &LoadU : LoadI->uses()) {
1816 if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
1818 for (
const Use &CmpU : CmpI->uses()) {
1820 if (!IsValidAssume(*IntrI))
1822 int Idx = CmpI->getOperandUse(0) == LoadU;
1823 Assumption = {CmpI->getOperand(Idx), IntrI};
1828 if (Assumption.first)
1833 if (!Assumption.first || !Assumption.second)
1837 << *Assumption.second <<
": " << *LoadI
1838 <<
" == " << *Assumption.first <<
"\n");
1839 bool UsedAssumedInformation =
false;
1840 std::optional<Value *> Content =
nullptr;
1841 if (Assumption.first)
1843 A.getAssumedSimplified(*Assumption.first, *
this,
1845 return handleAccess(
1846 A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
1847 OffsetInfoMap[CurPtr].Offsets,
Changed, *LoadI->getType());
1852 for (
auto *OtherOp : OtherOps) {
1853 if (OtherOp == CurPtr) {
1856 <<
"[AAPointerInfo] Escaping use in store like instruction " <<
I
1868 bool UsedAssumedInformation =
false;
1869 std::optional<Value *> Content =
nullptr;
1871 Content =
A.getAssumedSimplified(
1873 return handleAccess(
A,
I, Content, AK, OffsetInfoMap[CurPtr].Offsets,
1878 return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
1879 *StoreI->getValueOperand()->getType(),
1880 {StoreI->getValueOperand()}, AccessKind::AK_W);
1882 return HandleStoreLike(*RMWI,
nullptr, *RMWI->getValOperand()->getType(),
1883 {RMWI->getValOperand()}, AccessKind::AK_RW);
1885 return HandleStoreLike(
1886 *CXI,
nullptr, *CXI->getNewValOperand()->getType(),
1887 {CXI->getCompareOperand(), CXI->getNewValOperand()},
1894 A.getInfoCache().getTargetLibraryInfoForFunction(*CB->
getFunction());
1899 const auto *CSArgPI =
A.getAAFor<AAPointerInfo>(
1905 Changed = translateAndAddState(
A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
1908 if (!CSArgPI->reachesReturn())
1909 return isValidState();
1912 if (!Callee ||
Callee->arg_size() <= ArgNo)
1914 bool UsedAssumedInformation =
false;
1915 auto ReturnedValue =
A.getAssumedSimplified(
1920 auto *Arg =
Callee->getArg(ArgNo);
1921 if (ReturnedArg && Arg != ReturnedArg)
1923 bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
1924 const auto *CSRetPI =
A.getAAFor<AAPointerInfo>(
1928 OffsetInfo OI = OffsetInfoMap[CurPtr];
1929 CSArgPI->addReturnedOffsetsTo(OI);
1931 translateAndAddState(
A, *CSRetPI, OI, *CB, IsRetMustAcc) |
Changed;
1932 return isValidState();
1934 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Call user not handled " << *CB
1939 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] User not handled " << *Usr <<
"\n");
1942 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
1943 assert(OffsetInfoMap.
count(OldU) &&
"Old use should be known already!");
1944 assert(!OffsetInfoMap[OldU].isUnassigned() &&
"Old use should be assinged");
1945 if (OffsetInfoMap.
count(NewU)) {
1947 if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
1948 dbgs() <<
"[AAPointerInfo] Equivalent use callback failed: "
1949 << OffsetInfoMap[NewU] <<
" vs " << OffsetInfoMap[OldU]
1953 return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
1956 return HandlePassthroughUser(NewU.get(), OldU.
get(), Unused);
1958 if (!
A.checkForAllUses(UsePred, *
this, AssociatedValue,
1960 true, EquivalentUseCB)) {
1961 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Check for all uses failed, abort!\n");
1962 return indicatePessimisticFixpoint();
1966 dbgs() <<
"Accesses by bin after update:\n";
1973struct AAPointerInfoReturned final : AAPointerInfoImpl {
1974 AAPointerInfoReturned(
const IRPosition &IRP, Attributor &
A)
1975 : AAPointerInfoImpl(IRP,
A) {}
1979 return indicatePessimisticFixpoint();
1983 void trackStatistics()
const override {
1984 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1988struct AAPointerInfoArgument final : AAPointerInfoFloating {
1989 AAPointerInfoArgument(
const IRPosition &IRP, Attributor &
A)
1990 : AAPointerInfoFloating(IRP,
A) {}
1993 void trackStatistics()
const override {
1994 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
1998struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
1999 AAPointerInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2000 : AAPointerInfoFloating(IRP,
A) {}
2004 using namespace AA::PointerInfo;
2010 if (
auto Length =
MI->getLengthInBytes())
2011 LengthVal =
Length->getSExtValue();
2012 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
2015 LLVM_DEBUG(
dbgs() <<
"[AAPointerInfo] Unhandled memory intrinsic "
2017 return indicatePessimisticFixpoint();
2020 ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
2022 Changed | addAccess(
A, {0, LengthVal}, *
MI,
nullptr,
Kind,
nullptr);
2025 dbgs() <<
"Accesses by bin after update:\n";
2036 Argument *Arg = getAssociatedArgument();
2040 A.getAAFor<AAPointerInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
2041 if (ArgAA && ArgAA->getState().isValidState())
2042 return translateAndAddStateFromCallee(
A, *ArgAA,
2045 return indicatePessimisticFixpoint();
2048 bool IsKnownNoCapture;
2050 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
2051 return indicatePessimisticFixpoint();
2053 bool IsKnown =
false;
2055 return ChangeStatus::UNCHANGED;
2058 ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;
2064 void trackStatistics()
const override {
2065 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2069struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
2070 AAPointerInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2071 : AAPointerInfoFloating(IRP,
A) {}
2074 void trackStatistics()
const override {
2075 AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
2083struct AANoUnwindImpl : AANoUnwind {
2084 AANoUnwindImpl(
const IRPosition &IRP, Attributor &
A) : AANoUnwind(IRP,
A) {}
2090 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2094 const std::string getAsStr(Attributor *
A)
const override {
2095 return getAssumed() ?
"nounwind" :
"may-unwind";
2101 (unsigned)Instruction::Invoke, (
unsigned)Instruction::CallBr,
2102 (unsigned)Instruction::Call, (
unsigned)Instruction::CleanupRet,
2103 (unsigned)Instruction::CatchSwitch, (
unsigned)Instruction::Resume};
2106 if (!
I.mayThrow(
true))
2110 bool IsKnownNoUnwind;
2118 bool UsedAssumedInformation =
false;
2119 if (!
A.checkForAllInstructions(CheckForNoUnwind, *
this, Opcodes,
2120 UsedAssumedInformation))
2121 return indicatePessimisticFixpoint();
2123 return ChangeStatus::UNCHANGED;
2127struct AANoUnwindFunction final :
public AANoUnwindImpl {
2128 AANoUnwindFunction(
const IRPosition &IRP, Attributor &
A)
2129 : AANoUnwindImpl(IRP,
A) {}
2136struct AANoUnwindCallSite final
2137 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
2138 AANoUnwindCallSite(
const IRPosition &IRP, Attributor &
A)
2139 : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP,
A) {}
2150 case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
2151 case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
2152 case Intrinsic::nvvm_barrier_cta_red_and_aligned_all:
2153 case Intrinsic::nvvm_barrier_cta_red_and_aligned_count:
2154 case Intrinsic::nvvm_barrier_cta_red_or_aligned_all:
2155 case Intrinsic::nvvm_barrier_cta_red_or_aligned_count:
2156 case Intrinsic::nvvm_barrier_cta_red_popc_aligned_all:
2157 case Intrinsic::nvvm_barrier_cta_red_popc_aligned_count:
2159 case Intrinsic::amdgcn_s_barrier:
2160 if (ExecutedAligned)
2183 switch (
I->getOpcode()) {
2184 case Instruction::AtomicRMW:
2187 case Instruction::Store:
2190 case Instruction::Load:
2195 "New atomic operations need to be known in the attributor.");
2207 return !
MI->isVolatile();
2223 const std::string getAsStr(Attributor *
A)
const override {
2224 return getAssumed() ?
"nosync" :
"may-sync";
2240 if (
I.mayReadOrWriteMemory())
2254 bool UsedAssumedInformation =
false;
2255 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2256 UsedAssumedInformation) ||
2257 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2258 UsedAssumedInformation))
2259 return indicatePessimisticFixpoint();
2264struct AANoSyncFunction final :
public AANoSyncImpl {
2265 AANoSyncFunction(
const IRPosition &IRP, Attributor &
A)
2266 : AANoSyncImpl(IRP,
A) {}
2273struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2274 AANoSyncCallSite(
const IRPosition &IRP, Attributor &
A)
2275 : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP,
A) {}
2285struct AANoFreeImpl :
public AANoFree {
2286 AANoFreeImpl(
const IRPosition &IRP, Attributor &
A) : AANoFree(IRP,
A) {}
2292 DepClassTy::NONE, IsKnown));
2302 DepClassTy::REQUIRED, IsKnown);
2305 bool UsedAssumedInformation =
false;
2306 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2307 UsedAssumedInformation))
2308 return indicatePessimisticFixpoint();
2309 return ChangeStatus::UNCHANGED;
2313 const std::string getAsStr(Attributor *
A)
const override {
2314 return getAssumed() ?
"nofree" :
"may-free";
2318struct AANoFreeFunction final :
public AANoFreeImpl {
2319 AANoFreeFunction(
const IRPosition &IRP, Attributor &
A)
2320 : AANoFreeImpl(IRP,
A) {}
2327struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2328 AANoFreeCallSite(
const IRPosition &IRP, Attributor &
A)
2329 : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP,
A) {}
2336struct AANoFreeFloating : AANoFreeImpl {
2337 AANoFreeFloating(
const IRPosition &IRP, Attributor &
A)
2338 : AANoFreeImpl(IRP,
A) {}
2345 const IRPosition &IRP = getIRPosition();
2350 DepClassTy::OPTIONAL, IsKnown))
2351 return ChangeStatus::UNCHANGED;
2353 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2354 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2366 DepClassTy::REQUIRED, IsKnown);
2383 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2384 return indicatePessimisticFixpoint();
2386 return ChangeStatus::UNCHANGED;
2391struct AANoFreeArgument final : AANoFreeFloating {
2392 AANoFreeArgument(
const IRPosition &IRP, Attributor &
A)
2393 : AANoFreeFloating(IRP,
A) {}
2400struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2401 AANoFreeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2402 : AANoFreeFloating(IRP,
A) {}
2410 Argument *Arg = getAssociatedArgument();
2412 return indicatePessimisticFixpoint();
2416 DepClassTy::REQUIRED, IsKnown))
2417 return ChangeStatus::UNCHANGED;
2418 return indicatePessimisticFixpoint();
2426struct AANoFreeReturned final : AANoFreeFloating {
2427 AANoFreeReturned(
const IRPosition &IRP, Attributor &
A)
2428 : AANoFreeFloating(IRP,
A) {
2443 void trackStatistics()
const override {}
2447struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2448 AANoFreeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2449 : AANoFreeFloating(IRP,
A) {}
2452 return ChangeStatus::UNCHANGED;
2463 bool IgnoreSubsumingPositions) {
2465 AttrKinds.
push_back(Attribute::NonNull);
2468 AttrKinds.
push_back(Attribute::Dereferenceable);
2469 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2476 if (!Fn->isDeclaration()) {
2486 bool UsedAssumedInformation =
false;
2487 if (!
A.checkForAllInstructions(
2489 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2493 UsedAssumedInformation,
false,
true))
2505 Attribute::NonNull)});
2510static int64_t getKnownNonNullAndDerefBytesForUse(
2511 Attributor &
A,
const AbstractAttribute &QueryingAA,
Value &AssociatedValue,
2512 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2515 const Value *UseV =
U->get();
2536 const DataLayout &
DL =
A.getInfoCache().getDL();
2540 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2557 bool IsKnownNonNull;
2560 IsNonNull |= IsKnownNonNull;
2563 return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;
2567 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2568 Loc->Size.isScalable() ||
I->isVolatile())
2574 if (
Base &&
Base == &AssociatedValue) {
2575 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2577 return std::max(int64_t(0), DerefBytes);
2584 int64_t DerefBytes = Loc->Size.getValue();
2586 return std::max(int64_t(0), DerefBytes);
2592struct AANonNullImpl : AANonNull {
2593 AANonNullImpl(
const IRPosition &IRP, Attributor &
A) : AANonNull(IRP,
A) {}
2597 Value &
V = *getAssociatedValue().stripPointerCasts();
2599 indicatePessimisticFixpoint();
2603 if (Instruction *CtxI = getCtxI())
2604 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2608 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
2609 AANonNull::StateType &State) {
2610 bool IsNonNull =
false;
2611 bool TrackUse =
false;
2612 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2613 IsNonNull, TrackUse);
2614 State.setKnown(IsNonNull);
2619 const std::string getAsStr(Attributor *
A)
const override {
2620 return getAssumed() ?
"nonnull" :
"may-null";
2625struct AANonNullFloating :
public AANonNullImpl {
2626 AANonNullFloating(
const IRPosition &IRP, Attributor &
A)
2627 : AANonNullImpl(IRP,
A) {}
2631 auto CheckIRP = [&](
const IRPosition &IRP) {
2632 bool IsKnownNonNull;
2634 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2638 bool UsedAssumedInformation =
false;
2639 Value *AssociatedValue = &getAssociatedValue();
2641 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2646 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2652 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2653 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2656 return ChangeStatus::UNCHANGED;
2660 DepClassTy::OPTIONAL, IsKnown) &&
2663 DepClassTy::OPTIONAL, IsKnown))
2664 return ChangeStatus::UNCHANGED;
2671 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2672 return indicatePessimisticFixpoint();
2673 return ChangeStatus::UNCHANGED;
2676 for (
const auto &VAC : Values)
2678 return indicatePessimisticFixpoint();
2680 return ChangeStatus::UNCHANGED;
2688struct AANonNullReturned final
2689 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2690 false, AANonNull::IRAttributeKind, false> {
2691 AANonNullReturned(
const IRPosition &IRP, Attributor &
A)
2692 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2697 const std::string getAsStr(Attributor *
A)
const override {
2698 return getAssumed() ?
"nonnull" :
"may-null";
2706struct AANonNullArgument final
2707 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2708 AANonNullArgument(
const IRPosition &IRP, Attributor &
A)
2709 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP,
A) {}
2715struct AANonNullCallSiteArgument final : AANonNullFloating {
2716 AANonNullCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2717 : AANonNullFloating(IRP,
A) {}
2724struct AANonNullCallSiteReturned final
2725 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2726 AANonNullCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2727 : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP,
A) {}
2736struct AAMustProgressImpl :
public AAMustProgress {
2737 AAMustProgressImpl(
const IRPosition &IRP, Attributor &
A)
2738 : AAMustProgress(IRP,
A) {}
2744 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2749 const std::string getAsStr(Attributor *
A)
const override {
2750 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2754struct AAMustProgressFunction final : AAMustProgressImpl {
2755 AAMustProgressFunction(
const IRPosition &IRP, Attributor &
A)
2756 : AAMustProgressImpl(IRP,
A) {}
2762 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2764 return indicateOptimisticFixpoint();
2765 return ChangeStatus::UNCHANGED;
2768 auto CheckForMustProgress = [&](AbstractCallSite ACS) {
2770 bool IsKnownMustProgress;
2772 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2776 bool AllCallSitesKnown =
true;
2777 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2780 return indicatePessimisticFixpoint();
2782 return ChangeStatus::UNCHANGED;
2786 void trackStatistics()
const override {
2792struct AAMustProgressCallSite final : AAMustProgressImpl {
2793 AAMustProgressCallSite(
const IRPosition &IRP, Attributor &
A)
2794 : AAMustProgressImpl(IRP,
A) {}
2803 bool IsKnownMustProgress;
2805 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2806 return indicatePessimisticFixpoint();
2807 return ChangeStatus::UNCHANGED;
2811 void trackStatistics()
const override {
2820struct AANoRecurseImpl :
public AANoRecurse {
2821 AANoRecurseImpl(
const IRPosition &IRP, Attributor &
A) : AANoRecurse(IRP,
A) {}
2827 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2832 const std::string getAsStr(Attributor *
A)
const override {
2833 return getAssumed() ?
"norecurse" :
"may-recurse";
2837struct AANoRecurseFunction final : AANoRecurseImpl {
2838 AANoRecurseFunction(
const IRPosition &IRP, Attributor &
A)
2839 : AANoRecurseImpl(IRP,
A) {}
2845 auto CallSitePred = [&](AbstractCallSite ACS) {
2846 bool IsKnownNoRecurse;
2850 DepClassTy::NONE, IsKnownNoRecurse))
2852 return IsKnownNoRecurse;
2854 bool UsedAssumedInformation =
false;
2855 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2856 UsedAssumedInformation)) {
2862 if (!UsedAssumedInformation)
2863 indicateOptimisticFixpoint();
2864 return ChangeStatus::UNCHANGED;
2867 const AAInterFnReachability *EdgeReachability =
2868 A.getAAFor<AAInterFnReachability>(*
this, getIRPosition(),
2869 DepClassTy::REQUIRED);
2870 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2871 return indicatePessimisticFixpoint();
2872 return ChangeStatus::UNCHANGED;
2879struct AANoRecurseCallSite final
2880 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2881 AANoRecurseCallSite(
const IRPosition &IRP, Attributor &
A)
2882 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2892struct AANonConvergentImpl :
public AANonConvergent {
2893 AANonConvergentImpl(
const IRPosition &IRP, Attributor &
A)
2894 : AANonConvergent(IRP,
A) {}
2897 const std::string getAsStr(Attributor *
A)
const override {
2898 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2902struct AANonConvergentFunction final : AANonConvergentImpl {
2903 AANonConvergentFunction(
const IRPosition &IRP, Attributor &
A)
2904 : AANonConvergentImpl(IRP,
A) {}
2910 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2913 if (!Callee ||
Callee->isIntrinsic()) {
2916 if (
Callee->isDeclaration()) {
2917 return !
Callee->hasFnAttribute(Attribute::Convergent);
2919 const auto *ConvergentAA =
A.getAAFor<AANonConvergent>(
2921 return ConvergentAA && ConvergentAA->isAssumedNotConvergent();
2924 bool UsedAssumedInformation =
false;
2925 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2926 UsedAssumedInformation)) {
2927 return indicatePessimisticFixpoint();
2929 return ChangeStatus::UNCHANGED;
2933 if (isKnownNotConvergent() &&
2934 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2935 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2936 return ChangeStatus::CHANGED;
2938 return ChangeStatus::UNCHANGED;
2948struct AAUndefinedBehaviorImpl :
public AAUndefinedBehavior {
2949 AAUndefinedBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
2950 : AAUndefinedBehavior(IRP,
A) {}
2955 const size_t UBPrevSize = KnownUBInsts.size();
2956 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2960 if (
I.isVolatile() &&
I.mayWriteToMemory())
2964 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2973 "Expected pointer operand of memory accessing instruction");
2977 std::optional<Value *> SimplifiedPtrOp =
2978 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2979 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2981 const Value *PtrOpVal = *SimplifiedPtrOp;
2987 AssumedNoUBInsts.insert(&
I);
2999 AssumedNoUBInsts.insert(&
I);
3001 KnownUBInsts.insert(&
I);
3010 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3018 std::optional<Value *> SimplifiedCond =
3019 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3020 if (!SimplifiedCond || !*SimplifiedCond)
3022 AssumedNoUBInsts.insert(&
I);
3030 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3039 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3045 if (idx >=
Callee->arg_size())
3057 bool IsKnownNoUndef;
3059 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3060 if (!IsKnownNoUndef)
3062 bool UsedAssumedInformation =
false;
3063 std::optional<Value *> SimplifiedVal =
3066 if (UsedAssumedInformation)
3068 if (SimplifiedVal && !*SimplifiedVal)
3071 KnownUBInsts.insert(&
I);
3077 bool IsKnownNonNull;
3079 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3081 KnownUBInsts.insert(&
I);
3090 std::optional<Value *> SimplifiedRetValue =
3091 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3092 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3110 bool IsKnownNonNull;
3115 KnownUBInsts.insert(&
I);
3121 bool UsedAssumedInformation =
false;
3122 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3123 {Instruction::Load, Instruction::Store,
3124 Instruction::AtomicCmpXchg,
3125 Instruction::AtomicRMW},
3126 UsedAssumedInformation,
3128 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::CondBr},
3129 UsedAssumedInformation,
3131 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3132 UsedAssumedInformation);
3136 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3138 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3139 bool IsKnownNoUndef;
3141 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3143 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3144 {Instruction::Ret}, UsedAssumedInformation,
3149 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3150 UBPrevSize != KnownUBInsts.size())
3151 return ChangeStatus::CHANGED;
3152 return ChangeStatus::UNCHANGED;
3155 bool isKnownToCauseUB(Instruction *
I)
const override {
3156 return KnownUBInsts.count(
I);
3159 bool isAssumedToCauseUB(Instruction *
I)
const override {
3166 switch (
I->getOpcode()) {
3167 case Instruction::Load:
3168 case Instruction::Store:
3169 case Instruction::AtomicCmpXchg:
3170 case Instruction::AtomicRMW:
3171 case Instruction::CondBr:
3172 return !AssumedNoUBInsts.count(
I);
3180 if (KnownUBInsts.empty())
3181 return ChangeStatus::UNCHANGED;
3182 for (Instruction *
I : KnownUBInsts)
3183 A.changeToUnreachableAfterManifest(
I);
3184 return ChangeStatus::CHANGED;
3188 const std::string getAsStr(Attributor *
A)
const override {
3189 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3217 SmallPtrSet<Instruction *, 8> KnownUBInsts;
3221 SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;
3232 std::optional<Value *> stopOnUndefOrAssumed(Attributor &
A,
Value *V,
3234 bool UsedAssumedInformation =
false;
3235 std::optional<Value *> SimplifiedV =
3238 if (!UsedAssumedInformation) {
3243 KnownUBInsts.insert(
I);
3244 return std::nullopt;
3251 KnownUBInsts.insert(
I);
3252 return std::nullopt;
3258struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3259 AAUndefinedBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
3260 : AAUndefinedBehaviorImpl(IRP,
A) {}
3263 void trackStatistics()
const override {
3264 STATS_DECL(UndefinedBehaviorInstruction, Instruction,
3265 "Number of instructions known to have UB");
3267 KnownUBInsts.size();
3278static bool mayContainUnboundedCycle(Function &
F, Attributor &
A) {
3279 ScalarEvolution *SE =
3280 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
F);
3281 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
F);
3287 for (scc_iterator<Function *> SCCI =
scc_begin(&
F); !SCCI.isAtEnd(); ++SCCI)
3288 if (SCCI.hasCycle())
3298 for (
auto *L : LI->getLoopsInPreorder()) {
3305struct AAWillReturnImpl :
public AAWillReturn {
3306 AAWillReturnImpl(
const IRPosition &IRP, Attributor &
A)
3307 : AAWillReturn(IRP,
A) {}
3313 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3318 bool isImpliedByMustprogressAndReadonly(Attributor &
A,
bool KnownOnly) {
3319 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3324 return IsKnown || !KnownOnly;
3330 if (isImpliedByMustprogressAndReadonly(
A,
false))
3331 return ChangeStatus::UNCHANGED;
3337 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3343 bool IsKnownNoRecurse;
3345 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3348 bool UsedAssumedInformation =
false;
3349 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3350 UsedAssumedInformation))
3351 return indicatePessimisticFixpoint();
3353 return ChangeStatus::UNCHANGED;
3357 const std::string getAsStr(Attributor *
A)
const override {
3358 return getAssumed() ?
"willreturn" :
"may-noreturn";
3362struct AAWillReturnFunction final : AAWillReturnImpl {
3363 AAWillReturnFunction(
const IRPosition &IRP, Attributor &
A)
3364 : AAWillReturnImpl(IRP,
A) {}
3368 AAWillReturnImpl::initialize(
A);
3371 assert(
F &&
"Did expect an anchor function");
3372 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3373 indicatePessimisticFixpoint();
3381struct AAWillReturnCallSite final
3382 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3383 AAWillReturnCallSite(
const IRPosition &IRP, Attributor &
A)
3384 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3388 if (isImpliedByMustprogressAndReadonly(
A,
false))
3389 return ChangeStatus::UNCHANGED;
3391 return AACalleeToCallSite::updateImpl(
A);
3413 const ToTy *
To =
nullptr;
3440 if (!ES || ES->
empty()) {
3441 ExclusionSet = nullptr;
3442 }
else if (MakeUnique) {
3443 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3468 if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
3470 return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);
3474#define DefineKeys(ToTy) \
3476 ReachabilityQueryInfo<ToTy> \
3477 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3478 ReachabilityQueryInfo<ToTy>( \
3479 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3480 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3482 ReachabilityQueryInfo<ToTy> \
3483 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3484 ReachabilityQueryInfo<ToTy>( \
3485 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3486 DenseMapInfo<const ToTy *>::getTombstoneKey());
3495template <
typename BaseTy,
typename ToTy>
3496struct CachedReachabilityAA :
public BaseTy {
3497 using RQITy = ReachabilityQueryInfo<ToTy>;
3499 CachedReachabilityAA(
const IRPosition &IRP, Attributor &
A) : BaseTy(IRP,
A) {}
3502 bool isQueryAA()
const override {
return true; }
3507 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3508 RQITy *RQI = QueryVector[
u];
3509 if (RQI->Result == RQITy::Reachable::No &&
3511 Changed = ChangeStatus::CHANGED;
3517 bool IsTemporaryRQI) = 0;
3519 bool rememberResult(Attributor &
A,
typename RQITy::Reachable
Result,
3520 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3525 QueryCache.erase(&RQI);
3531 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3532 RQITy PlainRQI(RQI.From, RQI.To);
3533 if (!QueryCache.count(&PlainRQI)) {
3534 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3536 QueryVector.push_back(RQIPtr);
3537 QueryCache.insert(RQIPtr);
3542 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3543 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3544 "Did not expect empty set!");
3545 RQITy *RQIPtr =
new (
A.Allocator)
3546 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3547 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3549 assert(!QueryCache.count(RQIPtr));
3550 QueryVector.push_back(RQIPtr);
3551 QueryCache.insert(RQIPtr);
3554 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3555 A.registerForUpdate(*
this);
3556 return Result == RQITy::Reachable::Yes;
3559 const std::string getAsStr(Attributor *
A)
const override {
3561 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3564 bool checkQueryCache(Attributor &
A, RQITy &StackRQI,
3565 typename RQITy::Reachable &
Result) {
3566 if (!this->getState().isValidState()) {
3567 Result = RQITy::Reachable::Yes;
3573 if (StackRQI.ExclusionSet) {
3574 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3575 auto It = QueryCache.find(&PlainRQI);
3576 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3577 Result = RQITy::Reachable::No;
3582 auto It = QueryCache.find(&StackRQI);
3583 if (It != QueryCache.end()) {
3590 QueryCache.insert(&StackRQI);
3596 DenseSet<RQITy *> QueryCache;
3599struct AAIntraFnReachabilityFunction final
3600 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3601 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3602 AAIntraFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
3604 DT =
A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(
3608 bool isAssumedReachable(
3609 Attributor &
A,
const Instruction &From,
const Instruction &To,
3611 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3615 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
3617 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3618 return NonConstThis->isReachableImpl(
A, StackRQI,
3620 return Result == RQITy::Reachable::Yes;
3627 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3630 [&](
const auto &DeadEdge) {
3631 return LivenessAA->isEdgeDead(DeadEdge.first,
3635 return LivenessAA->isAssumedDead(BB);
3637 return ChangeStatus::UNCHANGED;
3641 return Base::updateImpl(
A);
3645 bool IsTemporaryRQI)
override {
3647 bool UsedExclusionSet =
false;
3652 while (IP && IP != &To) {
3653 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3654 UsedExclusionSet =
true;
3662 const BasicBlock *FromBB = RQI.From->getParent();
3663 const BasicBlock *ToBB = RQI.To->getParent();
3665 "Not an intra-procedural query!");
3669 if (FromBB == ToBB &&
3670 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3671 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3676 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3677 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3681 SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
3682 if (RQI.ExclusionSet)
3683 for (
auto *
I : *RQI.ExclusionSet)
3684 if (
I->getFunction() == Fn)
3685 ExclusionBlocks.
insert(
I->getParent());
3688 if (ExclusionBlocks.
count(FromBB) &&
3691 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3694 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3695 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3696 DeadBlocks.insert(ToBB);
3697 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3701 SmallPtrSet<const BasicBlock *, 16> Visited;
3705 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
3706 while (!Worklist.
empty()) {
3708 if (!Visited.
insert(BB).second)
3710 for (
const BasicBlock *SuccBB :
successors(BB)) {
3711 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3712 LocalDeadEdges.
insert({BB, SuccBB});
3717 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3720 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3723 if (ExclusionBlocks.
count(SuccBB)) {
3724 UsedExclusionSet =
true;
3731 DeadEdges.insert_range(LocalDeadEdges);
3732 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3737 void trackStatistics()
const override {}
3742 DenseSet<const BasicBlock *> DeadBlocks;
3746 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;
3749 const DominatorTree *DT =
nullptr;
3757 bool IgnoreSubsumingPositions) {
3758 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3759 "Unexpected attribute kind");
3765 IgnoreSubsumingPositions =
true;
3776 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3777 IgnoreSubsumingPositions, Attribute::NoAlias))
3787 "Noalias is a pointer attribute");
3790 const std::string getAsStr(
Attributor *
A)
const override {
3791 return getAssumed() ?
"noalias" :
"may-alias";
3796struct AANoAliasFloating final : AANoAliasImpl {
3797 AANoAliasFloating(
const IRPosition &IRP, Attributor &
A)
3798 : AANoAliasImpl(IRP,
A) {}
3803 return indicatePessimisticFixpoint();
3807 void trackStatistics()
const override {
3813struct AANoAliasArgument final
3814 : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
3815 using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
3816 AANoAliasArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
3829 DepClassTy::OPTIONAL, IsKnownNoSycn))
3830 return Base::updateImpl(
A);
3835 return Base::updateImpl(
A);
3839 bool UsedAssumedInformation =
false;
3840 if (
A.checkForAllCallSites(
3841 [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *
this,
3842 true, UsedAssumedInformation))
3843 return Base::updateImpl(
A);
3851 return indicatePessimisticFixpoint();
3858struct AANoAliasCallSiteArgument final : AANoAliasImpl {
3859 AANoAliasCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
3860 : AANoAliasImpl(IRP,
A) {}
3864 bool mayAliasWithArgument(Attributor &
A, AAResults *&AAR,
3865 const AAMemoryBehavior &MemBehaviorAA,
3866 const CallBase &CB,
unsigned OtherArgNo) {
3868 if (this->getCalleeArgNo() == (
int)OtherArgNo)
3876 auto *CBArgMemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
3880 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
3881 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3888 if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
3890 A.recordDependence(MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3891 A.recordDependence(*CBArgMemBehaviorAA, *
this, DepClassTy::OPTIONAL);
3897 AAR =
A.getInfoCache().getAnalysisResultForFunction<AAManager>(
3901 bool IsAliasing = !AAR || !AAR->
isNoAlias(&getAssociatedValue(), ArgOp);
3903 "callsite arguments: "
3904 << getAssociatedValue() <<
" " << *ArgOp <<
" => "
3905 << (IsAliasing ?
"" :
"no-") <<
"alias \n");
3910 bool isKnownNoAliasDueToNoAliasPreservation(
3911 Attributor &
A, AAResults *&AAR,
const AAMemoryBehavior &MemBehaviorAA) {
3924 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
3940 bool IsKnownNoCapture;
3943 DepClassTy::OPTIONAL, IsKnownNoCapture))
3949 A, *UserI, *getCtxI(), *
this,
nullptr,
3950 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
3965 LLVM_DEBUG(
dbgs() <<
"[AANoAliasCSArg] Unknown user: " << *UserI <<
"\n");
3969 bool IsKnownNoCapture;
3970 const AANoCapture *NoCaptureAA =
nullptr;
3972 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
3973 if (!IsAssumedNoCapture &&
3975 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue())) {
3977 dbgs() <<
"[AANoAliasCSArg] " << getAssociatedValue()
3978 <<
" cannot be noalias as it is potentially captured\n");
3983 A.recordDependence(*NoCaptureAA, *
this, DepClassTy::OPTIONAL);
3989 for (
unsigned OtherArgNo = 0; OtherArgNo < CB.
arg_size(); OtherArgNo++)
3990 if (mayAliasWithArgument(
A, AAR, MemBehaviorAA, CB, OtherArgNo))
4000 auto *MemBehaviorAA =
4001 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
4003 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
4004 return ChangeStatus::UNCHANGED;
4007 bool IsKnownNoAlias;
4010 A,
this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
4012 <<
" is not no-alias at the definition\n");
4013 return indicatePessimisticFixpoint();
4016 AAResults *AAR =
nullptr;
4017 if (MemBehaviorAA &&
4018 isKnownNoAliasDueToNoAliasPreservation(
A, AAR, *MemBehaviorAA)) {
4020 dbgs() <<
"[AANoAlias] No-Alias deduced via no-alias preservation\n");
4021 return ChangeStatus::UNCHANGED;
4024 return indicatePessimisticFixpoint();
4032struct AANoAliasReturned final : AANoAliasImpl {
4033 AANoAliasReturned(
const IRPosition &IRP, Attributor &
A)
4034 : AANoAliasImpl(IRP,
A) {}
4039 auto CheckReturnValue = [&](
Value &RV) ->
bool {
4050 bool IsKnownNoAlias;
4052 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
4055 bool IsKnownNoCapture;
4056 const AANoCapture *NoCaptureAA =
nullptr;
4058 A,
this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
4060 return IsAssumedNoCapture ||
4064 if (!
A.checkForAllReturnedValues(CheckReturnValue, *
this))
4065 return indicatePessimisticFixpoint();
4067 return ChangeStatus::UNCHANGED;
4075struct AANoAliasCallSiteReturned final
4076 : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
4077 AANoAliasCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4078 : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP,
A) {}
4088struct AAIsDeadValueImpl :
public AAIsDead {
4089 AAIsDeadValueImpl(
const IRPosition &IRP, Attributor &
A) : AAIsDead(IRP,
A) {}
4092 bool isAssumedDead()
const override {
return isAssumed(IS_DEAD); }
4095 bool isKnownDead()
const override {
return isKnown(IS_DEAD); }
4098 bool isAssumedDead(
const BasicBlock *BB)
const override {
return false; }
4101 bool isKnownDead(
const BasicBlock *BB)
const override {
return false; }
4104 bool isAssumedDead(
const Instruction *
I)
const override {
4105 return I == getCtxI() && isAssumedDead();
4109 bool isKnownDead(
const Instruction *
I)
const override {
4110 return isAssumedDead(
I) && isKnownDead();
4114 const std::string getAsStr(Attributor *
A)
const override {
4115 return isAssumedDead() ?
"assumed-dead" :
"assumed-live";
4119 bool areAllUsesAssumedDead(Attributor &
A,
Value &V) {
4121 if (
V.getType()->isVoidTy() ||
V.use_empty())
4127 if (!
A.isRunOn(*
I->getFunction()))
4129 bool UsedAssumedInformation =
false;
4130 std::optional<Constant *>
C =
4131 A.getAssumedConstant(V, *
this, UsedAssumedInformation);
4136 auto UsePred = [&](
const Use &
U,
bool &Follow) {
return false; };
4141 return A.checkForAllUses(UsePred, *
this, V,
false,
4142 DepClassTy::REQUIRED,
4147 bool isAssumedSideEffectFree(Attributor &
A, Instruction *
I) {
4157 bool IsKnownNoUnwind;
4159 A,
this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
4167struct AAIsDeadFloating :
public AAIsDeadValueImpl {
4168 AAIsDeadFloating(
const IRPosition &IRP, Attributor &
A)
4169 : AAIsDeadValueImpl(IRP,
A) {}
4173 AAIsDeadValueImpl::initialize(
A);
4176 indicatePessimisticFixpoint();
4181 if (!isAssumedSideEffectFree(
A,
I)) {
4183 indicatePessimisticFixpoint();
4185 removeAssumedBits(HAS_NO_EFFECT);
4189 bool isDeadFence(Attributor &
A, FenceInst &FI) {
4190 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
4192 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4194 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4198 bool isDeadStore(Attributor &
A, StoreInst &SI,
4199 SmallSetVector<Instruction *, 8> *AssumeOnlyInst =
nullptr) {
4201 if (
SI.isVolatile())
4207 bool UsedAssumedInformation =
false;
4208 if (!AssumeOnlyInst) {
4209 PotentialCopies.clear();
4211 UsedAssumedInformation)) {
4214 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4218 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4219 <<
" potential copies.\n");
4221 InformationCache &InfoCache =
A.getInfoCache();
4224 UsedAssumedInformation))
4228 auto &UserI = cast<Instruction>(*U.getUser());
4229 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4231 AssumeOnlyInst->insert(&UserI);
4234 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4240 <<
" is assumed live!\n");
4246 const std::string getAsStr(Attributor *
A)
const override {
4250 return "assumed-dead-store";
4253 return "assumed-dead-fence";
4254 return AAIsDeadValueImpl::getAsStr(
A);
4261 if (!isDeadStore(
A, *SI))
4262 return indicatePessimisticFixpoint();
4264 if (!isDeadFence(
A, *FI))
4265 return indicatePessimisticFixpoint();
4267 if (!isAssumedSideEffectFree(
A,
I))
4268 return indicatePessimisticFixpoint();
4269 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4270 return indicatePessimisticFixpoint();
4275 bool isRemovableStore()
const override {
4276 return isAssumed(IS_REMOVABLE) &&
isa<StoreInst>(&getAssociatedValue());
4281 Value &
V = getAssociatedValue();
4288 SmallSetVector<Instruction *, 8> AssumeOnlyInst;
4289 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4292 A.deleteAfterManifest(*
I);
4293 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4295 for (
auto *Usr : AOI->
users())
4297 A.deleteAfterManifest(*AOI);
4303 A.deleteAfterManifest(*FI);
4307 A.deleteAfterManifest(*
I);
4315 void trackStatistics()
const override {
4321 SmallSetVector<Value *, 4> PotentialCopies;
4324struct AAIsDeadArgument :
public AAIsDeadFloating {
4325 AAIsDeadArgument(
const IRPosition &IRP, Attributor &
A)
4326 : AAIsDeadFloating(IRP,
A) {}
4330 Argument &Arg = *getAssociatedArgument();
4331 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4332 if (
A.registerFunctionSignatureRewrite(
4336 return ChangeStatus::CHANGED;
4338 return ChangeStatus::UNCHANGED;
4345struct AAIsDeadCallSiteArgument :
public AAIsDeadValueImpl {
4346 AAIsDeadCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
4347 : AAIsDeadValueImpl(IRP,
A) {}
4351 AAIsDeadValueImpl::initialize(
A);
4353 indicatePessimisticFixpoint();
4362 Argument *Arg = getAssociatedArgument();
4364 return indicatePessimisticFixpoint();
4366 auto *ArgAA =
A.getAAFor<AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4368 return indicatePessimisticFixpoint();
4377 "Expected undef values to be filtered out!");
4379 if (
A.changeUseAfterManifest(U, UV))
4380 return ChangeStatus::CHANGED;
4381 return ChangeStatus::UNCHANGED;
4388struct AAIsDeadCallSiteReturned :
public AAIsDeadFloating {
4389 AAIsDeadCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
4390 : AAIsDeadFloating(IRP,
A) {}
4393 bool isAssumedDead()
const override {
4394 return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
4399 AAIsDeadFloating::initialize(
A);
4401 indicatePessimisticFixpoint();
4406 IsAssumedSideEffectFree = isAssumedSideEffectFree(
A, getCtxI());
4412 if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(
A, getCtxI())) {
4413 IsAssumedSideEffectFree =
false;
4414 Changed = ChangeStatus::CHANGED;
4416 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4417 return indicatePessimisticFixpoint();
4422 void trackStatistics()
const override {
4423 if (IsAssumedSideEffectFree)
4430 const std::string getAsStr(Attributor *
A)
const override {
4431 return isAssumedDead()
4433 : (getAssumed() ?
"assumed-dead-users" :
"assumed-live");
4437 bool IsAssumedSideEffectFree =
true;
4440struct AAIsDeadReturned :
public AAIsDeadValueImpl {
4441 AAIsDeadReturned(
const IRPosition &IRP, Attributor &
A)
4442 : AAIsDeadValueImpl(IRP,
A) {}
4447 bool UsedAssumedInformation =
false;
4448 A.checkForAllInstructions([](Instruction &) {
return true; }, *
this,
4449 {Instruction::Ret}, UsedAssumedInformation);
4451 auto PredForCallSite = [&](AbstractCallSite ACS) {
4452 if (ACS.isCallbackCall() || !ACS.getInstruction())
4454 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4457 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4458 UsedAssumedInformation))
4459 return indicatePessimisticFixpoint();
4461 return ChangeStatus::UNCHANGED;
4467 bool AnyChange =
false;
4468 UndefValue &UV = *
UndefValue::get(getAssociatedFunction()->getReturnType());
4475 bool UsedAssumedInformation =
false;
4476 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4477 UsedAssumedInformation);
4478 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
4485struct AAIsDeadFunction :
public AAIsDead {
4486 AAIsDeadFunction(
const IRPosition &IRP, Attributor &
A) : AAIsDead(IRP,
A) {}
4491 assert(
F &&
"Did expect an anchor function");
4492 if (!isAssumedDeadInternalFunction(
A)) {
4493 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4494 assumeLive(
A,
F->getEntryBlock());
4498 bool isAssumedDeadInternalFunction(Attributor &
A) {
4499 if (!getAnchorScope()->hasLocalLinkage())
4501 bool UsedAssumedInformation =
false;
4502 return A.checkForAllCallSites([](AbstractCallSite) {
return false; }, *
this,
4503 true, UsedAssumedInformation);
4507 const std::string getAsStr(Attributor *
A)
const override {
4508 return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) +
"/" +
4509 std::to_string(getAnchorScope()->
size()) +
"][#TBEP " +
4510 std::to_string(ToBeExploredFrom.size()) +
"][#KDE " +
4511 std::to_string(KnownDeadEnds.size()) +
"]";
4516 assert(getState().isValidState() &&
4517 "Attempted to manifest an invalid state!");
4522 if (AssumedLiveBlocks.empty()) {
4523 A.deleteAfterManifest(
F);
4524 return ChangeStatus::CHANGED;
4530 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4532 KnownDeadEnds.set_union(ToBeExploredFrom);
4533 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4537 bool IsKnownNoReturn;
4545 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4547 A.changeToUnreachableAfterManifest(
4548 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4549 HasChanged = ChangeStatus::CHANGED;
4552 STATS_DECL(AAIsDead, BasicBlock,
"Number of dead basic blocks deleted.");
4553 for (BasicBlock &BB :
F)
4554 if (!AssumedLiveBlocks.count(&BB)) {
4555 A.deleteAfterManifest(BB);
4557 HasChanged = ChangeStatus::CHANGED;
4566 bool isEdgeDead(
const BasicBlock *From,
const BasicBlock *To)
const override {
4569 "Used AAIsDead of the wrong function");
4570 return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
4574 void trackStatistics()
const override {}
4577 bool isAssumedDead()
const override {
return false; }
4580 bool isKnownDead()
const override {
return false; }
4583 bool isAssumedDead(
const BasicBlock *BB)
const override {
4585 "BB must be in the same anchor scope function.");
4589 return !AssumedLiveBlocks.count(BB);
4593 bool isKnownDead(
const BasicBlock *BB)
const override {
4594 return getKnown() && isAssumedDead(BB);
4598 bool isAssumedDead(
const Instruction *
I)
const override {
4599 assert(
I->getParent()->getParent() == getAnchorScope() &&
4600 "Instruction must be in the same anchor scope function.");
4607 if (!AssumedLiveBlocks.count(
I->getParent()))
4613 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4621 bool isKnownDead(
const Instruction *
I)
const override {
4622 return getKnown() && isAssumedDead(
I);
4627 bool assumeLive(Attributor &
A,
const BasicBlock &BB) {
4628 if (!AssumedLiveBlocks.insert(&BB).second)
4635 for (
const Instruction &
I : BB)
4638 if (
F->hasLocalLinkage())
4639 A.markLiveInternalFunction(*
F);
4645 SmallSetVector<const Instruction *, 8> ToBeExploredFrom;
4648 SmallSetVector<const Instruction *, 8> KnownDeadEnds;
4651 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;
4654 DenseSet<const BasicBlock *> AssumedLiveBlocks;
4658identifyAliveSuccessors(Attributor &
A,
const CallBase &CB,
4659 AbstractAttribute &AA,
4660 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4663 bool IsKnownNoReturn;
4666 return !IsKnownNoReturn;
4675identifyAliveSuccessors(Attributor &
A,
const InvokeInst &
II,
4676 AbstractAttribute &AA,
4677 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4678 bool UsedAssumedInformation =
4684 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4685 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4689 bool IsKnownNoUnwind;
4692 UsedAssumedInformation |= !IsKnownNoUnwind;
4694 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4697 return UsedAssumedInformation;
4701identifyAliveSuccessors(Attributor &
A,
const BranchInst &BI,
4702 AbstractAttribute &AA,
4703 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4704 bool UsedAssumedInformation =
false;
4708 std::optional<Constant *>
C =
4709 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4719 UsedAssumedInformation =
false;
4722 return UsedAssumedInformation;
4726identifyAliveSuccessors(Attributor &
A,
const SwitchInst &SI,
4727 AbstractAttribute &AA,
4728 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4729 bool UsedAssumedInformation =
false;
4733 UsedAssumedInformation)) {
4735 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4740 if (Values.
empty() ||
4741 (Values.
size() == 1 &&
4744 return UsedAssumedInformation;
4747 Type &Ty = *
SI.getCondition()->getType();
4748 SmallPtrSet<ConstantInt *, 8>
Constants;
4749 auto CheckForConstantInt = [&](
Value *
V) {
4757 if (!
all_of(Values, [&](AA::ValueAndContext &VAC) {
4758 return CheckForConstantInt(VAC.
getValue());
4760 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4762 return UsedAssumedInformation;
4765 unsigned MatchedCases = 0;
4766 for (
const auto &CaseIt :
SI.cases()) {
4767 if (
Constants.count(CaseIt.getCaseValue())) {
4769 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4776 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4777 return UsedAssumedInformation;
4783 if (AssumedLiveBlocks.empty()) {
4784 if (isAssumedDeadInternalFunction(
A))
4788 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4789 assumeLive(
A,
F->getEntryBlock());
4793 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4794 << getAnchorScope()->
size() <<
"] BBs and "
4795 << ToBeExploredFrom.size() <<
" exploration points and "
4796 << KnownDeadEnds.size() <<
" known dead ends\n");
4801 ToBeExploredFrom.end());
4802 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4805 while (!Worklist.
empty()) {
4812 I =
I->getNextNode();
4814 AliveSuccessors.
clear();
4816 bool UsedAssumedInformation =
false;
4817 switch (
I->getOpcode()) {
4821 "Expected non-terminators to be handled already!");
4822 for (
const BasicBlock *SuccBB :
successors(
I->getParent()))
4825 case Instruction::Call:
4827 *
this, AliveSuccessors);
4829 case Instruction::Invoke:
4831 *
this, AliveSuccessors);
4833 case Instruction::UncondBr:
4834 case Instruction::CondBr:
4836 *
this, AliveSuccessors);
4838 case Instruction::Switch:
4840 *
this, AliveSuccessors);
4844 if (UsedAssumedInformation) {
4845 NewToBeExploredFrom.insert(
I);
4846 }
else if (AliveSuccessors.
empty() ||
4847 (
I->isTerminator() &&
4848 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4849 if (KnownDeadEnds.insert(
I))
4854 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4855 << UsedAssumedInformation <<
"\n");
4857 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4858 if (!
I->isTerminator()) {
4859 assert(AliveSuccessors.size() == 1 &&
4860 "Non-terminator expected to have a single successor!");
4864 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4865 if (AssumedLiveEdges.insert(
Edge).second)
4867 if (assumeLive(
A, *AliveSuccessor->getParent()))
4874 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4875 llvm::any_of(NewToBeExploredFrom, [&](
const Instruction *
I) {
4876 return !ToBeExploredFrom.count(I);
4879 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4888 if (ToBeExploredFrom.empty() &&
4889 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4890 llvm::all_of(KnownDeadEnds, [](
const Instruction *DeadEndI) {
4891 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4893 return indicatePessimisticFixpoint();
4898struct AAIsDeadCallSite final : AAIsDeadFunction {
4899 AAIsDeadCallSite(
const IRPosition &IRP, Attributor &
A)
4900 : AAIsDeadFunction(IRP,
A) {}
4909 "supported for call sites yet!");
4914 return indicatePessimisticFixpoint();
4918 void trackStatistics()
const override {}
4925struct AADereferenceableImpl : AADereferenceable {
4926 AADereferenceableImpl(
const IRPosition &IRP, Attributor &
A)
4927 : AADereferenceable(IRP,
A) {}
4928 using StateType = DerefState;
4932 Value &
V = *getAssociatedValue().stripPointerCasts();
4934 A.getAttrs(getIRPosition(),
4935 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4938 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4941 bool IsKnownNonNull;
4943 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4945 bool CanBeNull, CanBeFreed;
4946 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4947 A.getDataLayout(), CanBeNull, CanBeFreed));
4949 if (Instruction *CtxI = getCtxI())
4950 followUsesInMBEC(*
this,
A, getState(), *CtxI);
4955 StateType &getState()
override {
return *
this; }
4956 const StateType &getState()
const override {
return *
this; }
4960 void addAccessedBytesForUse(Attributor &
A,
const Use *U,
const Instruction *
I,
4961 DerefState &State) {
4962 const Value *UseV =
U->get();
4967 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4972 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4973 if (
Base &&
Base == &getAssociatedValue())
4974 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4978 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
4979 AADereferenceable::StateType &State) {
4980 bool IsNonNull =
false;
4981 bool TrackUse =
false;
4982 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4983 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4984 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4985 <<
" for instruction " << *
I <<
"\n");
4987 addAccessedBytesForUse(
A, U,
I, State);
4988 State.takeKnownDerefBytesMaximum(DerefBytes);
4995 bool IsKnownNonNull;
4997 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
4998 if (IsAssumedNonNull &&
4999 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
5000 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
5001 return ChangeStatus::CHANGED;
5006 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5007 SmallVectorImpl<Attribute> &Attrs)
const override {
5009 bool IsKnownNonNull;
5011 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5012 if (IsAssumedNonNull)
5013 Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
5014 Ctx, getAssumedDereferenceableBytes()));
5016 Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
5017 Ctx, getAssumedDereferenceableBytes()));
5021 const std::string getAsStr(Attributor *
A)
const override {
5022 if (!getAssumedDereferenceableBytes())
5023 return "unknown-dereferenceable";
5024 bool IsKnownNonNull;
5025 bool IsAssumedNonNull =
false;
5028 *
A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5029 return std::string(
"dereferenceable") +
5030 (IsAssumedNonNull ?
"" :
"_or_null") +
5031 (isAssumedGlobal() ?
"_globally" :
"") +
"<" +
5032 std::to_string(getKnownDereferenceableBytes()) +
"-" +
5033 std::to_string(getAssumedDereferenceableBytes()) +
">" +
5034 (!
A ?
" [non-null is unknown]" :
"");
5039struct AADereferenceableFloating : AADereferenceableImpl {
5040 AADereferenceableFloating(
const IRPosition &IRP, Attributor &
A)
5041 : AADereferenceableImpl(IRP,
A) {}
5046 bool UsedAssumedInformation =
false;
5048 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5050 Values.
push_back({getAssociatedValue(), getCtxI()});
5053 Stripped = Values.
size() != 1 ||
5054 Values.
front().getValue() != &getAssociatedValue();
5057 const DataLayout &
DL =
A.getDataLayout();
5060 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5062 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5063 APInt
Offset(IdxWidth, 0);
5068 const auto *AA =
A.getAAFor<AADereferenceable>(
5070 int64_t DerefBytes = 0;
5071 if (!AA || (!Stripped &&
this == AA)) {
5074 bool CanBeNull, CanBeFreed;
5076 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5077 T.GlobalState.indicatePessimisticFixpoint();
5080 DerefBytes =
DS.DerefBytesState.getAssumed();
5081 T.GlobalState &=
DS.GlobalState;
5087 int64_t OffsetSExt =
Offset.getSExtValue();
5091 T.takeAssumedDerefBytesMinimum(
5092 std::max(int64_t(0), DerefBytes - OffsetSExt));
5097 T.takeKnownDerefBytesMaximum(
5098 std::max(int64_t(0), DerefBytes - OffsetSExt));
5099 T.indicatePessimisticFixpoint();
5100 }
else if (OffsetSExt > 0) {
5106 T.indicatePessimisticFixpoint();
5110 return T.isValidState();
5113 for (
const auto &VAC : Values)
5114 if (!VisitValueCB(*VAC.
getValue()))
5115 return indicatePessimisticFixpoint();
5121 void trackStatistics()
const override {
5127struct AADereferenceableReturned final
5128 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5130 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5131 AADereferenceableReturned(
const IRPosition &IRP, Attributor &
A)
5135 void trackStatistics()
const override {
5141struct AADereferenceableArgument final
5142 : AAArgumentFromCallSiteArguments<AADereferenceable,
5143 AADereferenceableImpl> {
5145 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5146 AADereferenceableArgument(
const IRPosition &IRP, Attributor &
A)
5150 void trackStatistics()
const override {
5156struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5157 AADereferenceableCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5158 : AADereferenceableFloating(IRP,
A) {}
5161 void trackStatistics()
const override {
5167struct AADereferenceableCallSiteReturned final
5168 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5169 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5170 AADereferenceableCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5174 void trackStatistics()
const override {
5184static unsigned getKnownAlignForUse(Attributor &
A, AAAlign &QueryingAA,
5185 Value &AssociatedValue,
const Use *U,
5186 const Instruction *
I,
bool &TrackUse) {
5195 if (
GEP->hasAllConstantIndices())
5200 switch (
II->getIntrinsicID()) {
5201 case Intrinsic::ptrmask: {
5203 const auto *ConstVals =
A.getAAFor<AAPotentialConstantValues>(
5205 const auto *AlignAA =
A.getAAFor<AAAlign>(
5207 if (ConstVals && ConstVals->isValidState() && ConstVals->isAtFixpoint()) {
5208 unsigned ShiftValue = std::min(ConstVals->getAssumedMinTrailingZeros(),
5210 Align ConstAlign(UINT64_C(1) << ShiftValue);
5211 if (ConstAlign >= AlignAA->getKnownAlign())
5212 return Align(1).value();
5215 return AlignAA->getKnownAlign().
value();
5218 case Intrinsic::amdgcn_make_buffer_rsrc: {
5219 const auto *AlignAA =
A.getAAFor<AAAlign>(
5222 return AlignAA->getKnownAlign().
value();
5240 MA = MaybeAlign(AlignAA->getKnownAlign());
5243 const DataLayout &
DL =
A.getDataLayout();
5244 const Value *UseV =
U->get();
5246 if (
SI->getPointerOperand() == UseV)
5247 MA =
SI->getAlign();
5249 if (LI->getPointerOperand() == UseV)
5250 MA = LI->getAlign();
5252 if (AI->getPointerOperand() == UseV)
5253 MA = AI->getAlign();
5255 if (AI->getPointerOperand() == UseV)
5256 MA = AI->getAlign();
5262 unsigned Alignment = MA->value();
5266 if (
Base == &AssociatedValue) {
5271 uint32_t
gcd = std::gcd(uint32_t(
abs((int32_t)
Offset)), Alignment);
5279struct AAAlignImpl : AAAlign {
5280 AAAlignImpl(
const IRPosition &IRP, Attributor &
A) : AAAlign(IRP,
A) {}
5285 A.getAttrs(getIRPosition(), {Attribute::Alignment},
Attrs);
5287 takeKnownMaximum(Attr.getValueAsInt());
5289 Value &
V = *getAssociatedValue().stripPointerCasts();
5290 takeKnownMaximum(
V.getPointerAlignment(
A.getDataLayout()).value());
5292 if (Instruction *CtxI = getCtxI())
5293 followUsesInMBEC(*
this,
A, getState(), *CtxI);
5301 Value &AssociatedValue = getAssociatedValue();
5303 return ChangeStatus::UNCHANGED;
5305 for (
const Use &U : AssociatedValue.
uses()) {
5307 if (
SI->getPointerOperand() == &AssociatedValue)
5308 if (
SI->getAlign() < getAssumedAlign()) {
5310 "Number of times alignment added to a store");
5311 SI->setAlignment(getAssumedAlign());
5312 InstrChanged = ChangeStatus::CHANGED;
5315 if (LI->getPointerOperand() == &AssociatedValue)
5316 if (LI->getAlign() < getAssumedAlign()) {
5317 LI->setAlignment(getAssumedAlign());
5319 "Number of times alignment added to a load");
5320 InstrChanged = ChangeStatus::CHANGED;
5323 if (RMW->getPointerOperand() == &AssociatedValue) {
5324 if (RMW->getAlign() < getAssumedAlign()) {
5326 "Number of times alignment added to atomicrmw");
5328 RMW->setAlignment(getAssumedAlign());
5329 InstrChanged = ChangeStatus::CHANGED;
5333 if (CAS->getPointerOperand() == &AssociatedValue) {
5334 if (CAS->getAlign() < getAssumedAlign()) {
5336 "Number of times alignment added to cmpxchg");
5337 CAS->setAlignment(getAssumedAlign());
5338 InstrChanged = ChangeStatus::CHANGED;
5346 Align InheritAlign =
5347 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5348 if (InheritAlign >= getAssumedAlign())
5349 return InstrChanged;
5350 return Changed | InstrChanged;
5358 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5359 SmallVectorImpl<Attribute> &Attrs)
const override {
5360 if (getAssumedAlign() > 1)
5362 Attribute::getWithAlignment(Ctx,
Align(getAssumedAlign())));
5366 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
5367 AAAlign::StateType &State) {
5368 bool TrackUse =
false;
5370 unsigned int KnownAlign =
5371 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5372 State.takeKnownMaximum(KnownAlign);
5378 const std::string getAsStr(Attributor *
A)
const override {
5379 return "align<" + std::to_string(getKnownAlign().value()) +
"-" +
5380 std::to_string(getAssumedAlign().value()) +
">";
5385struct AAAlignFloating : AAAlignImpl {
5386 AAAlignFloating(
const IRPosition &IRP, Attributor &
A) : AAAlignImpl(IRP,
A) {}
5390 const DataLayout &
DL =
A.getDataLayout();
5393 bool UsedAssumedInformation =
false;
5395 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5397 Values.
push_back({getAssociatedValue(), getCtxI()});
5400 Stripped = Values.
size() != 1 ||
5401 Values.
front().getValue() != &getAssociatedValue();
5405 auto VisitValueCB = [&](
Value &
V) ->
bool {
5409 DepClassTy::REQUIRED);
5410 if (!AA || (!Stripped &&
this == AA)) {
5412 unsigned Alignment = 1;
5425 Alignment =
V.getPointerAlignment(
DL).value();
5428 T.takeKnownMaximum(Alignment);
5429 T.indicatePessimisticFixpoint();
5432 const AAAlign::StateType &
DS = AA->
getState();
5435 return T.isValidState();
5438 for (
const auto &VAC : Values) {
5439 if (!VisitValueCB(*VAC.
getValue()))
5440 return indicatePessimisticFixpoint();
5453struct AAAlignReturned final
5454 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5455 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5456 AAAlignReturned(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5463struct AAAlignArgument final
5464 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5465 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5466 AAAlignArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5473 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5474 return ChangeStatus::UNCHANGED;
5475 return Base::manifest(
A);
5482struct AAAlignCallSiteArgument final : AAAlignFloating {
5483 AAAlignCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5484 : AAAlignFloating(IRP,
A) {}
5491 if (Argument *Arg = getAssociatedArgument())
5492 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5493 return ChangeStatus::UNCHANGED;
5495 Align InheritAlign =
5496 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5497 if (InheritAlign >= getAssumedAlign())
5498 Changed = ChangeStatus::UNCHANGED;
5505 if (Argument *Arg = getAssociatedArgument()) {
5508 const auto *ArgAlignAA =
A.getAAFor<AAAlign>(
5511 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5521struct AAAlignCallSiteReturned final
5522 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5523 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5524 AAAlignCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5530 switch (
II->getIntrinsicID()) {
5531 case Intrinsic::ptrmask: {
5535 const auto *ConstVals =
A.getAAFor<AAPotentialConstantValues>(
5537 if (ConstVals && ConstVals->isValidState()) {
5538 unsigned ShiftValue =
5539 std::min(ConstVals->getAssumedMinTrailingZeros(),
5540 Value::MaxAlignmentExponent);
5541 Alignment =
Align(UINT64_C(1) << ShiftValue);
5545 const auto *AlignAA =
5547 DepClassTy::REQUIRED);
5549 Alignment = std::max(AlignAA->getAssumedAlign(), Alignment);
5556 std::min(this->getAssumedAlign(), Alignment).value());
5562 case Intrinsic::amdgcn_make_buffer_rsrc: {
5563 const auto *AlignAA =
5565 DepClassTy::REQUIRED);
5568 this->getState(), AlignAA->getAssumedAlign().
value());
5575 return Base::updateImpl(
A);
5584struct AANoReturnImpl :
public AANoReturn {
5585 AANoReturnImpl(
const IRPosition &IRP, Attributor &
A) : AANoReturn(IRP,
A) {}
5591 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
5596 const std::string getAsStr(Attributor *
A)
const override {
5597 return getAssumed() ?
"noreturn" :
"may-return";
5602 auto CheckForNoReturn = [](
Instruction &) {
return false; };
5603 bool UsedAssumedInformation =
false;
5604 if (!
A.checkForAllInstructions(CheckForNoReturn, *
this,
5605 {(unsigned)Instruction::Ret},
5606 UsedAssumedInformation))
5607 return indicatePessimisticFixpoint();
5608 return ChangeStatus::UNCHANGED;
5612struct AANoReturnFunction final : AANoReturnImpl {
5613 AANoReturnFunction(
const IRPosition &IRP, Attributor &
A)
5614 : AANoReturnImpl(IRP,
A) {}
5621struct AANoReturnCallSite final
5622 : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
5623 AANoReturnCallSite(
const IRPosition &IRP, Attributor &
A)
5624 : AACalleeToCallSite<AANoReturn, AANoReturnImpl>(IRP,
A) {}
5635struct AAInstanceInfoImpl :
public AAInstanceInfo {
5636 AAInstanceInfoImpl(
const IRPosition &IRP, Attributor &
A)
5637 : AAInstanceInfo(IRP,
A) {}
5641 Value &
V = getAssociatedValue();
5643 if (
C->isThreadDependent())
5644 indicatePessimisticFixpoint();
5646 indicateOptimisticFixpoint();
5652 indicateOptimisticFixpoint();
5657 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
5660 indicatePessimisticFixpoint();
5670 Value &
V = getAssociatedValue();
5673 Scope =
I->getFunction();
5676 if (!
Scope->hasLocalLinkage())
5680 return indicateOptimisticFixpoint();
5682 bool IsKnownNoRecurse;
5688 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5703 if (!Callee || !
Callee->hasLocalLinkage())
5707 const auto *ArgInstanceInfoAA =
A.getAAFor<AAInstanceInfo>(
5709 DepClassTy::OPTIONAL);
5710 if (!ArgInstanceInfoAA ||
5711 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5716 A, *CB, *Scope, *
this,
nullptr,
5717 [Scope](
const Function &Fn) {
return &Fn !=
Scope; }))
5724 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5726 auto *Ptr =
SI->getPointerOperand()->stripPointerCasts();
5734 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5735 DepClassTy::OPTIONAL,
5736 true, EquivalentUseCB))
5737 return indicatePessimisticFixpoint();
5743 const std::string getAsStr(Attributor *
A)
const override {
5744 return isAssumedUniqueForAnalysis() ?
"<unique [fAa]>" :
"<unknown>";
5748 void trackStatistics()
const override {}
5752struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5753 AAInstanceInfoFloating(
const IRPosition &IRP, Attributor &
A)
5754 : AAInstanceInfoImpl(IRP,
A) {}
5758struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5759 AAInstanceInfoArgument(
const IRPosition &IRP, Attributor &
A)
5760 : AAInstanceInfoFloating(IRP,
A) {}
5764struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5765 AAInstanceInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5766 : AAInstanceInfoImpl(IRP,
A) {}
5774 Argument *Arg = getAssociatedArgument();
5776 return indicatePessimisticFixpoint();
5779 A.getAAFor<AAInstanceInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
5781 return indicatePessimisticFixpoint();
5787struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5788 AAInstanceInfoReturned(
const IRPosition &IRP, Attributor &
A)
5789 : AAInstanceInfoImpl(IRP,
A) {
5805struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5806 AAInstanceInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5807 : AAInstanceInfoFloating(IRP,
A) {}
5814 bool IgnoreSubsumingPositions) {
5815 assert(ImpliedAttributeKind == Attribute::Captures &&
5816 "Unexpected attribute kind");
5826 V.getType()->getPointerAddressSpace() == 0)) {
5831 A.getAttrs(IRP, {Attribute::Captures}, Attrs,
5841 {Attribute::Captures, Attribute::ByVal}, Attrs,
5878 bool ReadOnly =
F.onlyReadsMemory();
5879 bool NoThrow =
F.doesNotThrow();
5880 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5881 if (ReadOnly && NoThrow && IsVoidReturn) {
5894 if (NoThrow && IsVoidReturn)
5899 if (!NoThrow || ArgNo < 0 ||
5900 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5903 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5904 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5905 if (U ==
unsigned(ArgNo))
5932 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5933 SmallVectorImpl<Attribute> &Attrs)
const override {
5934 if (!isAssumedNoCaptureMaybeReturned())
5937 if (isArgumentPosition()) {
5938 if (isAssumedNoCapture())
5939 Attrs.emplace_back(Attribute::get(Ctx, Attribute::Captures));
5941 Attrs.emplace_back(Attribute::get(Ctx,
"no-capture-maybe-returned"));
5946 const std::string getAsStr(Attributor *
A)
const override {
5947 if (isKnownNoCapture())
5948 return "known not-captured";
5949 if (isAssumedNoCapture())
5950 return "assumed not-captured";
5951 if (isKnownNoCaptureMaybeReturned())
5952 return "known not-captured-maybe-returned";
5953 if (isAssumedNoCaptureMaybeReturned())
5954 return "assumed not-captured-maybe-returned";
5955 return "assumed-captured";
5960 bool checkUse(Attributor &
A, AANoCapture::StateType &State,
const Use &U,
5963 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5969 return isCapturedIn(State,
true,
true,
5976 return isCapturedIn(State,
true,
true,
5982 return isCapturedIn(State,
false,
false,
5984 return isCapturedIn(State,
true,
true,
5992 return isCapturedIn(State,
true,
true,
5999 bool IsKnownNoCapture;
6000 const AANoCapture *ArgNoCaptureAA =
nullptr;
6002 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6004 if (IsAssumedNoCapture)
6005 return isCapturedIn(State,
false,
false,
6009 return isCapturedIn(State,
false,
false,
6014 return isCapturedIn(State,
true,
true,
6021 static bool isCapturedIn(AANoCapture::StateType &State,
bool CapturedInMem,
6022 bool CapturedInInt,
bool CapturedInRet) {
6023 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
6024 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
6036 const IRPosition &IRP = getIRPosition();
6040 return indicatePessimisticFixpoint();
6047 return indicatePessimisticFixpoint();
6055 T.addKnownBits(NOT_CAPTURED_IN_MEM);
6057 addKnownBits(NOT_CAPTURED_IN_MEM);
6064 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
6068 UsedAssumedInformation))
6070 bool SeenConstant =
false;
6071 for (
const AA::ValueAndContext &VAC : Values) {
6075 SeenConstant =
true;
6077 VAC.
getValue() == getAssociatedArgument())
6083 bool IsKnownNoUnwind;
6086 bool IsVoidTy =
F->getReturnType()->isVoidTy();
6087 bool UsedAssumedInformation =
false;
6088 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
6089 T.addKnownBits(NOT_CAPTURED_IN_RET);
6090 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
6092 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
6093 addKnownBits(NOT_CAPTURED_IN_RET);
6094 if (isKnown(NOT_CAPTURED_IN_MEM))
6095 return indicateOptimisticFixpoint();
6100 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
6109 return checkUse(
A,
T, U, Follow);
6112 if (!
A.checkForAllUses(UseCheck, *
this, *V))
6113 return indicatePessimisticFixpoint();
6116 auto Assumed = S.getAssumed();
6117 S.intersectAssumedBits(
T.getAssumed());
6118 if (!isAssumedNoCaptureMaybeReturned())
6119 return indicatePessimisticFixpoint();
6125struct AANoCaptureArgument final : AANoCaptureImpl {
6126 AANoCaptureArgument(
const IRPosition &IRP, Attributor &
A)
6127 : AANoCaptureImpl(IRP,
A) {}
6134struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6135 AANoCaptureCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6136 : AANoCaptureImpl(IRP,
A) {}
6144 Argument *Arg = getAssociatedArgument();
6146 return indicatePessimisticFixpoint();
6148 bool IsKnownNoCapture;
6149 const AANoCapture *ArgAA =
nullptr;
6151 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6153 return ChangeStatus::UNCHANGED;
6155 return indicatePessimisticFixpoint();
6160 void trackStatistics()
const override {
6166struct AANoCaptureFloating final : AANoCaptureImpl {
6167 AANoCaptureFloating(
const IRPosition &IRP, Attributor &
A)
6168 : AANoCaptureImpl(IRP,
A) {}
6171 void trackStatistics()
const override {
6177struct AANoCaptureReturned final : AANoCaptureImpl {
6178 AANoCaptureReturned(
const IRPosition &IRP, Attributor &
A)
6179 : AANoCaptureImpl(IRP,
A) {
6194 void trackStatistics()
const override {}
6198struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6199 AANoCaptureCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6200 : AANoCaptureImpl(IRP,
A) {}
6206 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6210 void trackStatistics()
const override {
6227 dbgs() <<
"[ValueSimplify] is assumed to be "
6230 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6242 if (getAssociatedValue().
getType()->isVoidTy())
6243 indicatePessimisticFixpoint();
6244 if (
A.hasSimplificationCallback(getIRPosition()))
6245 indicatePessimisticFixpoint();
6249 const std::string getAsStr(Attributor *
A)
const override {
6251 dbgs() <<
"SAV: " << (bool)SimplifiedAssociatedValue <<
" ";
6252 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6253 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6255 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6260 void trackStatistics()
const override {}
6263 std::optional<Value *>
6264 getAssumedSimplifiedValue(Attributor &
A)
const override {
6265 return SimplifiedAssociatedValue;
6272 static Value *ensureType(Attributor &
A,
Value &V,
Type &Ty, Instruction *CtxI,
6276 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6278 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6287 static Value *reproduceInst(Attributor &
A,
6288 const AbstractAttribute &QueryingAA,
6289 Instruction &
I,
Type &Ty, Instruction *CtxI,
6291 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6292 if (
Check && (
I.mayReadFromMemory() ||
6297 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6299 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6321 static Value *reproduceValue(Attributor &
A,
6322 const AbstractAttribute &QueryingAA,
Value &V,
6323 Type &Ty, Instruction *CtxI,
bool Check,
6325 if (
const auto &NewV = VMap.
lookup(&V))
6327 bool UsedAssumedInformation =
false;
6328 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6330 if (!SimpleV.has_value())
6334 EffectiveV = *SimpleV;
6339 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6341 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6342 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6348 Value *manifestReplacementValue(Attributor &
A, Instruction *CtxI)
const {
6349 Value *NewV = SimplifiedAssociatedValue
6350 ? *SimplifiedAssociatedValue
6352 if (NewV && NewV != &getAssociatedValue()) {
6356 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6358 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6366 bool checkAndUpdate(Attributor &
A,
const AbstractAttribute &QueryingAA,
6367 const IRPosition &IRP,
bool Simplify =
true) {
6368 bool UsedAssumedInformation =
false;
6371 QueryingValueSimplified =
A.getAssumedSimplified(
6373 return unionAssumed(QueryingValueSimplified);
6377 template <
typename AAType>
bool askSimplifiedValueFor(Attributor &
A) {
6378 if (!getAssociatedValue().
getType()->isIntegerTy())
6383 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6387 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6390 SimplifiedAssociatedValue = std::nullopt;
6391 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6394 if (
auto *
C = *COpt) {
6395 SimplifiedAssociatedValue =
C;
6396 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6402 bool askSimplifiedValueForOtherAAs(Attributor &
A) {
6403 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6405 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6413 for (
auto &U : getAssociatedValue().uses()) {
6418 IP =
PHI->getIncomingBlock(U)->getTerminator();
6419 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6421 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6422 if (
A.changeUseAfterManifest(U, *NewV))
6423 Changed = ChangeStatus::CHANGED;
6427 return Changed | AAValueSimplify::manifest(
A);
6432 SimplifiedAssociatedValue = &getAssociatedValue();
6433 return AAValueSimplify::indicatePessimisticFixpoint();
6437struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
6438 AAValueSimplifyArgument(
const IRPosition &IRP, Attributor &
A)
6439 : AAValueSimplifyImpl(IRP,
A) {}
6442 AAValueSimplifyImpl::initialize(
A);
6443 if (
A.hasAttr(getIRPosition(),
6444 {Attribute::InAlloca, Attribute::Preallocated,
6445 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6447 indicatePessimisticFixpoint();
6454 Argument *Arg = getAssociatedArgument();
6460 return indicatePessimisticFixpoint();
6463 auto Before = SimplifiedAssociatedValue;
6465 auto PredForCallSite = [&](AbstractCallSite ACS) {
6466 const IRPosition &ACSArgPos =
6477 bool UsedAssumedInformation =
false;
6478 std::optional<Constant *> SimpleArgOp =
6479 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6486 return unionAssumed(*SimpleArgOp);
6491 bool UsedAssumedInformation =
false;
6492 if (hasCallBaseContext() &&
6493 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6495 AbstractCallSite(&getCallBaseContext()->getCalledOperandUse()));
6497 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6498 UsedAssumedInformation);
6501 if (!askSimplifiedValueForOtherAAs(
A))
6502 return indicatePessimisticFixpoint();
6505 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6506 : ChangeStatus ::CHANGED;
6510 void trackStatistics()
const override {
6515struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6516 AAValueSimplifyReturned(
const IRPosition &IRP, Attributor &
A)
6517 : AAValueSimplifyImpl(IRP,
A) {}
6520 std::optional<Value *>
6521 getAssumedSimplifiedValue(Attributor &
A)
const override {
6522 if (!isValidState())
6524 return SimplifiedAssociatedValue;
6529 auto Before = SimplifiedAssociatedValue;
6533 return checkAndUpdate(
6538 bool UsedAssumedInformation =
false;
6539 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6540 UsedAssumedInformation))
6541 if (!askSimplifiedValueForOtherAAs(
A))
6542 return indicatePessimisticFixpoint();
6545 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6546 : ChangeStatus ::CHANGED;
6552 return ChangeStatus::UNCHANGED;
6556 void trackStatistics()
const override {
6561struct AAValueSimplifyFloating : AAValueSimplifyImpl {
6562 AAValueSimplifyFloating(
const IRPosition &IRP, Attributor &
A)
6563 : AAValueSimplifyImpl(IRP,
A) {}
6567 AAValueSimplifyImpl::initialize(
A);
6568 Value &
V = getAnchorValue();
6572 indicatePessimisticFixpoint();
6577 auto Before = SimplifiedAssociatedValue;
6578 if (!askSimplifiedValueForOtherAAs(
A))
6579 return indicatePessimisticFixpoint();
6582 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6583 : ChangeStatus ::CHANGED;
6587 void trackStatistics()
const override {
6592struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6593 AAValueSimplifyFunction(
const IRPosition &IRP, Attributor &
A)
6594 : AAValueSimplifyImpl(IRP,
A) {}
6598 SimplifiedAssociatedValue =
nullptr;
6599 indicateOptimisticFixpoint();
6604 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6607 void trackStatistics()
const override {
6612struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6613 AAValueSimplifyCallSite(
const IRPosition &IRP, Attributor &
A)
6614 : AAValueSimplifyFunction(IRP,
A) {}
6616 void trackStatistics()
const override {
6621struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6622 AAValueSimplifyCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6623 : AAValueSimplifyImpl(IRP,
A) {}
6626 AAValueSimplifyImpl::initialize(
A);
6627 Function *Fn = getAssociatedFunction();
6628 assert(Fn &&
"Did expect an associted function");
6629 for (Argument &Arg : Fn->
args()) {
6634 checkAndUpdate(
A, *
this, IRP))
6635 indicateOptimisticFixpoint();
6637 indicatePessimisticFixpoint();
6645 return indicatePessimisticFixpoint();
6648 void trackStatistics()
const override {
6653struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6654 AAValueSimplifyCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6655 : AAValueSimplifyFloating(IRP,
A) {}
6661 auto *FloatAA =
A.lookupAAFor<AAValueSimplify>(
6663 if (FloatAA && FloatAA->getState().isValidState())
6666 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6668 ->getArgOperandUse(getCallSiteArgNo());
6669 if (
A.changeUseAfterManifest(U, *NewV))
6670 Changed = ChangeStatus::CHANGED;
6673 return Changed | AAValueSimplify::manifest(
A);
6676 void trackStatistics()
const override {
6684struct AAHeapToStackFunction final :
public AAHeapToStack {
6686 struct AllocationInfo {
6691 LibFunc LibraryFunctionId = NotLibFunc;
6698 } Status = STACK_DUE_TO_USE;
6702 bool HasPotentiallyFreeingUnknownUses =
false;
6706 bool MoveAllocaIntoEntry =
true;
6709 SmallSetVector<CallBase *, 1> PotentialFreeCalls{};
6712 struct DeallocationInfo {
6720 bool MightFreeUnknownObjects =
false;
6723 SmallSetVector<CallBase *, 1> PotentialAllocationCalls{};
6726 AAHeapToStackFunction(
const IRPosition &IRP, Attributor &
A)
6727 : AAHeapToStack(IRP,
A) {}
6729 ~AAHeapToStackFunction()
override {
6732 for (
auto &It : AllocationInfos)
6733 It.second->~AllocationInfo();
6734 for (
auto &It : DeallocationInfos)
6735 It.second->~DeallocationInfo();
6739 AAHeapToStack::initialize(
A);
6742 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6749 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6756 auto *I8Ty = Type::getInt8Ty(CB->
getParent()->getContext());
6758 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6759 AllocationInfos[CB] = AI;
6761 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6767 bool UsedAssumedInformation =
false;
6768 bool Success =
A.checkForAllCallLikeInstructions(
6769 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6773 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6776 [](
const IRPosition &,
const AbstractAttribute *,
6777 bool &) -> std::optional<Value *> {
return nullptr; };
6778 for (
const auto &It : AllocationInfos)
6781 for (
const auto &It : DeallocationInfos)
6786 const std::string getAsStr(Attributor *
A)
const override {
6787 unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
6788 for (
const auto &It : AllocationInfos) {
6789 if (It.second->Status == AllocationInfo::INVALID)
6790 ++NumInvalidMallocs;
6794 return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) +
"/" +
6795 std::to_string(NumInvalidMallocs);
6799 void trackStatistics()
const override {
6801 MallocCalls, Function,
6802 "Number of malloc/calloc/aligned_alloc calls converted to allocas");
6803 for (
const auto &It : AllocationInfos)
6804 if (It.second->Status != AllocationInfo::INVALID)
6808 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6810 if (AllocationInfo *AI =
6811 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6812 return AI->Status != AllocationInfo::INVALID;
6816 bool isAssumedHeapToStackRemovedFree(CallBase &CB)
const override {
6817 if (!isValidState())
6820 for (
const auto &It : AllocationInfos) {
6821 AllocationInfo &AI = *It.second;
6822 if (AI.Status == AllocationInfo::INVALID)
6825 if (AI.PotentialFreeCalls.count(&CB))
6833 assert(getState().isValidState() &&
6834 "Attempted to manifest an invalid state!");
6838 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6840 for (
auto &It : AllocationInfos) {
6841 AllocationInfo &AI = *It.second;
6842 if (AI.Status == AllocationInfo::INVALID)
6845 for (CallBase *FreeCall : AI.PotentialFreeCalls) {
6846 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6847 A.deleteAfterManifest(*FreeCall);
6848 HasChanged = ChangeStatus::CHANGED;
6851 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6854 auto Remark = [&](OptimizationRemark
OR) {
6855 LibFunc IsAllocShared;
6856 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6857 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6858 return OR <<
"Moving globalized variable to the stack.";
6859 return OR <<
"Moving memory allocation from the heap to the stack.";
6861 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6862 A.emitRemark<OptimizationRemark>(AI.CB,
"OMP110",
Remark);
6864 A.emitRemark<OptimizationRemark>(AI.CB,
"HeapToStack",
Remark);
6866 const DataLayout &
DL =
A.getInfoCache().getDL();
6868 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6870 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6872 LLVMContext &Ctx = AI.CB->getContext();
6873 ObjectSizeOpts Opts;
6874 ObjectSizeOffsetEvaluator Eval(
DL, TLI, Ctx, Opts);
6875 SizeOffsetValue SizeOffsetPair = Eval.compute(AI.CB);
6882 ?
F->getEntryBlock().begin()
6883 : AI.CB->getIterator();
6886 if (MaybeAlign RetAlign = AI.CB->getRetAlign())
6887 Alignment = std::max(Alignment, *RetAlign);
6889 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *Align);
6890 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6891 "Expected an alignment during manifest!");
6893 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6897 unsigned AS =
DL.getAllocaAddrSpace();
6899 new AllocaInst(Type::getInt8Ty(
F->getContext()), AS,
Size, Alignment,
6900 AI.CB->getName() +
".h2s", IP);
6902 if (Alloca->
getType() != AI.CB->getType())
6903 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6904 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6906 auto *I8Ty = Type::getInt8Ty(
F->getContext());
6909 "Must be able to materialize initial memory state of allocation");
6914 auto *NBB =
II->getNormalDest();
6916 A.deleteAfterManifest(*AI.CB);
6918 A.deleteAfterManifest(*AI.CB);
6927 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6929 HasChanged = ChangeStatus::CHANGED;
6935 std::optional<APInt> getAPInt(Attributor &
A,
const AbstractAttribute &AA,
6937 bool UsedAssumedInformation =
false;
6938 std::optional<Constant *> SimpleV =
6939 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6941 return APInt(64, 0);
6943 return CI->getValue();
6944 return std::nullopt;
6947 std::optional<APInt>
getSize(Attributor &
A,
const AbstractAttribute &AA,
6948 AllocationInfo &AI) {
6949 auto Mapper = [&](
const Value *
V) ->
const Value * {
6950 bool UsedAssumedInformation =
false;
6951 if (std::optional<Constant *> SimpleV =
6952 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6959 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6965 MapVector<CallBase *, AllocationInfo *> AllocationInfos;
6969 MapVector<CallBase *, DeallocationInfo *> DeallocationInfos;
6974ChangeStatus AAHeapToStackFunction::updateImpl(Attributor &
A) {
6977 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6979 const auto *LivenessAA =
6982 MustBeExecutedContextExplorer *Explorer =
6983 A.getInfoCache().getMustBeExecutedContextExplorer();
6985 bool StackIsAccessibleByOtherThreads =
6986 A.getInfoCache().stackIsAccessibleByOtherThreads();
6989 A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(*F);
6990 std::optional<bool> MayContainIrreducibleControl;
6992 if (&
F->getEntryBlock() == &BB)
6994 if (!MayContainIrreducibleControl.has_value())
6996 if (*MayContainIrreducibleControl)
7005 bool HasUpdatedFrees =
false;
7007 auto UpdateFrees = [&]() {
7008 HasUpdatedFrees =
true;
7010 for (
auto &It : DeallocationInfos) {
7011 DeallocationInfo &DI = *It.second;
7014 if (DI.MightFreeUnknownObjects)
7018 bool UsedAssumedInformation =
false;
7019 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
7026 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
7027 DI.MightFreeUnknownObjects =
true;
7040 DI.MightFreeUnknownObjects =
true;
7044 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
7046 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
7048 DI.MightFreeUnknownObjects =
true;
7052 DI.PotentialAllocationCalls.insert(ObjCB);
7056 auto FreeCheck = [&](AllocationInfo &AI) {
7060 if (!StackIsAccessibleByOtherThreads) {
7065 dbgs() <<
"[H2S] found an escaping use, stack is not accessible by "
7066 "other threads and function is not nosync:\n");
7070 if (!HasUpdatedFrees)
7074 if (AI.PotentialFreeCalls.size() != 1) {
7076 << AI.PotentialFreeCalls.size() <<
"\n");
7079 CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
7080 DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
7083 dbgs() <<
"[H2S] unique free call was not known as deallocation call "
7084 << *UniqueFree <<
"\n");
7087 if (DI->MightFreeUnknownObjects) {
7089 dbgs() <<
"[H2S] unique free call might free unknown allocations\n");
7092 if (DI->PotentialAllocationCalls.empty())
7094 if (DI->PotentialAllocationCalls.size() > 1) {
7096 << DI->PotentialAllocationCalls.size()
7097 <<
" different allocations\n");
7100 if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
7103 <<
"[H2S] unique free call not known to free this allocation but "
7104 << **DI->PotentialAllocationCalls.begin() <<
"\n");
7109 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
7111 if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
7112 LLVM_DEBUG(
dbgs() <<
"[H2S] unique free call might not be executed "
7113 "with the allocation "
7114 << *UniqueFree <<
"\n");
7121 auto UsesCheck = [&](AllocationInfo &AI) {
7122 bool ValidUsesOnly =
true;
7124 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
7129 if (
SI->getValueOperand() ==
U.get()) {
7131 <<
"[H2S] escaping store to memory: " << *UserI <<
"\n");
7132 ValidUsesOnly =
false;
7141 if (DeallocationInfos.count(CB)) {
7142 AI.PotentialFreeCalls.insert(CB);
7149 bool IsKnownNoCapture;
7158 if (!IsAssumedNoCapture ||
7159 (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7160 !IsAssumedNoFree)) {
7161 AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;
7164 auto Remark = [&](OptimizationRemarkMissed ORM) {
7166 <<
"Could not move globalized variable to the stack. "
7167 "Variable is potentially captured in call. Mark "
7168 "parameter as `__attribute__((noescape))` to override.";
7171 if (ValidUsesOnly &&
7172 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
7173 A.emitRemark<OptimizationRemarkMissed>(CB,
"OMP113",
Remark);
7176 ValidUsesOnly =
false;
7189 ValidUsesOnly =
false;
7192 if (!
A.checkForAllUses(Pred, *
this, *AI.CB,
false,
7194 [&](
const Use &OldU,
const Use &NewU) {
7195 auto *SI = dyn_cast<StoreInst>(OldU.getUser());
7196 return !SI || StackIsAccessibleByOtherThreads ||
7197 AA::isAssumedThreadLocalObject(
7198 A, *SI->getPointerOperand(), *this);
7201 return ValidUsesOnly;
7206 for (
auto &It : AllocationInfos) {
7207 AllocationInfo &AI = *It.second;
7208 if (AI.Status == AllocationInfo::INVALID)
7212 std::optional<APInt> APAlign = getAPInt(
A, *
this, *Align);
7216 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown allocation alignment: " << *AI.CB
7218 AI.Status = AllocationInfo::INVALID;
7223 !APAlign->isPowerOf2()) {
7224 LLVM_DEBUG(
dbgs() <<
"[H2S] Invalid allocation alignment: " << APAlign
7226 AI.Status = AllocationInfo::INVALID;
7233 if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
7238 dbgs() <<
"[H2S] Unknown allocation size: " << *AI.CB <<
"\n";
7240 dbgs() <<
"[H2S] Allocation size too large: " << *AI.CB <<
" vs. "
7244 AI.Status = AllocationInfo::INVALID;
7250 switch (AI.Status) {
7251 case AllocationInfo::STACK_DUE_TO_USE:
7254 AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
7256 case AllocationInfo::STACK_DUE_TO_FREE:
7259 AI.Status = AllocationInfo::INVALID;
7262 case AllocationInfo::INVALID:
7269 bool IsGlobalizedLocal =
7270 AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
7271 if (AI.MoveAllocaIntoEntry &&
7272 (!
Size.has_value() ||
7273 (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
7274 AI.MoveAllocaIntoEntry =
false;
7283struct AAPrivatizablePtrImpl :
public AAPrivatizablePtr {
7284 AAPrivatizablePtrImpl(
const IRPosition &IRP, Attributor &
A)
7285 : AAPrivatizablePtr(IRP,
A), PrivatizableType(std::nullopt) {}
7288 AAPrivatizablePtr::indicatePessimisticFixpoint();
7289 PrivatizableType =
nullptr;
7290 return ChangeStatus::CHANGED;
7296 virtual std::optional<Type *> identifyPrivatizableType(Attributor &
A) = 0;
7300 std::optional<Type *> combineTypes(std::optional<Type *> T0,
7301 std::optional<Type *>
T1) {
7311 std::optional<Type *> getPrivatizableType()
const override {
7312 return PrivatizableType;
7315 const std::string getAsStr(Attributor *
A)
const override {
7316 return isAssumedPrivatizablePtr() ?
"[priv]" :
"[no-priv]";
7320 std::optional<Type *> PrivatizableType;
7325struct AAPrivatizablePtrArgument final :
public AAPrivatizablePtrImpl {
7326 AAPrivatizablePtrArgument(
const IRPosition &IRP, Attributor &
A)
7327 : AAPrivatizablePtrImpl(IRP,
A) {}
7330 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7333 bool UsedAssumedInformation =
false;
7335 A.getAttrs(getIRPosition(), {Attribute::ByVal},
Attrs,
7337 if (!
Attrs.empty() &&
7338 A.checkForAllCallSites([](AbstractCallSite ACS) { return true; }, *
this,
7339 true, UsedAssumedInformation))
7340 return Attrs[0].getValueAsType();
7342 std::optional<Type *> Ty;
7343 unsigned ArgNo = getIRPosition().getCallSiteArgNo();
7351 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7360 A.getAAFor<AAPrivatizablePtr>(*
this, ACSArgPos, DepClassTy::REQUIRED);
7363 std::optional<Type *> CSTy = PrivCSArgAA->getPrivatizableType();
7366 dbgs() <<
"[AAPrivatizablePtr] ACSPos: " << ACSArgPos <<
", CSTy: ";
7370 dbgs() <<
"<nullptr>";
7375 Ty = combineTypes(Ty, CSTy);
7378 dbgs() <<
" : New Type: ";
7380 (*Ty)->print(
dbgs());
7382 dbgs() <<
"<nullptr>";
7391 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7392 UsedAssumedInformation))
7399 PrivatizableType = identifyPrivatizableType(
A);
7400 if (!PrivatizableType)
7401 return ChangeStatus::UNCHANGED;
7402 if (!*PrivatizableType)
7403 return indicatePessimisticFixpoint();
7408 DepClassTy::OPTIONAL);
7411 if (!
A.hasAttr(getIRPosition(), Attribute::ByVal) &&
7414 return indicatePessimisticFixpoint();
7420 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7424 Function &Fn = *getIRPosition().getAnchorScope();
7426 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(Fn);
7428 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Missing TTI for function "
7430 return indicatePessimisticFixpoint();
7433 auto CallSiteCheck = [&](AbstractCallSite ACS) {
7440 bool UsedAssumedInformation =
false;
7441 if (!
A.checkForAllCallSites(CallSiteCheck, *
this,
true,
7442 UsedAssumedInformation)) {
7444 dbgs() <<
"[AAPrivatizablePtr] ABI incompatibility detected for "
7446 return indicatePessimisticFixpoint();
7450 Argument *Arg = getAssociatedArgument();
7451 if (!
A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
7453 return indicatePessimisticFixpoint();
7460 auto IsCompatiblePrivArgOfCallback = [&](CallBase &CB) {
7463 for (
const Use *U : CallbackUses) {
7464 AbstractCallSite CBACS(U);
7465 assert(CBACS && CBACS.isCallbackCall());
7466 for (Argument &CBArg : CBACS.getCalledFunction()->args()) {
7467 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7471 <<
"[AAPrivatizablePtr] Argument " << *Arg
7472 <<
"check if can be privatized in the context of its parent ("
7474 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7476 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7477 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7478 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7480 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7481 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7484 if (CBArgNo !=
int(ArgNo))
7486 const auto *CBArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7488 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7489 auto CBArgPrivTy = CBArgPrivAA->getPrivatizableType();
7492 if (*CBArgPrivTy == PrivatizableType)
7497 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7498 <<
" cannot be privatized in the context of its parent ("
7500 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7502 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7503 <<
").\n[AAPrivatizablePtr] for which the argument "
7504 "privatization is not compatible.\n";
7514 auto IsCompatiblePrivArgOfDirectCS = [&](AbstractCallSite ACS) {
7518 "Expected a direct call operand for callback call operand");
7523 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7524 <<
" check if be privatized in the context of its parent ("
7526 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7528 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7531 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7532 const auto *DCArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7534 DepClassTy::REQUIRED);
7535 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7536 auto DCArgPrivTy = DCArgPrivAA->getPrivatizableType();
7539 if (*DCArgPrivTy == PrivatizableType)
7545 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7546 <<
" cannot be privatized in the context of its parent ("
7548 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7551 <<
").\n[AAPrivatizablePtr] for which the argument "
7552 "privatization is not compatible.\n";
7560 auto IsCompatiblePrivArgOfOtherCallSite = [&](AbstractCallSite ACS) {
7564 return IsCompatiblePrivArgOfDirectCS(ACS);
7568 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7569 UsedAssumedInformation))
7570 return indicatePessimisticFixpoint();
7572 return ChangeStatus::UNCHANGED;
7578 identifyReplacementTypes(
Type *PrivType,
7579 SmallVectorImpl<Type *> &ReplacementTypes) {
7582 assert(PrivType &&
"Expected privatizable type!");
7586 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
7587 ReplacementTypes.
push_back(PrivStructType->getElementType(u));
7589 ReplacementTypes.
append(PrivArrayType->getNumElements(),
7590 PrivArrayType->getElementType());
7599 static void createInitialization(
Type *PrivType,
Value &
Base, Function &
F,
7601 assert(PrivType &&
"Expected privatizable type!");
7604 const DataLayout &
DL =
F.getDataLayout();
7608 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7609 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7612 new StoreInst(
F.getArg(ArgNo + u), Ptr, IP);
7615 Type *PointeeTy = PrivArrayType->getElementType();
7616 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7617 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7619 new StoreInst(
F.getArg(ArgNo + u), Ptr, IP);
7622 new StoreInst(
F.getArg(ArgNo), &
Base, IP);
7628 void createReplacementValues(Align Alignment,
Type *PrivType,
7630 SmallVectorImpl<Value *> &ReplacementValues) {
7632 assert(PrivType &&
"Expected privatizable type!");
7640 const StructLayout *PrivStructLayout =
DL.getStructLayout(PrivStructType);
7641 for (
unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
7642 Type *PointeeTy = PrivStructType->getElementType(u);
7645 LoadInst *
L =
new LoadInst(PointeeTy, Ptr,
"", IP->
getIterator());
7646 L->setAlignment(Alignment);
7650 Type *PointeeTy = PrivArrayType->getElementType();
7651 uint64_t PointeeTySize =
DL.getTypeStoreSize(PointeeTy);
7652 for (
unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
7654 LoadInst *
L =
new LoadInst(PointeeTy, Ptr,
"", IP->
getIterator());
7655 L->setAlignment(Alignment);
7660 L->setAlignment(Alignment);
7667 if (!PrivatizableType)
7668 return ChangeStatus::UNCHANGED;
7669 assert(*PrivatizableType &&
"Expected privatizable type!");
7675 bool UsedAssumedInformation =
false;
7676 if (!
A.checkForAllInstructions(
7677 [&](Instruction &
I) {
7678 CallInst &CI = cast<CallInst>(I);
7679 if (CI.isTailCall())
7680 TailCalls.push_back(&CI);
7683 *
this, {Instruction::Call}, UsedAssumedInformation))
7684 return ChangeStatus::UNCHANGED;
7686 Argument *Arg = getAssociatedArgument();
7689 const auto *AlignAA =
7696 [=](
const Attributor::ArgumentReplacementInfo &ARI,
7698 BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
7700 const DataLayout &
DL = IP->getDataLayout();
7701 unsigned AS =
DL.getAllocaAddrSpace();
7702 Instruction *AI =
new AllocaInst(*PrivatizableType, AS,
7703 Arg->
getName() +
".priv", IP);
7704 createInitialization(*PrivatizableType, *AI, ReplacementFn,
7705 ArgIt->getArgNo(), IP);
7708 AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
7712 for (CallInst *CI : TailCalls)
7713 CI->setTailCall(
false);
7720 [=](
const Attributor::ArgumentReplacementInfo &ARI,
7721 AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOperands) {
7724 createReplacementValues(
7725 AlignAA ? AlignAA->getAssumedAlign() :
Align(0),
7726 *PrivatizableType, ACS,
7734 identifyReplacementTypes(*PrivatizableType, ReplacementTypes);
7737 if (
A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
7738 std::move(FnRepairCB),
7739 std::move(ACSRepairCB)))
7740 return ChangeStatus::CHANGED;
7741 return ChangeStatus::UNCHANGED;
7745 void trackStatistics()
const override {
7750struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7751 AAPrivatizablePtrFloating(
const IRPosition &IRP, Attributor &
A)
7752 : AAPrivatizablePtrImpl(IRP,
A) {}
7757 indicatePessimisticFixpoint();
7762 "updateImpl will not be called");
7766 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7769 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7776 return AI->getAllocatedType();
7778 auto *PrivArgAA =
A.getAAFor<AAPrivatizablePtr>(
7780 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7781 return PrivArgAA->getPrivatizableType();
7784 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7785 "alloca nor privatizable argument: "
7791 void trackStatistics()
const override {
7796struct AAPrivatizablePtrCallSiteArgument final
7797 :
public AAPrivatizablePtrFloating {
7798 AAPrivatizablePtrCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
7799 : AAPrivatizablePtrFloating(IRP,
A) {}
7803 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7804 indicateOptimisticFixpoint();
7809 PrivatizableType = identifyPrivatizableType(
A);
7810 if (!PrivatizableType)
7811 return ChangeStatus::UNCHANGED;
7812 if (!*PrivatizableType)
7813 return indicatePessimisticFixpoint();
7815 const IRPosition &IRP = getIRPosition();
7816 bool IsKnownNoCapture;
7818 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7819 if (!IsAssumedNoCapture) {
7820 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7821 return indicatePessimisticFixpoint();
7824 bool IsKnownNoAlias;
7826 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7827 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7828 return indicatePessimisticFixpoint();
7833 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7834 return indicatePessimisticFixpoint();
7837 return ChangeStatus::UNCHANGED;
7841 void trackStatistics()
const override {
7846struct AAPrivatizablePtrCallSiteReturned final
7847 :
public AAPrivatizablePtrFloating {
7848 AAPrivatizablePtrCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
7849 : AAPrivatizablePtrFloating(IRP,
A) {}
7854 indicatePessimisticFixpoint();
7858 void trackStatistics()
const override {
7863struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7864 AAPrivatizablePtrReturned(
const IRPosition &IRP, Attributor &
A)
7865 : AAPrivatizablePtrFloating(IRP,
A) {}
7870 indicatePessimisticFixpoint();
7874 void trackStatistics()
const override {
7884struct AAMemoryBehaviorImpl :
public AAMemoryBehavior {
7885 AAMemoryBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
7886 : AAMemoryBehavior(IRP,
A) {}
7890 intersectAssumedBits(BEST_STATE);
7891 getKnownStateFromValue(
A, getIRPosition(), getState());
7892 AAMemoryBehavior::initialize(
A);
7896 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
7897 BitIntegerState &State,
7898 bool IgnoreSubsumingPositions =
false) {
7900 A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
7902 switch (Attr.getKindAsEnum()) {
7903 case Attribute::ReadNone:
7906 case Attribute::ReadOnly:
7909 case Attribute::WriteOnly:
7918 if (!
I->mayReadFromMemory())
7920 if (!
I->mayWriteToMemory())
7926 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
7927 SmallVectorImpl<Attribute> &Attrs)
const override {
7930 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadNone));
7932 Attrs.push_back(Attribute::get(Ctx, Attribute::ReadOnly));
7933 else if (isAssumedWriteOnly())
7934 Attrs.push_back(Attribute::get(Ctx, Attribute::WriteOnly));
7940 const IRPosition &IRP = getIRPosition();
7942 if (
A.hasAttr(IRP, Attribute::ReadNone,
7944 return ChangeStatus::UNCHANGED;
7953 return ChangeStatus::UNCHANGED;
7956 A.removeAttrs(IRP, AttrKinds);
7959 A.removeAttrs(IRP, Attribute::Writable);
7966 const std::string getAsStr(Attributor *
A)
const override {
7971 if (isAssumedWriteOnly())
7973 return "may-read/write";
7977 static const Attribute::AttrKind AttrKinds[3];
7981 Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7984struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7985 AAMemoryBehaviorFloating(
const IRPosition &IRP, Attributor &
A)
7986 : AAMemoryBehaviorImpl(IRP,
A) {}
7992 void trackStatistics()
const override {
7997 else if (isAssumedWriteOnly())
8004 bool followUsersOfUseIn(Attributor &
A,
const Use &U,
8005 const Instruction *UserI);
8008 void analyzeUseIn(Attributor &
A,
const Use &U,
const Instruction *UserI);
8012struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
8013 AAMemoryBehaviorArgument(
const IRPosition &IRP, Attributor &
A)
8014 : AAMemoryBehaviorFloating(IRP,
A) {}
8018 intersectAssumedBits(BEST_STATE);
8019 const IRPosition &IRP = getIRPosition();
8023 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
8025 getKnownStateFromValue(
A, IRP, getState(),
8032 return ChangeStatus::UNCHANGED;
8036 if (
A.hasAttr(getIRPosition(),
8037 {Attribute::InAlloca, Attribute::Preallocated})) {
8038 removeKnownBits(NO_WRITES);
8039 removeAssumedBits(NO_WRITES);
8041 A.removeAttrs(getIRPosition(), AttrKinds);
8042 return AAMemoryBehaviorFloating::manifest(
A);
8046 void trackStatistics()
const override {
8051 else if (isAssumedWriteOnly())
8056struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
8057 AAMemoryBehaviorCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
8058 : AAMemoryBehaviorArgument(IRP,
A) {}
8064 Argument *Arg = getAssociatedArgument();
8066 indicatePessimisticFixpoint();
8070 addKnownBits(NO_WRITES);
8071 removeKnownBits(NO_READS);
8072 removeAssumedBits(NO_READS);
8074 AAMemoryBehaviorArgument::initialize(
A);
8075 if (getAssociatedFunction()->isDeclaration())
8076 indicatePessimisticFixpoint();
8085 Argument *Arg = getAssociatedArgument();
8088 A.getAAFor<AAMemoryBehavior>(*
this, ArgPos, DepClassTy::REQUIRED);
8090 return indicatePessimisticFixpoint();
8095 void trackStatistics()
const override {
8100 else if (isAssumedWriteOnly())
8106struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8107 AAMemoryBehaviorCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
8108 : AAMemoryBehaviorFloating(IRP,
A) {}
8112 AAMemoryBehaviorImpl::initialize(
A);
8117 return ChangeStatus::UNCHANGED;
8121 void trackStatistics()
const override {}
8125struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
8126 AAMemoryBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
8127 : AAMemoryBehaviorImpl(IRP,
A) {}
8143 else if (isAssumedWriteOnly())
8146 A.removeAttrs(getIRPosition(), AttrKinds);
8149 for (Argument &Arg :
F.args())
8151 return A.manifestAttrs(getIRPosition(),
8152 Attribute::getWithMemoryEffects(
F.getContext(), ME));
8156 void trackStatistics()
const override {
8161 else if (isAssumedWriteOnly())
8167struct AAMemoryBehaviorCallSite final
8168 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8169 AAMemoryBehaviorCallSite(
const IRPosition &IRP, Attributor &
A)
8170 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl>(IRP,
A) {}
8181 else if (isAssumedWriteOnly())
8184 A.removeAttrs(getIRPosition(), AttrKinds);
8187 for (Use &U : CB.
args())
8189 Attribute::Writable);
8190 return A.manifestAttrs(
8191 getIRPosition(), Attribute::getWithMemoryEffects(CB.
getContext(), ME));
8195 void trackStatistics()
const override {
8200 else if (isAssumedWriteOnly())
8205ChangeStatus AAMemoryBehaviorFunction::updateImpl(Attributor &
A) {
8208 auto AssumedState = getAssumed();
8215 const auto *MemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
8217 if (MemBehaviorAA) {
8218 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8219 return !isAtFixpoint();
8224 if (
I.mayReadFromMemory())
8225 removeAssumedBits(NO_READS);
8226 if (
I.mayWriteToMemory())
8227 removeAssumedBits(NO_WRITES);
8228 return !isAtFixpoint();
8231 bool UsedAssumedInformation =
false;
8232 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8233 UsedAssumedInformation))
8234 return indicatePessimisticFixpoint();
8240ChangeStatus AAMemoryBehaviorFloating::updateImpl(Attributor &
A) {
8242 const IRPosition &IRP = getIRPosition();
8253 const auto *FnMemAA =
8256 FnMemAssumedState = FnMemAA->getAssumed();
8257 S.addKnownBits(FnMemAA->getKnown());
8258 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8264 auto AssumedState = S.getAssumed();
8270 bool IsKnownNoCapture;
8271 const AANoCapture *ArgNoCaptureAA =
nullptr;
8276 if (!IsAssumedNoCapture &&
8278 S.intersectAssumedBits(FnMemAssumedState);
8284 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8286 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8294 Follow = followUsersOfUseIn(
A, U, UserI);
8298 analyzeUseIn(
A, U, UserI);
8300 return !isAtFixpoint();
8303 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8304 return indicatePessimisticFixpoint();
8310bool AAMemoryBehaviorFloating::followUsersOfUseIn(Attributor &
A,
const Use &U,
8311 const Instruction *UserI) {
8329 if (
U.get()->getType()->isPointerTy()) {
8331 bool IsKnownNoCapture;
8340void AAMemoryBehaviorFloating::analyzeUseIn(Attributor &
A,
const Use &U,
8341 const Instruction *UserI) {
8348 case Instruction::Load:
8350 removeAssumedBits(NO_READS);
8353 case Instruction::Store:
8358 removeAssumedBits(NO_WRITES);
8360 indicatePessimisticFixpoint();
8363 case Instruction::Call:
8364 case Instruction::CallBr:
8365 case Instruction::Invoke: {
8372 indicatePessimisticFixpoint();
8379 removeAssumedBits(NO_READS);
8386 if (
U.get()->getType()->isPointerTy())
8390 const auto *MemBehaviorAA =
8396 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8404 removeAssumedBits(NO_READS);
8406 removeAssumedBits(NO_WRITES);
8418 return "all memory";
8421 std::string S =
"memory:";
8427 S +=
"internal global,";
8429 S +=
"external global,";
8433 S +=
"inaccessible,";
8447 AccessKind2Accesses.fill(
nullptr);
8450 ~AAMemoryLocationImpl()
override {
8453 for (AccessSet *AS : AccessKind2Accesses)
8460 intersectAssumedBits(BEST_STATE);
8461 getKnownStateFromValue(
A, getIRPosition(), getState());
8462 AAMemoryLocation::initialize(
A);
8466 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
8467 BitIntegerState &State,
8468 bool IgnoreSubsumingPositions =
false) {
8477 bool UseArgMemOnly =
true;
8479 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8483 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8492 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8497 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8501 A.manifestAttrs(IRP,
8502 Attribute::getWithMemoryEffects(
8511 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8515 A.manifestAttrs(IRP,
8516 Attribute::getWithMemoryEffects(
8526 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
8527 SmallVectorImpl<Attribute> &Attrs)
const override {
8534 else if (isAssumedInaccessibleMemOnly())
8535 Attrs.push_back(Attribute::getWithMemoryEffects(
8537 else if (isAssumedArgMemOnly())
8540 else if (isAssumedInaccessibleOrArgMemOnly())
8541 Attrs.push_back(Attribute::getWithMemoryEffects(
8551 const IRPosition &IRP = getIRPosition();
8555 if (DeducedAttrs.
size() != 1)
8556 return ChangeStatus::UNCHANGED;
8559 return A.manifestAttrs(IRP, Attribute::getWithMemoryEffects(
8564 bool checkForAllAccessesToMemoryKind(
8566 MemoryLocationsKind)>
8568 MemoryLocationsKind RequestedMLK)
const override {
8569 if (!isValidState())
8572 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8573 if (AssumedMLK == NO_LOCATIONS)
8577 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8578 CurMLK *= 2, ++Idx) {
8579 if (CurMLK & RequestedMLK)
8582 if (
const AccessSet *
Accesses = AccessKind2Accesses[Idx])
8583 for (
const AccessInfo &AI : *
Accesses)
8584 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8597 MemoryLocationsKind KnownMLK = getKnown();
8599 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8600 if (!(CurMLK & KnownMLK))
8601 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr,
Changed,
8602 getAccessKindFromInst(
I));
8603 return AAMemoryLocation::indicatePessimisticFixpoint();
8623 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8627 return LHS.Ptr <
RHS.Ptr;
8628 if (
LHS.Kind !=
RHS.Kind)
8629 return LHS.Kind <
RHS.Kind;
8636 using AccessSet = SmallSet<AccessInfo, 2, AccessInfo>;
8637 std::array<AccessSet *, llvm::ConstantLog2<VALID_STATE>()>
8638 AccessKind2Accesses;
8643 categorizeArgumentPointerLocations(Attributor &
A, CallBase &CB,
8644 AAMemoryLocation::StateType &AccessedLocs,
8649 categorizeAccessedLocations(Attributor &
A, Instruction &
I,
bool &
Changed);
8652 AccessKind getAccessKindFromInst(
const Instruction *
I) {
8655 AK =
I->mayReadFromMemory() ? READ :
NONE;
8664 void updateStateAndAccessesMap(AAMemoryLocation::StateType &State,
8665 MemoryLocationsKind MLK,
const Instruction *
I,
8674 if (MLK == NO_UNKOWN_MEM)
8676 State.removeAssumedBits(MLK);
8681 void categorizePtrValue(Attributor &
A,
const Instruction &
I,
const Value &Ptr,
8682 AAMemoryLocation::StateType &State,
bool &
Changed,
8683 unsigned AccessAS = 0);
8689void AAMemoryLocationImpl::categorizePtrValue(
8690 Attributor &
A,
const Instruction &
I,
const Value &Ptr,
8692 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8697 unsigned ObjectAS =
Obj.getType()->getPointerAddressSpace();
8699 MemoryLocationsKind MLK = NO_LOCATIONS;
8718 MLK = NO_ARGUMENT_MEM;
8724 if (GVar->isConstant())
8727 if (GV->hasLocalLinkage())
8728 MLK = NO_GLOBAL_INTERNAL_MEM;
8730 MLK = NO_GLOBAL_EXTERNAL_MEM;
8738 bool IsKnownNoAlias;
8742 MLK = NO_MALLOCED_MEM;
8744 MLK = NO_UNKOWN_MEM;
8746 MLK = NO_UNKOWN_MEM;
8749 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8750 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8751 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8753 getAccessKindFromInst(&
I));
8758 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
8762 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8763 updateStateAndAccessesMap(
State, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8764 getAccessKindFromInst(&
I));
8769 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8773void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8776 for (
unsigned ArgNo = 0,
E = CB.
arg_size(); ArgNo <
E; ++ArgNo) {
8785 const auto *ArgOpMemLocationAA =
8788 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8793 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs,
Changed);
8798AAMemoryLocationImpl::categorizeAccessedLocations(Attributor &
A, Instruction &
I,
8800 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8804 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8809 const auto *CBMemLocationAA =
A.getAAFor<AAMemoryLocation>(
8812 <<
" [" << CBMemLocationAA <<
"]\n");
8813 if (!CBMemLocationAA) {
8814 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8815 Changed, getAccessKindFromInst(&
I));
8816 return NO_UNKOWN_MEM;
8819 if (CBMemLocationAA->isAssumedReadNone())
8820 return NO_LOCATIONS;
8822 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8823 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8824 Changed, getAccessKindFromInst(&
I));
8825 return AccessedLocs.getAssumed();
8828 uint32_t CBAssumedNotAccessedLocs =
8829 CBMemLocationAA->getAssumedNotAccessedLocation();
8832 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8833 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8835 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8836 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8838 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr,
Changed,
8839 getAccessKindFromInst(&
I));
8844 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8845 if (HasGlobalAccesses) {
8848 updateStateAndAccessesMap(AccessedLocs, MLK, &
I, Ptr,
Changed,
8849 getAccessKindFromInst(&
I));
8852 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8853 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8854 return AccessedLocs.getWorstState();
8858 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8859 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8862 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8864 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs,
Changed);
8867 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8868 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8870 return AccessedLocs.getAssumed();
8875 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8876 <<
I <<
" [" << *Ptr <<
"]\n");
8877 categorizePtrValue(
A,
I, *Ptr, AccessedLocs,
Changed,
8878 Ptr->getType()->getPointerAddressSpace());
8879 return AccessedLocs.getAssumed();
8882 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8884 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8885 getAccessKindFromInst(&
I));
8886 return AccessedLocs.getAssumed();
8890struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8891 AAMemoryLocationFunction(
const IRPosition &IRP, Attributor &
A)
8892 : AAMemoryLocationImpl(IRP,
A) {}
8897 const auto *MemBehaviorAA =
8898 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
8901 return indicateOptimisticFixpoint();
8903 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8904 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8905 return ChangeStatus::UNCHANGED;
8909 auto AssumedState = getAssumed();
8913 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I,
Changed);
8914 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8915 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8916 removeAssumedBits(inverseLocation(MLK,
false,
false));
8919 return getAssumedNotAccessedLocation() != VALID_STATE;
8922 bool UsedAssumedInformation =
false;
8923 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8924 UsedAssumedInformation))
8925 return indicatePessimisticFixpoint();
8927 Changed |= AssumedState != getAssumed();
8928 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8932 void trackStatistics()
const override {
8935 else if (isAssumedArgMemOnly())
8937 else if (isAssumedInaccessibleMemOnly())
8939 else if (isAssumedInaccessibleOrArgMemOnly())
8945struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8946 AAMemoryLocationCallSite(
const IRPosition &IRP, Attributor &
A)
8947 : AAMemoryLocationImpl(IRP,
A) {}
8958 A.getAAFor<AAMemoryLocation>(*
this, FnPos, DepClassTy::REQUIRED);
8960 return indicatePessimisticFixpoint();
8964 updateStateAndAccessesMap(getState(), MLK,
I, Ptr,
Changed,
8965 getAccessKindFromInst(
I));
8968 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8969 return indicatePessimisticFixpoint();
8970 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8974 void trackStatistics()
const override {
8984struct AADenormalFPMathImpl :
public AADenormalFPMath {
8985 AADenormalFPMathImpl(
const IRPosition &IRP, Attributor &
A)
8986 : AADenormalFPMath(IRP,
A) {}
8988 const std::string getAsStr(Attributor *
A)
const override {
8989 std::string Str(
"AADenormalFPMath[");
8990 raw_string_ostream OS(Str);
8992 DenormalState Known = getKnown();
8993 if (Known.Mode.isValid())
8994 OS <<
"denormal-fp-math=" << Known.Mode;
8998 if (Known.ModeF32.isValid())
8999 OS <<
" denormal-fp-math-f32=" << Known.ModeF32;
9005struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
9006 AADenormalFPMathFunction(
const IRPosition &IRP, Attributor &
A)
9007 : AADenormalFPMathImpl(IRP,
A) {}
9011 DenormalFPEnv DenormEnv =
F->getDenormalFPEnv();
9021 auto CheckCallSite = [=, &Change, &
A](AbstractCallSite CS) {
9024 <<
"->" << getAssociatedFunction()->
getName() <<
'\n');
9026 const auto *CallerInfo =
A.getAAFor<AADenormalFPMath>(
9032 CallerInfo->getState());
9036 bool AllCallSitesKnown =
true;
9037 if (!
A.checkForAllCallSites(CheckCallSite, *
this,
true, AllCallSitesKnown))
9038 return indicatePessimisticFixpoint();
9040 if (Change == ChangeStatus::CHANGED && isModeFixed())
9046 LLVMContext &Ctx = getAssociatedFunction()->getContext();
9052 DenormalFPEnv KnownEnv(Known.Mode, Known.ModeF32);
9055 AttrToRemove.
push_back(Attribute::DenormalFPEnv);
9058 Ctx, Attribute::DenormalFPEnv,
9059 DenormalFPEnv(Known.Mode, Known.ModeF32).toIntValue()));
9062 auto &IRP = getIRPosition();
9065 return A.removeAttrs(IRP, AttrToRemove) |
9066 A.manifestAttrs(IRP, AttrToAdd,
true);
9069 void trackStatistics()
const override {
9078struct AAValueConstantRangeImpl : AAValueConstantRange {
9079 using StateType = IntegerRangeState;
9080 AAValueConstantRangeImpl(
const IRPosition &IRP, Attributor &
A)
9081 : AAValueConstantRange(IRP,
A) {}
9085 if (
A.hasSimplificationCallback(getIRPosition())) {
9086 indicatePessimisticFixpoint();
9091 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
9094 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
9098 const std::string getAsStr(Attributor *
A)
const override {
9100 llvm::raw_string_ostream OS(Str);
9102 getKnown().print(OS);
9104 getAssumed().print(OS);
9111 const SCEV *getSCEV(Attributor &
A,
const Instruction *
I =
nullptr)
const {
9112 if (!getAnchorScope())
9115 ScalarEvolution *SE =
9116 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9119 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
9125 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9134 ConstantRange getConstantRangeFromSCEV(Attributor &
A,
9135 const Instruction *
I =
nullptr)
const {
9136 if (!getAnchorScope())
9139 ScalarEvolution *SE =
9140 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9143 const SCEV *S = getSCEV(
A,
I);
9153 getConstantRangeFromLVI(Attributor &
A,
9154 const Instruction *CtxI =
nullptr)
const {
9155 if (!getAnchorScope())
9158 LazyValueInfo *LVI =
9159 A.getInfoCache().getAnalysisResultForFunction<LazyValueAnalysis>(
9174 bool isValidCtxInstructionForOutsideAnalysis(Attributor &
A,
9175 const Instruction *CtxI,
9176 bool AllowAACtxI)
const {
9177 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9189 InformationCache &InfoCache =
A.getInfoCache();
9190 const DominatorTree *DT =
9201 getKnownConstantRange(Attributor &
A,
9202 const Instruction *CtxI =
nullptr)
const override {
9203 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9207 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9208 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9209 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9214 getAssumedConstantRange(Attributor &
A,
9215 const Instruction *CtxI =
nullptr)
const override {
9220 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9222 return getAssumed();
9224 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9225 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9226 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9231 getMDNodeForConstantRange(
Type *Ty, LLVMContext &Ctx,
9232 const ConstantRange &AssumedConstantRange) {
9234 Ty, AssumedConstantRange.
getLower())),
9236 Ty, AssumedConstantRange.
getUpper()))};
9241 static bool isBetterRange(
const ConstantRange &Assumed,
9242 const Instruction &
I) {
9246 std::optional<ConstantRange> Known;
9250 }
else if (MDNode *KnownRanges =
I.getMetadata(LLVMContext::MD_range)) {
9256 if (KnownRanges->getNumOperands() > 2)
9259 ConstantInt *
Lower =
9261 ConstantInt *
Upper =
9264 Known.emplace(
Lower->getValue(),
Upper->getValue());
9266 return !Known || (*Known != Assumed && Known->contains(Assumed));
9271 setRangeMetadataIfisBetterRange(Instruction *
I,
9272 const ConstantRange &AssumedConstantRange) {
9273 if (isBetterRange(AssumedConstantRange, *
I)) {
9274 I->setMetadata(LLVMContext::MD_range,
9275 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9276 AssumedConstantRange));
9283 setRangeRetAttrIfisBetterRange(Attributor &
A,
const IRPosition &IRP,
9285 const ConstantRange &AssumedConstantRange) {
9286 if (isBetterRange(AssumedConstantRange, *
I)) {
9287 A.manifestAttrs(IRP,
9288 Attribute::get(
I->getContext(), Attribute::Range,
9289 AssumedConstantRange),
9299 ConstantRange AssumedConstantRange = getAssumedConstantRange(
A);
9302 auto &
V = getAssociatedValue();
9306 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9307 "not the context instruction");
9309 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9310 Changed = ChangeStatus::CHANGED;
9312 if (setRangeRetAttrIfisBetterRange(
A, getIRPosition(),
I,
9313 AssumedConstantRange))
9314 Changed = ChangeStatus::CHANGED;
9322struct AAValueConstantRangeArgument final
9323 : AAArgumentFromCallSiteArguments<
9324 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9326 using Base = AAArgumentFromCallSiteArguments<
9327 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9329 AAValueConstantRangeArgument(
const IRPosition &IRP, Attributor &
A)
9333 void trackStatistics()
const override {
9338struct AAValueConstantRangeReturned
9339 : AAReturnedFromReturnedValues<AAValueConstantRange,
9340 AAValueConstantRangeImpl,
9341 AAValueConstantRangeImpl::StateType,
9344 AAReturnedFromReturnedValues<AAValueConstantRange,
9345 AAValueConstantRangeImpl,
9346 AAValueConstantRangeImpl::StateType,
9348 AAValueConstantRangeReturned(
const IRPosition &IRP, Attributor &
A)
9353 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9354 indicatePessimisticFixpoint();
9358 void trackStatistics()
const override {
9363struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9364 AAValueConstantRangeFloating(
const IRPosition &IRP, Attributor &
A)
9365 : AAValueConstantRangeImpl(IRP,
A) {}
9369 AAValueConstantRangeImpl::initialize(
A);
9373 Value &
V = getAssociatedValue();
9376 unionAssumed(ConstantRange(
C->getValue()));
9377 indicateOptimisticFixpoint();
9383 unionAssumed(ConstantRange(APInt(
getBitWidth(), 0)));
9384 indicateOptimisticFixpoint();
9396 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9407 indicatePessimisticFixpoint();
9410 << getAssociatedValue() <<
"\n");
9413 bool calculateBinaryOperator(
9414 Attributor &
A, BinaryOperator *BinOp, IntegerRangeState &
T,
9415 const Instruction *CtxI,
9416 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9421 bool UsedAssumedInformation =
false;
9422 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9425 if (!SimplifiedLHS.has_value())
9427 if (!*SimplifiedLHS)
9429 LHS = *SimplifiedLHS;
9431 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9434 if (!SimplifiedRHS.has_value())
9436 if (!*SimplifiedRHS)
9438 RHS = *SimplifiedRHS;
9444 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9446 DepClassTy::REQUIRED);
9450 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9452 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9454 DepClassTy::REQUIRED);
9458 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9460 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9462 T.unionAssumed(AssumedRange);
9466 return T.isValidState();
9469 bool calculateCastInst(
9470 Attributor &
A, CastInst *CastI, IntegerRangeState &
T,
9471 const Instruction *CtxI,
9472 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9478 bool UsedAssumedInformation =
false;
9479 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9482 if (!SimplifiedOpV.has_value())
9484 if (!*SimplifiedOpV)
9486 OpV = *SimplifiedOpV;
9491 auto *OpAA =
A.getAAFor<AAValueConstantRange>(
9493 DepClassTy::REQUIRED);
9497 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9499 return T.isValidState();
9503 calculateCmpInst(Attributor &
A, CmpInst *CmpI, IntegerRangeState &
T,
9504 const Instruction *CtxI,
9505 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9510 bool UsedAssumedInformation =
false;
9511 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9514 if (!SimplifiedLHS.has_value())
9516 if (!*SimplifiedLHS)
9518 LHS = *SimplifiedLHS;
9520 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9523 if (!SimplifiedRHS.has_value())
9525 if (!*SimplifiedRHS)
9527 RHS = *SimplifiedRHS;
9533 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9535 DepClassTy::REQUIRED);
9539 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9541 DepClassTy::REQUIRED);
9545 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9546 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9549 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9552 bool MustTrue =
false, MustFalse =
false;
9554 auto AllowedRegion =
9557 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9563 assert((!MustTrue || !MustFalse) &&
9564 "Either MustTrue or MustFalse should be false!");
9567 T.unionAssumed(ConstantRange(APInt( 1, 1)));
9569 T.unionAssumed(ConstantRange(APInt( 1, 0)));
9571 T.unionAssumed(ConstantRange( 1,
true));
9573 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9574 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9575 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9579 return T.isValidState();
9591 bool UsedAssumedInformation =
false;
9592 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9595 if (!SimplifiedOpV.has_value())
9597 if (!*SimplifiedOpV)
9599 Value *VPtr = *SimplifiedOpV;
9602 const auto *AA =
A.getAAFor<AAValueConstantRange>(
9604 DepClassTy::REQUIRED);
9608 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9612 return T.isValidState();
9617 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9620 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9623 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9629 T.indicatePessimisticFixpoint();
9636 for (
const AAValueConstantRange *QueriedAA : QuerriedAAs) {
9637 if (QueriedAA !=
this)
9640 if (
T.getAssumed() == getState().getAssumed())
9642 T.indicatePessimisticFixpoint();
9645 return T.isValidState();
9648 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9649 return indicatePessimisticFixpoint();
9654 return ChangeStatus::UNCHANGED;
9655 if (++NumChanges > MaxNumChanges) {
9656 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9657 <<
" but only " << MaxNumChanges
9658 <<
" are allowed to avoid cyclic reasoning.");
9659 return indicatePessimisticFixpoint();
9661 return ChangeStatus::CHANGED;
9665 void trackStatistics()
const override {
9674 static constexpr int MaxNumChanges = 5;
9677struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9678 AAValueConstantRangeFunction(
const IRPosition &IRP, Attributor &
A)
9679 : AAValueConstantRangeImpl(IRP,
A) {}
9683 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9691struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9692 AAValueConstantRangeCallSite(
const IRPosition &IRP, Attributor &
A)
9693 : AAValueConstantRangeFunction(IRP,
A) {}
9699struct AAValueConstantRangeCallSiteReturned
9700 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9701 AAValueConstantRangeImpl::StateType,
9703 AAValueConstantRangeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
9704 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9705 AAValueConstantRangeImpl::StateType,
9712 if (std::optional<ConstantRange>
Range = CI->getRange())
9713 intersectKnown(*
Range);
9716 AAValueConstantRangeImpl::initialize(
A);
9720 void trackStatistics()
const override {
9724struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9725 AAValueConstantRangeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
9726 : AAValueConstantRangeFloating(IRP,
A) {}
9730 return ChangeStatus::UNCHANGED;
9734 void trackStatistics()
const override {
9743struct AAPotentialConstantValuesImpl : AAPotentialConstantValues {
9746 AAPotentialConstantValuesImpl(
const IRPosition &IRP, Attributor &
A)
9747 : AAPotentialConstantValues(IRP,
A) {}
9751 if (
A.hasSimplificationCallback(getIRPosition()))
9752 indicatePessimisticFixpoint();
9754 AAPotentialConstantValues::initialize(
A);
9757 bool fillSetWithConstantValues(Attributor &
A,
const IRPosition &IRP, SetTy &S,
9758 bool &ContainsUndef,
bool ForSelf) {
9760 bool UsedAssumedInformation =
false;
9762 UsedAssumedInformation)) {
9769 auto *PotentialValuesAA =
A.getAAFor<AAPotentialConstantValues>(
9770 *
this, IRP, DepClassTy::REQUIRED);
9771 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9773 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9774 S = PotentialValuesAA->getState().getAssumedSet();
9781 ContainsUndef =
false;
9782 for (
auto &It : Values) {
9784 ContainsUndef =
true;
9790 S.insert(CI->getValue());
9792 ContainsUndef &= S.empty();
9798 const std::string getAsStr(Attributor *
A)
const override {
9800 llvm::raw_string_ostream OS(Str);
9807 return indicatePessimisticFixpoint();
9811struct AAPotentialConstantValuesArgument final
9812 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9813 AAPotentialConstantValuesImpl,
9814 PotentialConstantIntValuesState> {
9815 using Base = AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9816 AAPotentialConstantValuesImpl,
9818 AAPotentialConstantValuesArgument(
const IRPosition &IRP, Attributor &
A)
9822 void trackStatistics()
const override {
9827struct AAPotentialConstantValuesReturned
9828 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9829 AAPotentialConstantValuesImpl> {
9830 using Base = AAReturnedFromReturnedValues<AAPotentialConstantValues,
9831 AAPotentialConstantValuesImpl>;
9832 AAPotentialConstantValuesReturned(
const IRPosition &IRP, Attributor &
A)
9836 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9837 indicatePessimisticFixpoint();
9838 Base::initialize(
A);
9842 void trackStatistics()
const override {
9847struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9848 AAPotentialConstantValuesFloating(
const IRPosition &IRP, Attributor &
A)
9849 : AAPotentialConstantValuesImpl(IRP,
A) {}
9853 AAPotentialConstantValuesImpl::initialize(
A);
9857 Value &
V = getAssociatedValue();
9860 unionAssumed(
C->getValue());
9861 indicateOptimisticFixpoint();
9866 unionAssumedWithUndef();
9867 indicateOptimisticFixpoint();
9877 indicatePessimisticFixpoint();
9880 << getAssociatedValue() <<
"\n");
9883 static bool calculateICmpInst(
const ICmpInst *ICI,
const APInt &
LHS,
9888 static APInt calculateCastInst(
const CastInst *CI,
const APInt &Src,
9889 uint32_t ResultBitWidth) {
9894 case Instruction::Trunc:
9895 return Src.trunc(ResultBitWidth);
9896 case Instruction::SExt:
9897 return Src.sext(ResultBitWidth);
9898 case Instruction::ZExt:
9899 return Src.zext(ResultBitWidth);
9900 case Instruction::BitCast:
9905 static APInt calculateBinaryOperator(
const BinaryOperator *BinOp,
9906 const APInt &
LHS,
const APInt &
RHS,
9907 bool &SkipOperation,
bool &Unsupported) {
9914 switch (BinOpcode) {
9918 case Instruction::Add:
9920 case Instruction::Sub:
9922 case Instruction::Mul:
9924 case Instruction::UDiv:
9926 SkipOperation =
true;
9930 case Instruction::SDiv:
9932 SkipOperation =
true;
9936 case Instruction::URem:
9938 SkipOperation =
true;
9942 case Instruction::SRem:
9944 SkipOperation =
true;
9948 case Instruction::Shl:
9950 case Instruction::LShr:
9952 case Instruction::AShr:
9954 case Instruction::And:
9956 case Instruction::Or:
9958 case Instruction::Xor:
9963 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9964 const APInt &
LHS,
const APInt &
RHS) {
9965 bool SkipOperation =
false;
9968 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9973 unionAssumed(Result);
9974 return isValidState();
9977 ChangeStatus updateWithICmpInst(Attributor &
A, ICmpInst *ICI) {
9978 auto AssumedBefore = getAssumed();
9982 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9983 SetTy LHSAAPVS, RHSAAPVS;
9985 LHSContainsUndef,
false) ||
9987 RHSContainsUndef,
false))
9988 return indicatePessimisticFixpoint();
9991 bool MaybeTrue =
false, MaybeFalse =
false;
9993 if (LHSContainsUndef && RHSContainsUndef) {
9996 unionAssumedWithUndef();
9997 }
else if (LHSContainsUndef) {
9998 for (
const APInt &R : RHSAAPVS) {
9999 bool CmpResult = calculateICmpInst(ICI, Zero, R);
10000 MaybeTrue |= CmpResult;
10001 MaybeFalse |= !CmpResult;
10002 if (MaybeTrue & MaybeFalse)
10003 return indicatePessimisticFixpoint();
10005 }
else if (RHSContainsUndef) {
10006 for (
const APInt &L : LHSAAPVS) {
10007 bool CmpResult = calculateICmpInst(ICI, L, Zero);
10008 MaybeTrue |= CmpResult;
10009 MaybeFalse |= !CmpResult;
10010 if (MaybeTrue & MaybeFalse)
10011 return indicatePessimisticFixpoint();
10014 for (
const APInt &L : LHSAAPVS) {
10015 for (
const APInt &R : RHSAAPVS) {
10016 bool CmpResult = calculateICmpInst(ICI, L, R);
10017 MaybeTrue |= CmpResult;
10018 MaybeFalse |= !CmpResult;
10019 if (MaybeTrue & MaybeFalse)
10020 return indicatePessimisticFixpoint();
10025 unionAssumed(APInt( 1, 1));
10027 unionAssumed(APInt( 1, 0));
10028 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10029 : ChangeStatus::CHANGED;
10032 ChangeStatus updateWithSelectInst(Attributor &
A, SelectInst *SI) {
10033 auto AssumedBefore = getAssumed();
10037 bool UsedAssumedInformation =
false;
10038 std::optional<Constant *>
C =
A.getAssumedConstant(
10039 *
SI->getCondition(), *
this, UsedAssumedInformation);
10042 bool OnlyLeft =
false, OnlyRight =
false;
10043 if (
C && *
C && (*C)->isOneValue())
10045 else if (
C && *
C && (*C)->isNullValue())
10048 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10049 SetTy LHSAAPVS, RHSAAPVS;
10052 LHSContainsUndef,
false))
10053 return indicatePessimisticFixpoint();
10057 RHSContainsUndef,
false))
10058 return indicatePessimisticFixpoint();
10060 if (OnlyLeft || OnlyRight) {
10062 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
10063 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
10066 unionAssumedWithUndef();
10068 for (
const auto &It : *OpAA)
10072 }
else if (LHSContainsUndef && RHSContainsUndef) {
10074 unionAssumedWithUndef();
10076 for (
const auto &It : LHSAAPVS)
10078 for (
const auto &It : RHSAAPVS)
10081 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10082 : ChangeStatus::CHANGED;
10085 ChangeStatus updateWithCastInst(Attributor &
A, CastInst *CI) {
10086 auto AssumedBefore = getAssumed();
10088 return indicatePessimisticFixpoint();
10093 bool SrcContainsUndef =
false;
10096 SrcContainsUndef,
false))
10097 return indicatePessimisticFixpoint();
10099 if (SrcContainsUndef)
10100 unionAssumedWithUndef();
10102 for (
const APInt &S : SrcPVS) {
10103 APInt
T = calculateCastInst(CI, S, ResultBitWidth);
10107 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10108 : ChangeStatus::CHANGED;
10111 ChangeStatus updateWithBinaryOperator(Attributor &
A, BinaryOperator *BinOp) {
10112 auto AssumedBefore = getAssumed();
10116 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10117 SetTy LHSAAPVS, RHSAAPVS;
10119 LHSContainsUndef,
false) ||
10121 RHSContainsUndef,
false))
10122 return indicatePessimisticFixpoint();
10127 if (LHSContainsUndef && RHSContainsUndef) {
10128 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10129 return indicatePessimisticFixpoint();
10130 }
else if (LHSContainsUndef) {
10131 for (
const APInt &R : RHSAAPVS) {
10132 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10133 return indicatePessimisticFixpoint();
10135 }
else if (RHSContainsUndef) {
10136 for (
const APInt &L : LHSAAPVS) {
10137 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10138 return indicatePessimisticFixpoint();
10141 for (
const APInt &L : LHSAAPVS) {
10142 for (
const APInt &R : RHSAAPVS) {
10143 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10144 return indicatePessimisticFixpoint();
10148 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10149 : ChangeStatus::CHANGED;
10152 ChangeStatus updateWithInstruction(Attributor &
A, Instruction *Inst) {
10153 auto AssumedBefore = getAssumed();
10155 bool ContainsUndef;
10157 ContainsUndef,
true))
10158 return indicatePessimisticFixpoint();
10159 if (ContainsUndef) {
10160 unionAssumedWithUndef();
10162 for (
const auto &It : Incoming)
10165 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10166 : ChangeStatus::CHANGED;
10171 Value &
V = getAssociatedValue();
10175 return updateWithICmpInst(
A, ICI);
10178 return updateWithSelectInst(
A, SI);
10181 return updateWithCastInst(
A, CI);
10184 return updateWithBinaryOperator(
A, BinOp);
10187 return updateWithInstruction(
A,
I);
10189 return indicatePessimisticFixpoint();
10193 void trackStatistics()
const override {
10198struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10199 AAPotentialConstantValuesFunction(
const IRPosition &IRP, Attributor &
A)
10200 : AAPotentialConstantValuesImpl(IRP,
A) {}
10205 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10210 void trackStatistics()
const override {
10215struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10216 AAPotentialConstantValuesCallSite(
const IRPosition &IRP, Attributor &
A)
10217 : AAPotentialConstantValuesFunction(IRP,
A) {}
10220 void trackStatistics()
const override {
10225struct AAPotentialConstantValuesCallSiteReturned
10226 : AACalleeToCallSite<AAPotentialConstantValues,
10227 AAPotentialConstantValuesImpl> {
10228 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10230 : AACalleeToCallSite<AAPotentialConstantValues,
10231 AAPotentialConstantValuesImpl>(IRP,
A) {}
10234 void trackStatistics()
const override {
10239struct AAPotentialConstantValuesCallSiteArgument
10240 : AAPotentialConstantValuesFloating {
10241 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10243 : AAPotentialConstantValuesFloating(IRP,
A) {}
10247 AAPotentialConstantValuesImpl::initialize(
A);
10248 if (isAtFixpoint())
10251 Value &
V = getAssociatedValue();
10254 unionAssumed(
C->getValue());
10255 indicateOptimisticFixpoint();
10260 unionAssumedWithUndef();
10261 indicateOptimisticFixpoint();
10268 Value &
V = getAssociatedValue();
10269 auto AssumedBefore = getAssumed();
10270 auto *AA =
A.getAAFor<AAPotentialConstantValues>(
10273 return indicatePessimisticFixpoint();
10274 const auto &S = AA->getAssumed();
10276 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10277 : ChangeStatus::CHANGED;
10281 void trackStatistics()
const override {
10290 bool IgnoreSubsumingPositions) {
10291 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10292 "Unexpected attribute kind");
10293 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10294 Attribute::NoUndef))
10314 Value &V = getAssociatedValue();
10316 indicatePessimisticFixpoint();
10317 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10321 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10322 AANoUndef::StateType &State) {
10323 const Value *UseV =
U->get();
10324 const DominatorTree *DT =
nullptr;
10325 AssumptionCache *AC =
nullptr;
10326 InformationCache &InfoCache =
A.getInfoCache();
10327 if (Function *
F = getAnchorScope()) {
10332 bool TrackUse =
false;
10341 const std::string getAsStr(Attributor *
A)
const override {
10342 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10349 bool UsedAssumedInformation =
false;
10350 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10351 UsedAssumedInformation))
10352 return ChangeStatus::UNCHANGED;
10356 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10359 return ChangeStatus::UNCHANGED;
10360 return AANoUndef::manifest(
A);
10364struct AANoUndefFloating :
public AANoUndefImpl {
10365 AANoUndefFloating(
const IRPosition &IRP, Attributor &
A)
10366 : AANoUndefImpl(IRP,
A) {}
10370 AANoUndefImpl::initialize(
A);
10371 if (!getState().isAtFixpoint() && getAnchorScope() &&
10372 !getAnchorScope()->isDeclaration())
10373 if (Instruction *CtxI = getCtxI())
10374 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10379 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10380 bool IsKnownNoUndef;
10382 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10386 bool UsedAssumedInformation =
false;
10387 Value *AssociatedValue = &getAssociatedValue();
10389 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10394 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10402 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10403 return indicatePessimisticFixpoint();
10404 return ChangeStatus::UNCHANGED;
10407 for (
const auto &VAC : Values)
10409 return indicatePessimisticFixpoint();
10411 return ChangeStatus::UNCHANGED;
10418struct AANoUndefReturned final
10419 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
10420 AANoUndefReturned(
const IRPosition &IRP, Attributor &
A)
10421 : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10427struct AANoUndefArgument final
10428 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
10429 AANoUndefArgument(
const IRPosition &IRP, Attributor &
A)
10430 : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10436struct AANoUndefCallSiteArgument final : AANoUndefFloating {
10437 AANoUndefCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
10438 : AANoUndefFloating(IRP,
A) {}
10444struct AANoUndefCallSiteReturned final
10445 : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
10446 AANoUndefCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
10447 : AACalleeToCallSite<AANoUndef, AANoUndefImpl>(IRP,
A) {}
10455struct AANoFPClassImpl : AANoFPClass {
10456 AANoFPClassImpl(
const IRPosition &IRP, Attributor &
A) : AANoFPClass(IRP,
A) {}
10459 const IRPosition &IRP = getIRPosition();
10463 indicateOptimisticFixpoint();
10468 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10469 for (
const auto &Attr : Attrs) {
10476 const DataLayout &
DL =
A.getDataLayout();
10477 InformationCache &InfoCache =
A.getInfoCache();
10479 const DominatorTree *DT =
nullptr;
10480 AssumptionCache *AC =
nullptr;
10481 const TargetLibraryInfo *TLI =
nullptr;
10485 if (!
F->isDeclaration()) {
10492 SimplifyQuery Q(
DL, TLI, DT, AC, CtxI);
10499 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10503 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10504 AANoFPClass::StateType &State) {
10515 if (
auto *NoFPAA =
A.getAAFor<AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10516 State.addKnownBits(NoFPAA->getState().getKnown());
10520 const std::string getAsStr(Attributor *
A)
const override {
10521 std::string
Result =
"nofpclass";
10522 raw_string_ostream OS(Result);
10523 OS << getKnownNoFPClass() <<
'/' << getAssumedNoFPClass();
10527 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
10528 SmallVectorImpl<Attribute> &Attrs)
const override {
10529 Attrs.emplace_back(Attribute::getWithNoFPClass(Ctx, getAssumedNoFPClass()));
10533struct AANoFPClassFloating :
public AANoFPClassImpl {
10534 AANoFPClassFloating(
const IRPosition &IRP, Attributor &
A)
10535 : AANoFPClassImpl(IRP,
A) {}
10540 bool UsedAssumedInformation =
false;
10541 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10543 Values.
push_back({getAssociatedValue(), getCtxI()});
10549 DepClassTy::REQUIRED);
10550 if (!AA ||
this == AA) {
10551 T.indicatePessimisticFixpoint();
10553 const AANoFPClass::StateType &S =
10554 static_cast<const AANoFPClass::StateType &
>(AA->
getState());
10557 return T.isValidState();
10560 for (
const auto &VAC : Values)
10562 return indicatePessimisticFixpoint();
10568 void trackStatistics()
const override {
10573struct AANoFPClassReturned final
10574 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10575 AANoFPClassImpl::StateType, false,
10576 Attribute::None, false> {
10577 AANoFPClassReturned(
const IRPosition &IRP, Attributor &
A)
10578 : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
10579 AANoFPClassImpl::StateType,
false,
10583 void trackStatistics()
const override {
10588struct AANoFPClassArgument final
10589 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
10590 AANoFPClassArgument(
const IRPosition &IRP, Attributor &
A)
10591 : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10597struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
10598 AANoFPClassCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
10599 : AANoFPClassFloating(IRP,
A) {}
10602 void trackStatistics()
const override {
10607struct AANoFPClassCallSiteReturned final
10608 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
10609 AANoFPClassCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
10610 : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl>(IRP,
A) {}
10613 void trackStatistics()
const override {
10618struct AACallEdgesImpl :
public AACallEdges {
10619 AACallEdgesImpl(
const IRPosition &IRP, Attributor &
A) : AACallEdges(IRP,
A) {}
10621 const SetVector<Function *> &getOptimisticEdges()
const override {
10622 return CalledFunctions;
10625 bool hasUnknownCallee()
const override {
return HasUnknownCallee; }
10627 bool hasNonAsmUnknownCallee()
const override {
10628 return HasUnknownCalleeNonAsm;
10631 const std::string getAsStr(Attributor *
A)
const override {
10632 return "CallEdges[" + std::to_string(HasUnknownCallee) +
"," +
10633 std::to_string(CalledFunctions.size()) +
"]";
10636 void trackStatistics()
const override {}
10639 void addCalledFunction(Function *Fn,
ChangeStatus &Change) {
10640 if (CalledFunctions.insert(Fn)) {
10641 Change = ChangeStatus::CHANGED;
10647 void setHasUnknownCallee(
bool NonAsm,
ChangeStatus &Change) {
10648 if (!HasUnknownCallee)
10649 Change = ChangeStatus::CHANGED;
10650 if (NonAsm && !HasUnknownCalleeNonAsm)
10651 Change = ChangeStatus::CHANGED;
10652 HasUnknownCalleeNonAsm |= NonAsm;
10653 HasUnknownCallee =
true;
10658 SetVector<Function *> CalledFunctions;
10661 bool HasUnknownCallee =
false;
10664 bool HasUnknownCalleeNonAsm =
false;
10667struct AACallEdgesCallSite :
public AACallEdgesImpl {
10668 AACallEdgesCallSite(
const IRPosition &IRP, Attributor &
A)
10669 : AACallEdgesImpl(IRP,
A) {}
10676 addCalledFunction(Fn, Change);
10678 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10679 setHasUnknownCallee(
true, Change);
10690 VisitValue(*V, CtxI);
10694 bool UsedAssumedInformation =
false;
10700 for (
auto &VAC : Values)
10707 if (
IA->hasSideEffects() &&
10710 setHasUnknownCallee(
false, Change);
10716 if (
auto *IndirectCallAA =
A.getAAFor<AAIndirectCallInfo>(
10717 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10718 if (IndirectCallAA->foreachCallee(
10719 [&](Function *Fn) { return VisitValue(*Fn, CB); }))
10728 for (
const Use *U : CallbackUses)
10729 ProcessCalledOperand(
U->get(), CB);
10735struct AACallEdgesFunction :
public AACallEdgesImpl {
10736 AACallEdgesFunction(
const IRPosition &IRP, Attributor &
A)
10737 : AACallEdgesImpl(IRP,
A) {}
10746 auto *CBEdges =
A.getAAFor<AACallEdges>(
10750 if (CBEdges->hasNonAsmUnknownCallee())
10751 setHasUnknownCallee(
true, Change);
10752 if (CBEdges->hasUnknownCallee())
10753 setHasUnknownCallee(
false, Change);
10755 for (Function *
F : CBEdges->getOptimisticEdges())
10756 addCalledFunction(
F, Change);
10762 bool UsedAssumedInformation =
false;
10763 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10764 UsedAssumedInformation,
10768 setHasUnknownCallee(
true, Change);
10777struct AAInterFnReachabilityFunction
10778 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10779 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10780 AAInterFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
10783 bool instructionCanReach(
10784 Attributor &
A,
const Instruction &From,
const Function &To,
10787 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10789 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
10790 RQITy::Reachable
Result;
10791 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10792 return NonConstThis->isReachableImpl(
A, StackRQI,
10794 return Result == RQITy::Reachable::Yes;
10798 bool IsTemporaryRQI)
override {
10800 &RQI.From->getFunction()->getEntryBlock().front();
10801 if (EntryI != RQI.From &&
10802 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10803 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10806 auto CheckReachableCallBase = [&](CallBase *CB) {
10807 auto *CBEdges =
A.getAAFor<AACallEdges>(
10809 if (!CBEdges || !CBEdges->getState().isValidState())
10812 if (CBEdges->hasUnknownCallee())
10815 for (Function *Fn : CBEdges->getOptimisticEdges()) {
10826 if (Fn == getAnchorScope()) {
10827 if (EntryI == RQI.From)
10832 const AAInterFnReachability *InterFnReachability =
10834 DepClassTy::OPTIONAL);
10837 if (!InterFnReachability ||
10845 const auto *IntraFnReachability =
A.getAAFor<AAIntraFnReachability>(
10847 DepClassTy::OPTIONAL);
10855 return IntraFnReachability && !IntraFnReachability->isAssumedReachable(
10856 A, *RQI.From, CBInst, RQI.ExclusionSet);
10859 bool UsedExclusionSet =
true;
10860 bool UsedAssumedInformation =
false;
10861 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10862 UsedAssumedInformation,
10864 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10867 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10871 void trackStatistics()
const override {}
10875template <
typename AAType>
10876static std::optional<Constant *>
10879 if (!Ty.isIntegerTy())
10887 std::optional<Constant *> COpt =
AA->getAssumedConstant(
A);
10889 if (!COpt.has_value()) {
10891 return std::nullopt;
10893 if (
auto *
C = *COpt) {
10904 std::optional<Value *> V;
10905 for (
auto &It : Values) {
10907 if (V.has_value() && !*V)
10910 if (!V.has_value())
10924 if (
A.hasSimplificationCallback(getIRPosition())) {
10925 indicatePessimisticFixpoint();
10928 Value *Stripped = getAssociatedValue().stripPointerCasts();
10930 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10932 indicateOptimisticFixpoint();
10935 AAPotentialValues::initialize(
A);
10939 const std::string getAsStr(Attributor *
A)
const override {
10941 llvm::raw_string_ostream OS(Str);
10946 template <
typename AAType>
10947 static std::optional<Value *> askOtherAA(Attributor &
A,
10948 const AbstractAttribute &AA,
10949 const IRPosition &IRP,
Type &Ty) {
10954 return std::nullopt;
10961 virtual void addValue(Attributor &
A, StateType &State,
Value &V,
10963 Function *AnchorScope)
const {
10967 for (
const auto &U : CB->
args()) {
10977 Type &Ty = *getAssociatedType();
10978 std::optional<Value *> SimpleV =
10979 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10980 if (SimpleV.has_value() && !*SimpleV) {
10981 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
10982 *
this, ValIRP, DepClassTy::OPTIONAL);
10983 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10984 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10985 State.unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10986 if (PotentialConstantsAA->undefIsContained())
10991 if (!SimpleV.has_value())
11003 State.unionAssumed({{*VPtr, CtxI}, S});
11009 AA::ValueAndContext
I;
11013 return II.I ==
I &&
II.S == S;
11016 return std::tie(
I, S) < std::tie(
II.I,
II.S);
11020 bool recurseForValue(Attributor &
A,
const IRPosition &IRP,
AA::ValueScope S) {
11021 SmallMapVector<AA::ValueAndContext, int, 8> ValueScopeMap;
11026 bool UsedAssumedInformation =
false;
11028 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
11029 UsedAssumedInformation))
11032 for (
auto &It : Values)
11033 ValueScopeMap[It] += CS;
11035 for (
auto &It : ValueScopeMap)
11036 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
11042 void giveUpOnIntraprocedural(Attributor &
A) {
11043 auto NewS = StateType::getBestState(getState());
11044 for (
const auto &It : getAssumedSet()) {
11047 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
11050 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11058 getState() = StateType::getBestState(getState());
11059 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
11060 AAPotentialValues::indicateOptimisticFixpoint();
11061 return ChangeStatus::CHANGED;
11066 return indicatePessimisticFixpoint();
11074 if (!getAssumedSimplifiedValues(
A, Values, S))
11076 Value &OldV = getAssociatedValue();
11079 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
11080 if (!NewV || NewV == &OldV)
11085 if (
A.changeAfterManifest(getIRPosition(), *NewV))
11086 return ChangeStatus::CHANGED;
11088 return ChangeStatus::UNCHANGED;
11091 bool getAssumedSimplifiedValues(
11092 Attributor &
A, SmallVectorImpl<AA::ValueAndContext> &Values,
11093 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
11094 if (!isValidState())
11096 bool UsedAssumedInformation =
false;
11097 for (
const auto &It : getAssumedSet())
11098 if (It.second & S) {
11099 if (RecurseForSelectAndPHI && (
isa<PHINode>(It.first.getValue()) ||
11101 if (
A.getAssumedSimplifiedValues(
11103 this, Values, S, UsedAssumedInformation))
11108 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11113struct AAPotentialValuesFloating : AAPotentialValuesImpl {
11114 AAPotentialValuesFloating(
const IRPosition &IRP, Attributor &
A)
11115 : AAPotentialValuesImpl(IRP,
A) {}
11119 auto AssumedBefore = getAssumed();
11121 genericValueTraversal(
A, &getAssociatedValue());
11123 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11124 : ChangeStatus::CHANGED;
11128 struct LivenessInfo {
11129 const AAIsDead *LivenessAA =
nullptr;
11130 bool AnyDead =
false;
11140 SmallVectorImpl<ItemInfo> &Worklist) {
11143 bool UsedAssumedInformation =
false;
11145 auto GetSimplifiedValues = [&](
Value &
V,
11147 if (!
A.getAssumedSimplifiedValues(
11151 Values.
push_back(AA::ValueAndContext{
V,
II.I.getCtxI()});
11153 return Values.
empty();
11155 if (GetSimplifiedValues(*
LHS, LHSValues))
11157 if (GetSimplifiedValues(*
RHS, RHSValues))
11162 InformationCache &InfoCache =
A.getInfoCache();
11169 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11174 const DataLayout &
DL =
A.getDataLayout();
11175 SimplifyQuery Q(
DL, TLI, DT, AC, CmpI);
11177 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11180 nullptr,
II.S, getAnchorScope());
11186 if (&LHSV == &RHSV &&
11188 Constant *NewV = ConstantInt::get(Type::getInt1Ty(Ctx),
11190 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11197 if (TypedLHS && TypedRHS) {
11199 if (NewV && NewV != &Cmp) {
11200 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11212 if (!LHSIsNull && !RHSIsNull)
11218 assert((LHSIsNull || RHSIsNull) &&
11219 "Expected nullptr versus non-nullptr comparison at this point");
11222 unsigned PtrIdx = LHSIsNull;
11223 bool IsKnownNonNull;
11226 DepClassTy::REQUIRED, IsKnownNonNull);
11227 if (!IsAssumedNonNull)
11233 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11238 for (
auto &LHSValue : LHSValues)
11239 for (
auto &RHSValue : RHSValues)
11240 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11245 bool handleSelectInst(Attributor &
A, SelectInst &SI, ItemInfo
II,
11246 SmallVectorImpl<ItemInfo> &Worklist) {
11248 bool UsedAssumedInformation =
false;
11250 std::optional<Constant *>
C =
11251 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11252 bool NoValueYet = !
C.has_value();
11260 }
else if (&SI == &getAssociatedValue()) {
11265 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11267 if (!SimpleV.has_value())
11270 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11278 bool handleLoadInst(Attributor &
A, LoadInst &LI, ItemInfo
II,
11279 SmallVectorImpl<ItemInfo> &Worklist) {
11280 SmallSetVector<Value *, 4> PotentialCopies;
11281 SmallSetVector<Instruction *, 4> PotentialValueOrigins;
11282 bool UsedAssumedInformation =
false;
11284 PotentialValueOrigins, *
this,
11285 UsedAssumedInformation,
11287 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11288 "loaded values for load instruction "
11296 InformationCache &InfoCache =
A.getInfoCache();
11298 if (!
llvm::all_of(PotentialValueOrigins, [&](Instruction *
I) {
11302 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11304 UsedAssumedInformation,
11306 return A.isAssumedDead(*
I,
this,
nullptr,
11307 UsedAssumedInformation,
11310 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11311 "and we cannot delete all the stores: "
11322 bool AllLocal = ScopeIsLocal;
11327 if (!DynamicallyUnique) {
11328 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11329 "values are dynamically unique: "
11334 for (
auto *PotentialCopy : PotentialCopies) {
11336 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11341 if (!AllLocal && ScopeIsLocal)
11346 bool handlePHINode(
11347 Attributor &
A, PHINode &
PHI, ItemInfo
II,
11348 SmallVectorImpl<ItemInfo> &Worklist,
11349 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11350 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11351 LivenessInfo &LI = LivenessAAs[&
F];
11352 if (!LI.LivenessAA)
11358 if (&
PHI == &getAssociatedValue()) {
11359 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11361 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
11362 *
PHI.getFunction());
11366 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11368 if (LI.LivenessAA &&
11369 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11388 bool UsedAssumedInformation =
false;
11389 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11391 if (!SimpleV.has_value())
11395 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11402 bool handleGenericInst(Attributor &
A, Instruction &
I, ItemInfo
II,
11403 SmallVectorImpl<ItemInfo> &Worklist) {
11404 bool SomeSimplified =
false;
11405 bool UsedAssumedInformation =
false;
11407 SmallVector<Value *, 8> NewOps(
I.getNumOperands());
11410 const auto &SimplifiedOp =
A.getAssumedSimplified(
11415 if (!SimplifiedOp.has_value())
11419 NewOps[Idx] = *SimplifiedOp;
11423 SomeSimplified |= (NewOps[Idx] !=
Op);
11429 if (!SomeSimplified)
11432 InformationCache &InfoCache =
A.getInfoCache();
11436 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11439 const DataLayout &
DL =
I.getDataLayout();
11440 SimplifyQuery Q(
DL, TLI, DT, AC, &
I);
11442 if (!NewV || NewV == &
I)
11445 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11452 Attributor &
A, Instruction &
I, ItemInfo
II,
11453 SmallVectorImpl<ItemInfo> &Worklist,
11454 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11457 CI->getPredicate(),
II, Worklist);
11459 switch (
I.getOpcode()) {
11460 case Instruction::Select:
11462 case Instruction::PHI:
11464 case Instruction::Load:
11467 return handleGenericInst(
A,
I,
II, Worklist);
11472 void genericValueTraversal(Attributor &
A,
Value *InitialV) {
11473 SmallMapVector<const Function *, LivenessInfo, 4> LivenessAAs;
11475 SmallSet<ItemInfo, 16> Visited;
11494 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11495 << Iteration <<
"!\n");
11496 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11502 Value *NewV =
nullptr;
11503 if (
V->getType()->isPointerTy()) {
11509 for (Argument &Arg :
Callee->args())
11516 if (NewV && NewV != V) {
11517 Worklist.
push_back({{*NewV, CtxI}, S});
11531 if (V == InitialV && CtxI == getCtxI()) {
11532 indicatePessimisticFixpoint();
11536 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11537 }
while (!Worklist.
empty());
11541 for (
auto &It : LivenessAAs)
11542 if (It.second.AnyDead)
11543 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11547 void trackStatistics()
const override {
11552struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11553 using Base = AAPotentialValuesImpl;
11554 AAPotentialValuesArgument(
const IRPosition &IRP, Attributor &
A)
11561 indicatePessimisticFixpoint();
11566 auto AssumedBefore = getAssumed();
11568 unsigned ArgNo = getCalleeArgNo();
11570 bool UsedAssumedInformation =
false;
11572 auto CallSitePred = [&](AbstractCallSite ACS) {
11574 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11577 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11579 UsedAssumedInformation))
11582 return isValidState();
11585 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11587 UsedAssumedInformation))
11588 return indicatePessimisticFixpoint();
11590 Function *Fn = getAssociatedFunction();
11591 bool AnyNonLocal =
false;
11592 for (
auto &It : Values) {
11594 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11599 return indicatePessimisticFixpoint();
11603 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11609 AnyNonLocal =
true;
11611 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11613 giveUpOnIntraprocedural(
A);
11615 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11616 : ChangeStatus::CHANGED;
11620 void trackStatistics()
const override {
11625struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11626 using Base = AAPotentialValuesFloating;
11627 AAPotentialValuesReturned(
const IRPosition &IRP, Attributor &
A)
11633 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11634 indicatePessimisticFixpoint();
11638 for (Argument &Arg :
F->args())
11641 ReturnedArg = &Arg;
11644 if (!
A.isFunctionIPOAmendable(*
F) ||
11645 A.hasSimplificationCallback(getIRPosition())) {
11647 indicatePessimisticFixpoint();
11649 indicateOptimisticFixpoint();
11655 auto AssumedBefore = getAssumed();
11656 bool UsedAssumedInformation =
false;
11659 Function *AnchorScope = getAnchorScope();
11665 UsedAssumedInformation,
11671 bool AllInterAreIntra =
false;
11674 llvm::all_of(Values, [&](
const AA::ValueAndContext &VAC) {
11678 for (
const AA::ValueAndContext &VAC : Values) {
11679 addValue(
A, getState(), *VAC.
getValue(),
11683 if (AllInterAreIntra)
11690 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11693 bool AddValues =
true;
11696 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11700 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11703 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11704 UsedAssumedInformation,
11706 return indicatePessimisticFixpoint();
11709 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11710 : ChangeStatus::CHANGED;
11715 return ChangeStatus::UNCHANGED;
11717 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11719 return ChangeStatus::UNCHANGED;
11720 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11722 return ChangeStatus::UNCHANGED;
11727 "Number of function with unique return");
11730 {Attribute::get(Arg->
getContext(), Attribute::Returned)});
11735 Value *RetOp = RetI.getOperand(0);
11739 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11740 Changed = ChangeStatus::CHANGED;
11743 bool UsedAssumedInformation =
false;
11744 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11745 UsedAssumedInformation,
11751 return AAPotentialValues::indicatePessimisticFixpoint();
11755 void trackStatistics()
const override{
11762struct AAPotentialValuesFunction : AAPotentialValuesImpl {
11763 AAPotentialValuesFunction(
const IRPosition &IRP, Attributor &
A)
11764 : AAPotentialValuesImpl(IRP,
A) {}
11773 void trackStatistics()
const override {
11778struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
11779 AAPotentialValuesCallSite(
const IRPosition &IRP, Attributor &
A)
11780 : AAPotentialValuesFunction(IRP,
A) {}
11783 void trackStatistics()
const override {
11788struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11789 AAPotentialValuesCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
11790 : AAPotentialValuesImpl(IRP,
A) {}
11794 auto AssumedBefore = getAssumed();
11798 return indicatePessimisticFixpoint();
11800 bool UsedAssumedInformation =
false;
11804 UsedAssumedInformation))
11805 return indicatePessimisticFixpoint();
11812 Values, S, UsedAssumedInformation))
11815 for (
auto &It : Values) {
11816 Value *
V = It.getValue();
11817 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11818 V, *CB, *
this, UsedAssumedInformation);
11819 if (!CallerV.has_value()) {
11823 V = *CallerV ? *CallerV :
V;
11829 giveUpOnIntraprocedural(
A);
11832 addValue(
A, getState(), *V, CB, S, getAnchorScope());
11837 return indicatePessimisticFixpoint();
11839 return indicatePessimisticFixpoint();
11840 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11841 : ChangeStatus::CHANGED;
11845 return AAPotentialValues::indicatePessimisticFixpoint();
11849 void trackStatistics()
const override {
11854struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
11855 AAPotentialValuesCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
11856 : AAPotentialValuesFloating(IRP,
A) {}
11859 void trackStatistics()
const override {
11867struct AAAssumptionInfoImpl :
public AAAssumptionInfo {
11868 AAAssumptionInfoImpl(
const IRPosition &IRP, Attributor &
A,
11869 const DenseSet<StringRef> &Known)
11870 : AAAssumptionInfo(IRP,
A, Known) {}
11875 if (getKnown().isUniversal())
11876 return ChangeStatus::UNCHANGED;
11878 const IRPosition &IRP = getIRPosition();
11880 getAssumed().getSet().
end());
11882 return A.manifestAttrs(IRP,
11889 bool hasAssumption(
const StringRef Assumption)
const override {
11890 return isValidState() && setContains(Assumption);
11894 const std::string getAsStr(Attributor *
A)
const override {
11895 const SetContents &Known = getKnown();
11896 const SetContents &Assumed = getAssumed();
11900 const std::string KnownStr =
llvm::join(Set,
",");
11902 std::string AssumedStr =
"Universal";
11903 if (!Assumed.isUniversal()) {
11904 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11907 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11922struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11923 AAAssumptionInfoFunction(
const IRPosition &IRP, Attributor &
A)
11924 : AAAssumptionInfoImpl(IRP,
A,
11931 auto CallSitePred = [&](AbstractCallSite ACS) {
11932 const auto *AssumptionAA =
A.getAAFor<AAAssumptionInfo>(
11934 DepClassTy::REQUIRED);
11938 Changed |= getIntersection(AssumptionAA->getAssumed());
11939 return !getAssumed().empty() || !getKnown().empty();
11942 bool UsedAssumedInformation =
false;
11947 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11948 UsedAssumedInformation))
11949 return indicatePessimisticFixpoint();
11951 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11954 void trackStatistics()
const override {}
11958struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11960 AAAssumptionInfoCallSite(
const IRPosition &IRP, Attributor &
A)
11961 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11966 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11972 auto *AssumptionAA =
11973 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11975 return indicatePessimisticFixpoint();
11976 bool Changed = getIntersection(AssumptionAA->getAssumed());
11977 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11981 void trackStatistics()
const override {}
11986 DenseSet<StringRef> getInitialAssumptions(
const IRPosition &IRP) {
11993 return Assumptions;
12008struct AAUnderlyingObjectsImpl
12014 const std::string getAsStr(
Attributor *
A)
const override {
12015 if (!isValidState())
12016 return "<invalid>";
12019 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
12020 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
12022 if (!InterAssumedUnderlyingObjects.empty()) {
12023 OS <<
"inter objects:\n";
12024 for (
auto *Obj : InterAssumedUnderlyingObjects)
12025 OS << *Obj <<
'\n';
12027 if (!IntraAssumedUnderlyingObjects.empty()) {
12028 OS <<
"intra objects:\n";
12029 for (
auto *Obj : IntraAssumedUnderlyingObjects)
12030 OS << *
Obj <<
'\n';
12036 void trackStatistics()
const override {}
12040 auto &Ptr = getAssociatedValue();
12042 bool UsedAssumedInformation =
false;
12043 auto DoUpdate = [&](SmallSetVector<Value *, 8> &UnderlyingObjects,
12045 SmallPtrSet<Value *, 8> SeenObjects;
12049 Scope, UsedAssumedInformation))
12050 return UnderlyingObjects.
insert(&Ptr);
12054 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
12055 auto &VAC = Values[
I];
12058 if (!SeenObjects.
insert(UO ? UO : Obj).second)
12060 if (UO && UO != Obj) {
12066 const auto *OtherAA =
A.getAAFor<AAUnderlyingObjects>(
12068 auto Pred = [&](
Value &
V) {
12076 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
12078 "The forall call should not return false at this position");
12084 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope,
12085 UsedAssumedInformation);
12091 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
12093 handleIndirect(
A, *
PHI->getIncomingValue(u), UnderlyingObjects,
12094 Scope, UsedAssumedInformation);
12108 if (!UsedAssumedInformation)
12109 indicateOptimisticFixpoint();
12110 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12113 bool forallUnderlyingObjects(
12114 function_ref<
bool(
Value &)> Pred,
12116 if (!isValidState())
12117 return Pred(getAssociatedValue());
12120 ? IntraAssumedUnderlyingObjects
12121 : InterAssumedUnderlyingObjects;
12122 for (
Value *Obj : AssumedUnderlyingObjects)
12132 bool handleIndirect(Attributor &
A,
Value &V,
12133 SmallSetVector<Value *, 8> &UnderlyingObjects,
12136 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
12138 auto Pred = [&](
Value &
V) {
12142 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12144 "The forall call should not return false at this position");
12150 SmallSetVector<Value *, 8> IntraAssumedUnderlyingObjects;
12152 SmallSetVector<Value *, 8> InterAssumedUnderlyingObjects;
12155struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
12156 AAUnderlyingObjectsFloating(
const IRPosition &IRP, Attributor &
A)
12157 : AAUnderlyingObjectsImpl(IRP,
A) {}
12160struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
12161 AAUnderlyingObjectsArgument(
const IRPosition &IRP, Attributor &
A)
12162 : AAUnderlyingObjectsImpl(IRP,
A) {}
12165struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
12166 AAUnderlyingObjectsCallSite(
const IRPosition &IRP, Attributor &
A)
12167 : AAUnderlyingObjectsImpl(IRP,
A) {}
12170struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
12171 AAUnderlyingObjectsCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12172 : AAUnderlyingObjectsImpl(IRP,
A) {}
12175struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
12176 AAUnderlyingObjectsReturned(
const IRPosition &IRP, Attributor &
A)
12177 : AAUnderlyingObjectsImpl(IRP,
A) {}
12180struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
12181 AAUnderlyingObjectsCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12182 : AAUnderlyingObjectsImpl(IRP,
A) {}
12185struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
12186 AAUnderlyingObjectsFunction(
const IRPosition &IRP, Attributor &
A)
12187 : AAUnderlyingObjectsImpl(IRP,
A) {}
12193struct AAGlobalValueInfoFloating :
public AAGlobalValueInfo {
12194 AAGlobalValueInfoFloating(
const IRPosition &IRP, Attributor &
A)
12195 : AAGlobalValueInfo(IRP,
A) {}
12200 bool checkUse(Attributor &
A,
const Use &U,
bool &Follow,
12201 SmallVectorImpl<const Value *> &Worklist) {
12208 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12209 << *UInst <<
"\n");
12212 int Idx = &
Cmp->getOperandUse(0) == &
U;
12215 return U == &getAnchorValue();
12220 auto CallSitePred = [&](AbstractCallSite ACS) {
12221 Worklist.
push_back(ACS.getInstruction());
12224 bool UsedAssumedInformation =
false;
12226 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12228 UsedAssumedInformation))
12246 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12255 unsigned NumUsesBefore =
Uses.size();
12257 SmallPtrSet<const Value *, 8> Visited;
12261 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12269 return checkUse(
A, U, Follow, Worklist);
12271 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12272 Uses.insert(&OldU);
12276 while (!Worklist.
empty()) {
12278 if (!Visited.
insert(V).second)
12280 if (!
A.checkForAllUses(UsePred, *
this, *V,
12282 DepClassTy::OPTIONAL,
12283 true, EquivalentUseCB)) {
12284 return indicatePessimisticFixpoint();
12288 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12289 : ChangeStatus::CHANGED;
12292 bool isPotentialUse(
const Use &U)
const override {
12293 return !isValidState() ||
Uses.contains(&U);
12298 return ChangeStatus::UNCHANGED;
12302 const std::string getAsStr(Attributor *
A)
const override {
12303 return "[" + std::to_string(
Uses.size()) +
" uses]";
12306 void trackStatistics()
const override {
12312 SmallPtrSet<const Use *, 8>
Uses;
12318struct AAIndirectCallInfoCallSite :
public AAIndirectCallInfo {
12319 AAIndirectCallInfoCallSite(
const IRPosition &IRP, Attributor &
A)
12320 : AAIndirectCallInfo(IRP,
A) {}
12324 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12325 if (!MD && !
A.isClosedWorldModule())
12329 for (
const auto &
Op : MD->operands())
12331 PotentialCallees.insert(Callee);
12332 }
else if (
A.isClosedWorldModule()) {
12334 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12335 PotentialCallees.insert_range(IndirectlyCallableFunctions);
12338 if (PotentialCallees.empty())
12339 indicateOptimisticFixpoint();
12347 SmallSetVector<Function *, 4> AssumedCalleesNow;
12348 bool AllCalleesKnownNow = AllCalleesKnown;
12350 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12351 bool &UsedAssumedInformation) {
12352 const auto *GIAA =
A.getAAFor<AAGlobalValueInfo>(
12354 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12356 UsedAssumedInformation = !GIAA->isAtFixpoint();
12360 auto AddPotentialCallees = [&]() {
12361 for (
auto *PotentialCallee : PotentialCallees) {
12362 bool UsedAssumedInformation =
false;
12363 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12364 AssumedCalleesNow.
insert(PotentialCallee);
12370 bool UsedAssumedInformation =
false;
12373 AA::ValueScope::AnyScope,
12374 UsedAssumedInformation)) {
12375 if (PotentialCallees.empty())
12376 return indicatePessimisticFixpoint();
12377 AddPotentialCallees();
12382 auto CheckPotentialCallee = [&](
Function &Fn) {
12383 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12386 auto &CachedResult = FilterResults[&Fn];
12387 if (CachedResult.has_value())
12388 return CachedResult.value();
12390 bool UsedAssumedInformation =
false;
12391 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12392 if (!UsedAssumedInformation)
12393 CachedResult =
false;
12402 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12403 bool IsKnown =
false;
12406 DepClassTy::OPTIONAL, IsKnown)) {
12408 CachedResult =
false;
12413 CachedResult =
true;
12419 for (
auto &VAC : Values) {
12427 if (CheckPotentialCallee(*VACFn))
12428 AssumedCalleesNow.
insert(VACFn);
12431 if (!PotentialCallees.empty()) {
12432 AddPotentialCallees();
12435 AllCalleesKnownNow =
false;
12438 if (AssumedCalleesNow == AssumedCallees &&
12439 AllCalleesKnown == AllCalleesKnownNow)
12440 return ChangeStatus::UNCHANGED;
12442 std::swap(AssumedCallees, AssumedCalleesNow);
12443 AllCalleesKnown = AllCalleesKnownNow;
12444 return ChangeStatus::CHANGED;
12450 if (!AllCalleesKnown && AssumedCallees.empty())
12451 return ChangeStatus::UNCHANGED;
12454 bool UsedAssumedInformation =
false;
12455 if (
A.isAssumedDead(*CB,
this,
nullptr,
12456 UsedAssumedInformation))
12457 return ChangeStatus::UNCHANGED;
12461 if (
FP->getType()->getPointerAddressSpace())
12462 FP =
new AddrSpaceCastInst(
FP, PointerType::get(
FP->getContext(), 0),
12472 if (AssumedCallees.empty()) {
12473 assert(AllCalleesKnown &&
12474 "Expected all callees to be known if there are none.");
12475 A.changeToUnreachableAfterManifest(CB);
12476 return ChangeStatus::CHANGED;
12480 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12481 auto *NewCallee = AssumedCallees.front();
12484 NumIndirectCallsPromoted++;
12485 return ChangeStatus::CHANGED;
12492 A.deleteAfterManifest(*CB);
12493 return ChangeStatus::CHANGED;
12503 bool SpecializedForAnyCallees =
false;
12504 bool SpecializedForAllCallees = AllCalleesKnown;
12505 ICmpInst *LastCmp =
nullptr;
12508 for (Function *NewCallee : AssumedCallees) {
12509 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12510 AssumedCallees.size())) {
12511 SkippedAssumedCallees.
push_back(NewCallee);
12512 SpecializedForAllCallees =
false;
12515 SpecializedForAnyCallees =
true;
12521 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12522 A.registerManifestAddedBasicBlock(*IP->getParent());
12528 A.registerManifestAddedBasicBlock(*ElseBB);
12530 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12535 CastInst *RetBC =
nullptr;
12536 CallInst *NewCall =
nullptr;
12541 NumIndirectCallsPromoted++;
12549 auto AttachCalleeMetadata = [&](CallBase &IndirectCB) {
12550 if (!AllCalleesKnown)
12551 return ChangeStatus::UNCHANGED;
12552 MDBuilder MDB(IndirectCB.getContext());
12553 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12554 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12555 return ChangeStatus::CHANGED;
12558 if (!SpecializedForAnyCallees)
12559 return AttachCalleeMetadata(*CB);
12562 if (SpecializedForAllCallees) {
12565 new UnreachableInst(IP->getContext(), IP);
12566 IP->eraseFromParent();
12569 CBClone->setName(CB->
getName());
12570 CBClone->insertBefore(*IP->getParent(), IP);
12571 NewCalls.
push_back({CBClone,
nullptr});
12572 AttachCalleeMetadata(*CBClone);
12579 CB->
getParent()->getFirstInsertionPt());
12580 for (
auto &It : NewCalls) {
12581 CallBase *NewCall = It.first;
12582 Instruction *CallRet = It.second ? It.second : It.first;
12594 A.deleteAfterManifest(*CB);
12595 Changed = ChangeStatus::CHANGED;
12601 const std::string getAsStr(Attributor *
A)
const override {
12602 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12603 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12607 void trackStatistics()
const override {
12608 if (AllCalleesKnown) {
12610 Eliminated, CallSites,
12611 "Number of indirect call sites eliminated via specialization")
12614 "Number of indirect call sites specialized")
12618 bool foreachCallee(function_ref<
bool(Function *)> CB)
const override {
12619 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12624 DenseMap<Function *, std::optional<bool>> FilterResults;
12628 SmallSetVector<Function *, 4> PotentialCallees;
12632 SmallSetVector<Function *, 4> AssumedCallees;
12636 bool AllCalleesKnown =
true;
12643struct AAInvariantLoadPointerImpl
12644 :
public StateWrapper<BitIntegerState<uint8_t, 15>,
12645 AAInvariantLoadPointer> {
12649 IS_NOALIAS = 1 << 0,
12652 IS_NOEFFECT = 1 << 1,
12654 IS_LOCALLY_INVARIANT = 1 << 2,
12656 IS_LOCALLY_CONSTRAINED = 1 << 3,
12658 IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
12659 IS_LOCALLY_CONSTRAINED,
12661 static_assert(getBestState() == IS_BEST_STATE,
"Unexpected best state");
12664 StateWrapper<BitIntegerState<uint8_t, 15>, AAInvariantLoadPointer>;
12668 AAInvariantLoadPointerImpl(
const IRPosition &IRP, Attributor &
A)
12671 bool isKnownInvariant()
const final {
12672 return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
12675 bool isKnownLocallyInvariant()
const final {
12676 if (isKnown(IS_LOCALLY_INVARIANT))
12678 return isKnown(IS_NOALIAS | IS_NOEFFECT);
12681 bool isAssumedInvariant()
const final {
12682 return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);
12685 bool isAssumedLocallyInvariant()
const final {
12686 if (isAssumed(IS_LOCALLY_INVARIANT))
12688 return isAssumed(IS_NOALIAS | IS_NOEFFECT);
12695 if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
12696 return indicatePessimisticFixpoint();
12700 Changed |= updateLocalInvariance(
A);
12706 if (!isKnownInvariant())
12707 return ChangeStatus::UNCHANGED;
12710 const Value *Ptr = &getAssociatedValue();
12711 const auto TagInvariantLoads = [&](
const Use &
U,
bool &) {
12712 if (
U.get() != Ptr)
12720 if (!
A.isRunOn(
I->getFunction()))
12723 if (
I->hasMetadata(LLVMContext::MD_invariant_load))
12727 LI->setMetadata(LLVMContext::MD_invariant_load,
12729 Changed = ChangeStatus::CHANGED;
12734 (void)
A.checkForAllUses(TagInvariantLoads, *
this, *Ptr);
12739 const std::string getAsStr(Attributor *)
const override {
12740 if (isKnownInvariant())
12741 return "load-invariant pointer";
12742 return "non-invariant pointer";
12746 void trackStatistics()
const override {}
12750 bool requiresNoAlias()
const {
12751 switch (getPositionKind()) {
12757 case IRP_CALL_SITE:
12759 case IRP_CALL_SITE_RETURNED: {
12764 case IRP_ARGUMENT: {
12765 const Function *
F = getAssociatedFunction();
12766 assert(
F &&
"no associated function for argument");
12772 bool isExternal()
const {
12773 const Function *
F = getAssociatedFunction();
12777 getPositionKind() != IRP_CALL_SITE_RETURNED;
12781 if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
12782 return ChangeStatus::UNCHANGED;
12785 if (
const auto *ANoAlias =
A.getOrCreateAAFor<AANoAlias>(
12786 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12787 if (ANoAlias->isKnownNoAlias()) {
12788 addKnownBits(IS_NOALIAS);
12789 return ChangeStatus::CHANGED;
12792 if (!ANoAlias->isAssumedNoAlias()) {
12793 removeAssumedBits(IS_NOALIAS);
12794 return ChangeStatus::CHANGED;
12797 return ChangeStatus::UNCHANGED;
12802 if (
const Argument *Arg = getAssociatedArgument()) {
12804 addKnownBits(IS_NOALIAS);
12805 return ChangeStatus::UNCHANGED;
12810 removeAssumedBits(IS_NOALIAS);
12811 return ChangeStatus::CHANGED;
12814 return ChangeStatus::UNCHANGED;
12818 if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
12819 return ChangeStatus::UNCHANGED;
12821 if (!getAssociatedFunction())
12822 return indicatePessimisticFixpoint();
12825 return indicatePessimisticFixpoint();
12827 const auto HasNoEffectLoads = [&](
const Use &
U,
bool &) {
12829 return !LI || !LI->mayHaveSideEffects();
12831 if (!
A.checkForAllUses(HasNoEffectLoads, *
this, getAssociatedValue()))
12832 return indicatePessimisticFixpoint();
12834 if (
const auto *AMemoryBehavior =
A.getOrCreateAAFor<AAMemoryBehavior>(
12835 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12838 if (!AMemoryBehavior->isAssumedReadOnly())
12839 return indicatePessimisticFixpoint();
12841 if (AMemoryBehavior->isKnownReadOnly()) {
12842 addKnownBits(IS_NOEFFECT);
12843 return ChangeStatus::UNCHANGED;
12846 return ChangeStatus::UNCHANGED;
12849 if (
const Argument *Arg = getAssociatedArgument()) {
12851 addKnownBits(IS_NOEFFECT);
12852 return ChangeStatus::UNCHANGED;
12857 return indicatePessimisticFixpoint();
12860 return ChangeStatus::UNCHANGED;
12864 if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
12865 return ChangeStatus::UNCHANGED;
12868 const auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
12869 getIRPosition(),
this, DepClassTy::REQUIRED);
12871 return ChangeStatus::UNCHANGED;
12873 bool UsedAssumedInformation =
false;
12874 const auto IsLocallyInvariantLoadIfPointer = [&](
const Value &
V) {
12875 if (!
V.getType()->isPointerTy())
12877 const auto *IsInvariantLoadPointer =
12879 DepClassTy::REQUIRED);
12881 if (!IsInvariantLoadPointer)
12884 if (IsInvariantLoadPointer->isKnownLocallyInvariant())
12886 if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
12889 UsedAssumedInformation =
true;
12892 if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
12893 return indicatePessimisticFixpoint();
12899 if (!IsLocallyInvariantLoadIfPointer(*Arg))
12900 return indicatePessimisticFixpoint();
12905 if (!UsedAssumedInformation) {
12907 addKnownBits(IS_LOCALLY_INVARIANT);
12908 return ChangeStatus::CHANGED;
12911 return ChangeStatus::UNCHANGED;
12915struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
12916 AAInvariantLoadPointerFloating(
const IRPosition &IRP, Attributor &
A)
12917 : AAInvariantLoadPointerImpl(IRP,
A) {}
12920struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
12921 AAInvariantLoadPointerReturned(
const IRPosition &IRP, Attributor &
A)
12922 : AAInvariantLoadPointerImpl(IRP,
A) {}
12925 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12929struct AAInvariantLoadPointerCallSiteReturned final
12930 : AAInvariantLoadPointerImpl {
12931 AAInvariantLoadPointerCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12932 : AAInvariantLoadPointerImpl(IRP,
A) {}
12935 const Function *
F = getAssociatedFunction();
12936 assert(
F &&
"no associated function for return from call");
12938 if (!
F->isDeclaration() && !
F->isIntrinsic())
12939 return AAInvariantLoadPointerImpl::initialize(
A);
12944 return AAInvariantLoadPointerImpl::initialize(
A);
12946 if (
F->onlyReadsMemory() &&
F->hasNoSync())
12947 return AAInvariantLoadPointerImpl::initialize(
A);
12951 indicatePessimisticFixpoint();
12955struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
12956 AAInvariantLoadPointerArgument(
const IRPosition &IRP, Attributor &
A)
12957 : AAInvariantLoadPointerImpl(IRP,
A) {}
12960 const Function *
F = getAssociatedFunction();
12961 assert(
F &&
"no associated function for argument");
12964 addKnownBits(IS_LOCALLY_CONSTRAINED);
12968 if (!
F->hasLocalLinkage())
12969 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12973struct AAInvariantLoadPointerCallSiteArgument final
12974 : AAInvariantLoadPointerImpl {
12975 AAInvariantLoadPointerCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12976 : AAInvariantLoadPointerImpl(IRP,
A) {}
12983template <
typename InstType>
12984static bool makeChange(Attributor &
A, InstType *MemInst,
const Use &U,
12985 Value *OriginalValue, PointerType *NewPtrTy,
12986 bool UseOriginalValue) {
12987 if (
U.getOperandNo() != InstType::getPointerOperandIndex())
12990 if (MemInst->isVolatile()) {
12991 auto *
TTI =
A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(
12992 *MemInst->getFunction());
12993 unsigned NewAS = NewPtrTy->getPointerAddressSpace();
12998 if (UseOriginalValue) {
12999 A.changeUseAfterManifest(
const_cast<Use &
>(U), *OriginalValue);
13003 Instruction *CastInst =
new AddrSpaceCastInst(OriginalValue, NewPtrTy);
13005 A.changeUseAfterManifest(
const_cast<Use &
>(U), *CastInst);
13009struct AAAddressSpaceImpl :
public AAAddressSpace {
13010 AAAddressSpaceImpl(
const IRPosition &IRP, Attributor &
A)
13011 : AAAddressSpace(IRP,
A) {}
13014 assert(isValidState() &&
"the AA is invalid");
13015 return AssumedAddressSpace;
13020 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13021 "Associated value is not a pointer");
13023 if (!
A.getInfoCache().getFlatAddressSpace().has_value()) {
13024 indicatePessimisticFixpoint();
13028 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13029 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13030 if (AS != FlatAS) {
13031 [[maybe_unused]]
bool R = takeAddressSpace(AS);
13032 assert(R &&
"The take should happen");
13033 indicateOptimisticFixpoint();
13038 uint32_t OldAddressSpace = AssumedAddressSpace;
13039 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13041 auto CheckAddressSpace = [&](
Value &
Obj) {
13047 unsigned ObjAS =
Obj.getType()->getPointerAddressSpace();
13048 if (ObjAS != FlatAS)
13049 return takeAddressSpace(ObjAS);
13063 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(*F);
13065 if (AssumedAS != ~0U)
13066 return takeAddressSpace(AssumedAS);
13070 return takeAddressSpace(FlatAS);
13073 auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(getIRPosition(),
this,
13074 DepClassTy::REQUIRED);
13075 if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
13076 return indicatePessimisticFixpoint();
13078 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
13079 : ChangeStatus::CHANGED;
13086 if (NewAS == InvalidAddressSpace ||
13088 return ChangeStatus::UNCHANGED;
13090 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13092 Value *AssociatedValue = &getAssociatedValue();
13093 Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
13096 PointerType::get(getAssociatedType()->
getContext(), NewAS);
13097 bool UseOriginalValue =
13102 auto Pred = [&](
const Use &
U,
bool &) {
13103 if (
U.get() != AssociatedValue)
13114 makeChange(
A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13117 makeChange(
A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13120 makeChange(
A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
13123 makeChange(
A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
13130 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
13133 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13137 const std::string getAsStr(Attributor *
A)
const override {
13138 if (!isValidState())
13139 return "addrspace(<invalid>)";
13140 return "addrspace(" +
13141 (AssumedAddressSpace == InvalidAddressSpace
13143 : std::to_string(AssumedAddressSpace)) +
13148 uint32_t AssumedAddressSpace = InvalidAddressSpace;
13150 bool takeAddressSpace(uint32_t AS) {
13151 if (AssumedAddressSpace == InvalidAddressSpace) {
13152 AssumedAddressSpace = AS;
13155 return AssumedAddressSpace == AS;
13158 static Value *peelAddrspacecast(
Value *V,
unsigned FlatAS) {
13160 assert(
I->getSrcAddressSpace() != FlatAS &&
13161 "there should not be flat AS -> non-flat AS");
13162 return I->getPointerOperand();
13165 if (
C->getOpcode() == Instruction::AddrSpaceCast) {
13166 assert(
C->getOperand(0)->getType()->getPointerAddressSpace() !=
13168 "there should not be flat AS -> non-flat AS X");
13169 return C->getOperand(0);
13175struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
13176 AAAddressSpaceFloating(
const IRPosition &IRP, Attributor &
A)
13177 : AAAddressSpaceImpl(IRP,
A) {}
13179 void trackStatistics()
const override {
13184struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
13185 AAAddressSpaceReturned(
const IRPosition &IRP, Attributor &
A)
13186 : AAAddressSpaceImpl(IRP,
A) {}
13192 (void)indicatePessimisticFixpoint();
13195 void trackStatistics()
const override {
13200struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
13201 AAAddressSpaceCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13202 : AAAddressSpaceImpl(IRP,
A) {}
13204 void trackStatistics()
const override {
13209struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
13210 AAAddressSpaceArgument(
const IRPosition &IRP, Attributor &
A)
13211 : AAAddressSpaceImpl(IRP,
A) {}
13216struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
13217 AAAddressSpaceCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13218 : AAAddressSpaceImpl(IRP,
A) {}
13224 (void)indicatePessimisticFixpoint();
13227 void trackStatistics()
const override {
13242struct AANoAliasAddrSpaceImpl :
public AANoAliasAddrSpace {
13243 AANoAliasAddrSpaceImpl(
const IRPosition &IRP, Attributor &
A)
13244 : AANoAliasAddrSpace(IRP,
A) {}
13247 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13248 "Associated value is not a pointer");
13252 std::optional<unsigned> FlatAS =
A.getInfoCache().getFlatAddressSpace();
13253 if (!FlatAS.has_value()) {
13254 indicatePessimisticFixpoint();
13260 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13261 if (AS != *FlatAS) {
13263 indicateOptimisticFixpoint();
13268 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13269 uint32_t OldAssumed = getAssumed();
13271 auto CheckAddressSpace = [&](
Value &
Obj) {
13275 unsigned AS =
Obj.getType()->getPointerAddressSpace();
13279 removeAS(
Obj.getType()->getPointerAddressSpace());
13283 const AAUnderlyingObjects *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
13284 getIRPosition(),
this, DepClassTy::REQUIRED);
13286 return indicatePessimisticFixpoint();
13288 return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
13289 : ChangeStatus::CHANGED;
13294 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13296 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13297 if (AS != FlatAS ||
Map.empty())
13298 return ChangeStatus::UNCHANGED;
13300 LLVMContext &Ctx = getAssociatedValue().getContext();
13301 MDNode *NoAliasASNode =
nullptr;
13302 MDBuilder MDB(Ctx);
13304 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13307 unsigned Upper =
I.stop();
13308 unsigned Lower =
I.start();
13309 if (!NoAliasASNode) {
13310 NoAliasASNode = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13313 MDNode *ASRange = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13317 Value *AssociatedValue = &getAssociatedValue();
13320 auto AddNoAliasAttr = [&](
const Use &
U,
bool &) {
13321 if (
U.get() != AssociatedValue)
13324 if (!Inst || Inst->
hasMetadata(LLVMContext::MD_noalias_addrspace))
13331 Inst->
setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
13335 (void)
A.checkForAllUses(AddNoAliasAttr, *
this, *AssociatedValue,
13337 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13341 const std::string getAsStr(Attributor *
A)
const override {
13342 if (!isValidState())
13343 return "<invalid>";
13345 raw_string_ostream OS(Str);
13346 OS <<
"CanNotBeAddrSpace(";
13347 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13348 unsigned Upper =
I.stop();
13349 unsigned Lower =
I.start();
13350 OS <<
' ' <<
'[' <<
Upper <<
',' <<
Lower + 1 <<
')';
13357 void removeAS(
unsigned AS) {
13358 RangeMap::iterator
I =
Map.find(AS);
13360 if (
I !=
Map.end()) {
13361 unsigned Upper =
I.stop();
13362 unsigned Lower =
I.start();
13366 if (AS != ~((
unsigned)0) && AS + 1 <=
Upper)
13368 if (AS != 0 &&
Lower <= AS - 1)
13373 void resetASRanges(Attributor &
A) {
13375 Map.insert(0,
A.getInfoCache().getMaxAddrSpace(),
true);
13379struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
13380 AANoAliasAddrSpaceFloating(
const IRPosition &IRP, Attributor &
A)
13381 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13383 void trackStatistics()
const override {
13388struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
13389 AANoAliasAddrSpaceReturned(
const IRPosition &IRP, Attributor &
A)
13390 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13392 void trackStatistics()
const override {
13397struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
13398 AANoAliasAddrSpaceCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13399 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13401 void trackStatistics()
const override {
13406struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
13407 AANoAliasAddrSpaceArgument(
const IRPosition &IRP, Attributor &
A)
13408 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13410 void trackStatistics()
const override {
13415struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
13416 AANoAliasAddrSpaceCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13417 : AANoAliasAddrSpaceImpl(IRP,
A) {}
13419 void trackStatistics()
const override {
13426struct AAAllocationInfoImpl :
public AAAllocationInfo {
13427 AAAllocationInfoImpl(
const IRPosition &IRP, Attributor &
A)
13428 : AAAllocationInfo(IRP,
A) {}
13430 std::optional<TypeSize> getAllocatedSize()
const override {
13431 assert(isValidState() &&
"the AA is invalid");
13432 return AssumedAllocatedSize;
13435 std::optional<TypeSize> findInitialAllocationSize(Instruction *
I,
13436 const DataLayout &
DL) {
13439 switch (
I->getOpcode()) {
13440 case Instruction::Alloca: {
13445 return std::nullopt;
13451 const IRPosition &IRP = getIRPosition();
13456 return indicatePessimisticFixpoint();
13458 bool IsKnownNoCapture;
13460 A,
this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
13461 return indicatePessimisticFixpoint();
13463 const AAPointerInfo *PI =
13464 A.getOrCreateAAFor<AAPointerInfo>(IRP, *
this, DepClassTy::REQUIRED);
13467 return indicatePessimisticFixpoint();
13470 return indicatePessimisticFixpoint();
13472 const DataLayout &
DL =
A.getDataLayout();
13473 const auto AllocationSize = findInitialAllocationSize(
I,
DL);
13476 if (!AllocationSize)
13477 return indicatePessimisticFixpoint();
13481 if (*AllocationSize == 0)
13482 return indicatePessimisticFixpoint();
13488 return indicatePessimisticFixpoint();
13490 if (BinSize == 0) {
13491 auto NewAllocationSize = std::make_optional<TypeSize>(0,
false);
13492 if (!changeAllocationSize(NewAllocationSize))
13493 return ChangeStatus::UNCHANGED;
13494 return ChangeStatus::CHANGED;
13498 const auto &It = PI->
begin();
13501 if (It->first.Offset != 0)
13502 return indicatePessimisticFixpoint();
13504 uint64_t SizeOfBin = It->first.Offset + It->first.Size;
13506 if (SizeOfBin >= *AllocationSize)
13507 return indicatePessimisticFixpoint();
13509 auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8,
false);
13511 if (!changeAllocationSize(NewAllocationSize))
13512 return ChangeStatus::UNCHANGED;
13514 return ChangeStatus::CHANGED;
13520 assert(isValidState() &&
13521 "Manifest should only be called if the state is valid.");
13525 auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
13527 unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
13529 switch (
I->getOpcode()) {
13531 case Instruction::Alloca: {
13535 Type *CharType = Type::getInt8Ty(
I->getContext());
13537 auto *NumBytesToValue =
13538 ConstantInt::get(
I->getContext(), APInt(32, NumBytesToAllocate));
13541 insertPt = std::next(insertPt);
13542 AllocaInst *NewAllocaInst =
13547 return ChangeStatus::CHANGED;
13555 return ChangeStatus::UNCHANGED;
13559 const std::string getAsStr(Attributor *
A)
const override {
13560 if (!isValidState())
13561 return "allocationinfo(<invalid>)";
13562 return "allocationinfo(" +
13563 (AssumedAllocatedSize == HasNoAllocationSize
13565 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
13570 std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
13574 bool changeAllocationSize(std::optional<TypeSize>
Size) {
13575 if (AssumedAllocatedSize == HasNoAllocationSize ||
13576 AssumedAllocatedSize !=
Size) {
13577 AssumedAllocatedSize =
Size;
13584struct AAAllocationInfoFloating : AAAllocationInfoImpl {
13585 AAAllocationInfoFloating(
const IRPosition &IRP, Attributor &
A)
13586 : AAAllocationInfoImpl(IRP,
A) {}
13588 void trackStatistics()
const override {
13593struct AAAllocationInfoReturned : AAAllocationInfoImpl {
13594 AAAllocationInfoReturned(
const IRPosition &IRP, Attributor &
A)
13595 : AAAllocationInfoImpl(IRP,
A) {}
13601 (void)indicatePessimisticFixpoint();
13604 void trackStatistics()
const override {
13609struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
13610 AAAllocationInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
13611 : AAAllocationInfoImpl(IRP,
A) {}
13613 void trackStatistics()
const override {
13618struct AAAllocationInfoArgument : AAAllocationInfoImpl {
13619 AAAllocationInfoArgument(
const IRPosition &IRP, Attributor &
A)
13620 : AAAllocationInfoImpl(IRP,
A) {}
13622 void trackStatistics()
const override {
13627struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
13628 AAAllocationInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
13629 : AAAllocationInfoImpl(IRP,
A) {}
13634 (void)indicatePessimisticFixpoint();
13637 void trackStatistics()
const override {
13686#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13687 case IRPosition::PK: \
13688 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13690#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13691 case IRPosition::PK: \
13692 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13696#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13697 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13698 CLASS *AA = nullptr; \
13699 switch (IRP.getPositionKind()) { \
13700 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13701 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13702 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13703 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13704 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13705 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13706 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13707 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13712#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13713 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13714 CLASS *AA = nullptr; \
13715 switch (IRP.getPositionKind()) { \
13716 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13717 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13718 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13719 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13720 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13721 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13722 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13723 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13728#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13729 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13730 CLASS *AA = nullptr; \
13731 switch (IRP.getPositionKind()) { \
13732 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13734 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
13740#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13741 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13742 CLASS *AA = nullptr; \
13743 switch (IRP.getPositionKind()) { \
13744 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13745 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13746 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13747 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13748 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13749 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13750 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13751 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13756#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13757 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13758 CLASS *AA = nullptr; \
13759 switch (IRP.getPositionKind()) { \
13760 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13761 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13762 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13763 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13764 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13765 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13766 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13767 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13772#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13773 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13774 CLASS *AA = nullptr; \
13775 switch (IRP.getPositionKind()) { \
13776 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13777 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13778 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13779 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13780 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13781 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13782 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13783 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13835#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13836#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13837#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13838#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13839#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13840#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13841#undef SWITCH_PK_CREATE
13842#undef SWITCH_PK_INV
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefInfo InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
This file defines DenseMapInfo traits for DenseMap.
Machine Check Debug Module
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
dot regions Print regions of function to dot true view regions View regions of function(with no function bodies)"
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
BaseType
A given derived pointer can have multiple base pointers through phi/selects.
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, const llvm::StringTable &StandardNames, VectorLibrary VecLib)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
Functions, function parameters, and return types can have attributes to indicate how they should be t...
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Instruction & front() const
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
unsigned getNumSuccessors() const
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
This is an important class for using LLVM in a threaded context.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
static MemoryEffectsBase readOnly()
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase none()
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
const Use & getOperandUse(unsigned i) const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
static constexpr unsigned MaxAlignmentExponent
The maximum alignment for instructions.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
SetVector< Function * >::iterator I
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Abstract Attribute helper functions.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
SmallPtrSet< Instruction *, 4 > InstExclusionSetTy
LLVM_ABI bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool hasAssumedIRAttr(Attributor &A, const AbstractAttribute *QueryingAA, const IRPosition &IRP, DepClassTy DepClass, bool &IsKnown, bool IgnoreSubsumingPositions=false, const AAType **AAPtr=nullptr)
Helper to avoid creating an AA for IR Attributes that might already be set.
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ Unsupported
This operation is completely unsupported on the target.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
@ Valid
The data is already valid.
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract_or_null(Y &&MD)
Extract a Value from Metadata, if any, allowing null.
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > extract(Y &&MD)
Extract a Value from Metadata.
@ User
could "use" a pointer
NodeAddr< UseNode * > Use
Context & getContext() const
friend class Instruction
Iterator for Instructions in a `BasicBlock.
LLVM_ABI iterator begin() const
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
FunctionAddr VTableAddr Value
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
@ Undef
Value of the register doesn't matter.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool isa_and_nonnull(const Y &Val)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
Return a range that applies F to the elements of C.
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
auto dyn_cast_or_null(const Y &Val)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
@ Success
The lock was released successfully.
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
PotentialValuesState< APInt > PotentialConstantIntValuesState
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy
auto pred_begin(const MachineBasicBlock *BB)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
BumpPtrAllocatorImpl<> BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
GenericCycleInfo< SSAContext > CycleInfo
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
----------------—AAIntraFnReachability Attribute-----------------------—
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
const Instruction * From
Start here,.
Reachable Result
and remember if it worked:
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
const ToTy * To
reach this place,
const AA::InstExclusionSetTy * ExclusionSet
without going through any of these instructions,
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of denormal_fpenv to a known denormal mod...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves it's defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this possition can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
@ NO_CAPTURE
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
OffsetBinsTy::const_iterator const_bin_iterator
virtual const_bin_iterator begin() const =0
DenseMap< AA::RangeTy, SmallSet< unsigned, 4 > > OffsetBinsTy
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
const Instruction * getCtxI() const
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everythign but the instruction.
static unsigned getHashValue(const Access &A)
AAPointerInfo::Access Access
static Access getTombstoneKey()
DenseMapInfo< Instruction * > Base
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
constexpr uint64_t value() const
This is a hole in the type system and should not be abused.
std::function< void( const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >( const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
Simple wrapper for a single bit (boolean) state.
static constexpr DenormalFPEnv getDefault()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
AAPointerInfo::Access Access
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
DenseMapInfo< std::pair< const Instruction *, const ToTy * > > PairDMI
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
DenseMapInfo< const AA::InstExclusionSetTy * > InstSetDMI
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argument, otherwise a negative value.
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Helper that allows to insert a new assumption string in the known assumption set by creating a (static) object.
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
A "must be executed context" for a given program point PP is the set of instructions, potentially before and after PP, that are executed always when PP is reached.
iterator & end()
Return a universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
}
static unsigned MaxPotentialValues
Helper to tie an abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.
std::optional< Value * > SimplifiedAssociatedValue
An assumed simplified value.
Type * Ty
The type of the original value.