#include "llvm/IR/IntrinsicsAMDGPU.h"
#include "llvm/IR/IntrinsicsNVPTX.h"

#define DEBUG_TYPE "attributor"

    cl::desc("Manifest Attributor internal string attributes."),

    cl::desc("Maximum number of potential values to be "
             "tracked for each position."),

    "attributor-max-potential-values-iterations", cl::Hidden,
    cl::desc(
        "Maximum number of iterations we keep dismantling potential values."),

STATISTIC(NumAAs, "Number of abstract attributes created");
STATISTIC(NumIndirectCallsPromoted, "Number of indirect calls promoted");
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME)                                     \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG)                                            \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
#define STATS_DECLTRACK(NAME, TYPE, MSG)                                       \
  {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
#define STATS_DECLTRACK_ARG_ATTR(NAME)                                         \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSArguments,                                           \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME)                                          \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, FunctionReturn,                                        \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME)                                       \
  STATS_DECLTRACK(NAME, CSReturn,                                              \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)                                    \
  STATS_DECLTRACK(NAME, Floating,                                              \
                  ("Number of floating values known to be '" #NAME "'"))
#define PIPE_OPERATOR(CLASS)                                                   \
  raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) {                  \
    return OS << static_cast<const AbstractAttribute &>(AA);                   \
  }
                          bool HeaderOnly, Cycle **CPtr = nullptr) {
  auto *BB = I->getParent();
  return !HeaderOnly || BB == C->getHeader();

  if (DL.getTypeSizeInBits(Ty) != DL.getTypeAllocSizeInBits(Ty))

    StartPos += DL.getTypeAllocSizeInBits(ElTy);

                                      bool AllowVolatile) {
  if (!AllowVolatile && I->isVolatile())

    return LI->getPointerOperand();
    return SI->getPointerOperand();
    return CXI->getPointerOperand();
    return RMWI->getPointerOperand();

                            bool GetMinOffset, bool AllowNonInbounds,
                            bool UseAssumed = false) {

  auto AttributorAnalysis = [&](Value &V, APInt &ROffset) -> bool {
    if (!ValueConstantRangeAA)
    if (Range.isFullSet())
      ROffset = Range.getSignedMin();
      ROffset = Range.getSignedMax();

    const Value *Ptr, int64_t &BytesOffset,
  APInt OffsetAPInt(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
                    true, AllowNonInbounds);
template <typename AAType, typename StateType = typename AAType::StateType,
          bool RecurseForSelectAndPHI = true>
    Attributor &A, const AAType &QueryingAA, StateType &S,
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
                    << QueryingAA << " into " << S << "\n");

  assert((QueryingAA.getIRPosition().getPositionKind() ==
          QueryingAA.getIRPosition().getPositionKind() ==
         "Can only clamp returned value states for a function returned or call "
         "site returned position!");

  std::optional<StateType> T;

  auto CheckReturnValue = [&](Value &RV) -> bool {
                      << " AA: " << AA->getAsStr(&A) << " @ " << RVPos << "\n");
    const StateType &AAS = AA->getState();
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
    return T->isValidState();

  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
                                   RecurseForSelectAndPHI))
    S.indicatePessimisticFixpoint();
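// The clamping above intersects the states of all (recursively reachable)
// returned values into the single state S; AAReturnedFromReturnedValues below
// wraps this pattern so an AA for a function-return position can be deduced
// directly from the returned values.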
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool PropagateCallBaseContext = false,
          bool RecurseForSelectAndPHI = true>
struct AAReturnedFromReturnedValues : public BaseType {
  AAReturnedFromReturnedValues(const IRPosition &IRP, Attributor &A)

    StateType S(StateType::getBestState(this->getState()));
                             RecurseForSelectAndPHI>(
        PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
template <typename AAType, typename StateType = typename AAType::StateType,
static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA,
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
                    << QueryingAA << " into " << S << "\n");

  assert(QueryingAA.getIRPosition().getPositionKind() ==
         "Can only clamp call site argument states for an argument position!");

  std::optional<StateType> T;

  unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();

    LLVM_DEBUG(dbgs() << "[Attributor] ACS: " << *ACS.getInstruction()
                      << " AA: " << AA->getAsStr(&A) << " @" << ACSArgPos
    const StateType &AAS = AA->getState();
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
    return T->isValidState();

  bool UsedAssumedInformation = false;
  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true,
                              UsedAssumedInformation))
    S.indicatePessimisticFixpoint();
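// Analogous to the return-value clamping above, but in the opposite direction:
// the state of an argument position is the meet over the states of the
// corresponding call site arguments at all known call sites; if not all call
// sites can be visited, the state falls back to the pessimistic fixpoint.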
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
bool getArgumentStateFromCallBaseContext(Attributor &A,
         "Expected an 'argument' position!");
  assert(ArgNo >= 0 && "Invalid Arg No!");

  const StateType &CBArgumentState =
      static_cast<const StateType &>(AA->getState());
  LLVM_DEBUG(dbgs() << "[Attributor] Bridging call site context to argument"
                    << "Position:" << Pos << "CB Arg state:" << CBArgumentState
  State ^= CBArgumentState;
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          bool BridgeCallBaseContext = false,
struct AAArgumentFromCallSiteArguments : public BaseType {
  AAArgumentFromCallSiteArguments(const IRPosition &IRP, Attributor &A)

    StateType S = StateType::getBestState(this->getState());

    if (BridgeCallBaseContext) {
          getArgumentStateFromCallBaseContext<AAType, BaseType, StateType,
              A, *this, this->getIRPosition(), S);

    clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(A, *this,
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool IntroduceCallBaseContext = false,
struct AACalleeToCallSite : public BaseType {
  AACalleeToCallSite(const IRPosition &IRP, Attributor &A) : BaseType(IRP, A) {}

    auto IRPKind = this->getIRPosition().getPositionKind();
           "Can only wrap function returned positions for call site "
           "returned positions!");
    auto &S = this->getState();

    if (IntroduceCallBaseContext)
      LLVM_DEBUG(dbgs() << "[Attributor] Introducing call base context:" << CB

    for (const Function *Callee : Callees) {
                                  IntroduceCallBaseContext ? &CB : nullptr)
              *Callee, IntroduceCallBaseContext ? &CB : nullptr);
      if (Attribute::isEnumAttrKind(IRAttributeKind)) {
                A, this, FnPos, DepClassTy::REQUIRED, IsKnown))
            A.getAAFor<AAType>(*this, FnPos, DepClassTy::REQUIRED);
        if (S.isAtFixpoint())
          return S.isValidState();

    if (!A.checkForAllCallees(CalleePred, *this, CB))
      return S.indicatePessimisticFixpoint();
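// AACalleeToCallSite transfers information from callee positions to the
// corresponding call site position: for enum attribute kinds it queries
// AA::hasAssumedIRAttr on the callee position, otherwise it clamps the callee
// AA's state into the call site state, and it gives up (pessimistic fixpoint)
// when not all callees are known.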
template <class AAType, typename StateType = typename AAType::StateType>
  auto EIt = Explorer.begin(CtxI), EEnd = Explorer.end(CtxI);
  for (unsigned u = 0; u < Uses.size(); ++u) {
      if (Found && AA.followUseInMBEC(A, U, UserI, State))

template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInMBEC(AAType &AA, Attributor &A, StateType &S,
  const Value &Val = AA.getIRPosition().getAssociatedValue();
      A.getInfoCache().getMustBeExecutedContextExplorer();

  for (const Use &U : Val.uses())

  followUsesInContext<AAType>(AA, A, *Explorer, &CtxI, Uses, S);

  if (S.isAtFixpoint())

  if (Br->isConditional())

  StateType ParentState;

  ParentState.indicateOptimisticFixpoint();

  for (const BasicBlock *BB : Br->successors()) {
    StateType ChildState;

    size_t BeforeSize = Uses.size();
    followUsesInContext(AA, A, *Explorer, &BB->front(), Uses, ChildState);

    for (auto It = Uses.begin() + BeforeSize; It != Uses.end();)

    ParentState &= ChildState;
  R.indicatePessimisticFixpoint();

    BS.indicateOptimisticFixpoint();
    BS.indicatePessimisticFixpoint();

  template <typename F>
      if (!Range.mayOverlap(ItRange))
      bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
      for (auto Index : It.getSecond()) {

  template <typename F>
    for (unsigned Index : LocalList->getSecond()) {

    if (Range.offsetAndSizeAreUnknown())

    RemoteI = RemoteI ? RemoteI : &I;

    bool AccExists = false;
    for (auto Index : LocalList) {
      if (A.getLocalInst() == &I) {

               << "[AAPointerInfo] Inserting access in new offset bins\n";);
      for (auto Key : ToAdd) {

      AccessList.emplace_back(&I, RemoteI, Ranges, Content, Kind, Ty);
             "New Access should have been at AccIndex");
      LocalList.push_back(AccIndex);

    auto Before = Current;
    if (Current == Before)
    auto &ExistingRanges = Before.getRanges();
    auto &NewRanges = Current.getRanges();

               << "[AAPointerInfo] Removing access from old offset bins\n";);
           "Expected bin to actually contain the Access.");
        Bin.erase(AccIndex);
struct AAPointerInfoImpl
    : public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {

  const std::string getAsStr(Attributor *A) const override {
    return std::string("PointerInfo ") +
           (isValidState() ? (std::string("#") +
                              std::to_string(OffsetBins.size()) + " bins")
                        [](int64_t O) { return std::to_string(O); }),

    return AAPointerInfo::manifest(A);

  const_bin_iterator begin() const override { return State::begin(); }
  const_bin_iterator end() const override { return State::end(); }
  int64_t numOffsetBins() const override { return State::numOffsetBins(); }
  bool reachesReturn() const override {
    return !ReturnedOffsets.isUnassigned();
  void addReturnedOffsetsTo(OffsetInfo &OI) const override {
    if (ReturnedOffsets.isUnknown()) {

    OffsetInfo MergedOI;
    for (auto Offset : ReturnedOffsets) {
      OffsetInfo TmpOI = OI;
      MergedOI.merge(TmpOI);
    OI = std::move(MergedOI);

  ChangeStatus setReachesReturn(const OffsetInfo &ReachedReturnedOffsets) {
    if (ReturnedOffsets.isUnknown())
      return ChangeStatus::UNCHANGED;
    if (ReachedReturnedOffsets.isUnknown()) {
      ReturnedOffsets.setUnknown();
      return ChangeStatus::CHANGED;
    if (ReturnedOffsets.merge(ReachedReturnedOffsets))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
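  // ReturnedOffsets tracks at which offsets the underlying pointer is returned
  // from the associated function; call sites use this to propagate accesses
  // performed on the returned pointer back to the original pointer.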
  bool forallInterferingAccesses(
      function_ref<bool(const AAPointerInfo::Access &, bool)> CB)
    return State::forallInterferingAccesses(Range, CB);

  bool forallInterferingAccesses(
      Attributor &A, const AbstractAttribute &QueryingAA, Instruction &I,
      bool FindInterferingWrites, bool FindInterferingReads,
      function_ref<bool(const Access &, bool)> UserCB, bool &HasBeenWrittenTo,
      function_ref<bool(const Access &)> SkipCB) const override {
    HasBeenWrittenTo = false;

    SmallPtrSet<const Access *, 8> DominatingWrites;

    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(

    bool AllInSameNoSyncFn = IsAssumedNoSync;
    bool InstIsExecutedByInitialThreadOnly =
        ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(I);

    bool InstIsExecutedInAlignedRegion =
        FindInterferingReads && ExecDomainAA &&
        ExecDomainAA->isExecutedInAlignedRegion(A, I);

    if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
      A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    InformationCache &InfoCache = A.getInfoCache();
    bool IsThreadLocalObj =

    auto CanIgnoreThreadingForInst = [&](const Instruction &I) -> bool {
      if (IsThreadLocalObj || AllInSameNoSyncFn)
      const auto *FnExecDomainAA =
          I.getFunction() == &Scope
              : A.lookupAAFor<AAExecutionDomain>(
      if (!FnExecDomainAA)
      if (InstIsExecutedInAlignedRegion ||
          (FindInterferingWrites &&
           FnExecDomainAA->isExecutedInAlignedRegion(A, I))) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
      if (InstIsExecutedByInitialThreadOnly &&
          FnExecDomainAA->isExecutedByInitialThreadOnly(I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    auto CanIgnoreThreading = [&](const Access &Acc) -> bool {
      return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
             (Acc.getRemoteInst() != Acc.getLocalInst() &&
              CanIgnoreThreadingForInst(*Acc.getLocalInst()));

    bool IsKnownNoRecurse;
    bool InstInKernel = A.getInfoCache().isKernel(Scope);
    bool ObjHasKernelLifetime = false;
    const bool UseDominanceReasoning =
        FindInterferingWrites && IsKnownNoRecurse;
    const DominatorTree *DT =

      case AA::GPUAddressSpace::Shared:
      case AA::GPUAddressSpace::Constant:
      case AA::GPUAddressSpace::Local:

    std::function<bool(const Function &)> IsLiveInCalleeCB;

      const Function *AIFn = AI->getFunction();
      ObjHasKernelLifetime = A.getInfoCache().isKernel(*AIFn);
      bool IsKnownNoRecurse;
              IsKnownNoRecurse)) {
        IsLiveInCalleeCB = [AIFn](const Function &Fn) { return AIFn != &Fn; };
      ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
      if (ObjHasKernelLifetime)
        IsLiveInCalleeCB = [&A](const Function &Fn) {
          return !A.getInfoCache().isKernel(Fn);

    auto AccessCB = [&](const Access &Acc, bool Exact) {
      Function *AccScope = Acc.getRemoteInst()->getFunction();
      bool AccInSameScope = AccScope == &Scope;

      if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
          A.getInfoCache().isKernel(*AccScope))

      if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &I) {
        if (Acc.isWrite() || (isa<LoadInst>(I) && Acc.isWriteOrAssumption()))
          ExclusionSet.insert(Acc.getRemoteInst());

      if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
          (!FindInterferingReads || !Acc.isRead()))

      bool Dominates = FindInterferingWrites && DT && Exact &&
                       Acc.isMustAccess() && AccInSameScope &&
        DominatingWrites.insert(&Acc);

      AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &Scope;

      InterferingAccesses.push_back({&Acc, Exact});

    if (!State::forallInterferingAccesses(I, AccessCB, Range))

    HasBeenWrittenTo = !DominatingWrites.empty();

    for (const Access *Acc : DominatingWrites) {
      if (!LeastDominatingWriteInst) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      } else if (DT->dominates(LeastDominatingWriteInst,
                               Acc->getRemoteInst())) {
        LeastDominatingWriteInst = Acc->getRemoteInst();

    auto CanSkipAccess = [&](const Access &Acc, bool Exact) {
      if (SkipCB && SkipCB(Acc))
      if (!CanIgnoreThreading(Acc))

      bool ReadChecked = !FindInterferingReads;
      bool WriteChecked = !FindInterferingWrites;
                                    &ExclusionSet, IsLiveInCalleeCB))
      if (!WriteChecked) {
                                      &ExclusionSet, IsLiveInCalleeCB))
          WriteChecked = true;

      if (!WriteChecked && HasBeenWrittenTo &&
          Acc.getRemoteInst()->getFunction() != &Scope) {
        const auto *FnReachabilityAA = A.getAAFor<AAInterFnReachability>(
        if (FnReachabilityAA) {
          if (!FnReachabilityAA->instructionCanReach(
                  A, *LeastDominatingWriteInst,
                  *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
            WriteChecked = true;

      if (ReadChecked && WriteChecked)

      if (!DT || !UseDominanceReasoning)
      if (!DominatingWrites.count(&Acc))
      return LeastDominatingWriteInst != Acc.getRemoteInst();

    for (auto &It : InterferingAccesses) {
      if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
          !CanSkipAccess(*It.first, It.second)) {
        if (!UserCB(*It.first, It.second))
                                              const AAPointerInfo &OtherAA,
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);
    bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
    Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (IsByval && !RAcc.isRead())
        bool UsedAssumedInformation = false;
        auto Content = A.translateArgumentToCallSiteContent(
            RAcc.getContent(), CB, *this, UsedAssumedInformation);
        AK = AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
        AK = AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));

        Changed |= addAccess(A, RAcc.getRanges(), CB, Content, AK,
                             RAcc.getType(), RAcc.getRemoteInst());

  ChangeStatus translateAndAddState(Attributor &A, const AAPointerInfo &OtherAA,
                                    const OffsetInfo &Offsets, CallBase &CB,
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (!IsMustAcc && RAcc.isAssumption())
        for (auto Offset : Offsets) {
          if (!NewRanges.isUnknown()) {
            NewRanges.addToAllOffsets(Offset);
          Changed |= addAccess(A, NewRanges, CB, RAcc.getContent(), AK,
                               RAcc.getType(), RAcc.getRemoteInst());
  void trackPointerInfoStatistics(const IRPosition &IRP) const {}

  void dumpState(raw_ostream &O) {
    for (auto &It : OffsetBins) {
      O << "[" << It.first.Offset << "-" << It.first.Offset + It.first.Size
        << "] : " << It.getSecond().size() << "\n";
      for (auto AccIndex : It.getSecond()) {
        auto &Acc = AccessList[AccIndex];
        O << " - " << Acc.getKind() << " - " << *Acc.getLocalInst() << "\n";
        if (Acc.getLocalInst() != Acc.getRemoteInst())
          O << " --> " << *Acc.getRemoteInst()
        if (!Acc.isWrittenValueYetUndetermined()) {
            O << " - c: func " << Acc.getWrittenValue()->getName()
          else if (Acc.getWrittenValue())
            O << " - c: " << *Acc.getWrittenValue() << "\n";
            O << " - c: <unknown>\n";
struct AAPointerInfoFloating : public AAPointerInfoImpl {
  AAPointerInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  bool handleAccess(Attributor &A, Instruction &I,
                    std::optional<Value *> Content, AccessKind Kind,
    using namespace AA::PointerInfo;
    const DataLayout &DL = A.getDataLayout();
    TypeSize AccessSize = DL.getTypeStoreSize(&Ty);

    if (!VT || VT->getElementCount().isScalable() ||
        (*Content)->getType() != VT ||
        DL.getTypeStoreSize(VT->getElementType()).isScalable()) {

      int64_t ElementSize = DL.getTypeStoreSize(ElementType).getFixedValue();

      for (int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
            ConstContent, ConstantInt::get(Int32Ty, i));

        for (auto &ElementOffset : ElementOffsets)
          ElementOffset += ElementSize;

  bool collectConstantsForGEP(Attributor &A, const DataLayout &DL,
                              OffsetInfo &UsrOI, const OffsetInfo &PtrOI,
                              const GEPOperator *GEP);

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
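// collectConstantsForGEP (below) computes the possible constant offsets of a
// GEP: the constant part comes from GEPOperator::collectOffset, and each
// variable index contributes the cross product of its assumed potential
// constant values (scaled by the index's scale factor) merged into the user's
// OffsetInfo.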
bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &A,
                                                   const DataLayout &DL,
                                                   const OffsetInfo &PtrOI,
                                                   const GEPOperator *GEP) {
  unsigned BitWidth = DL.getIndexTypeSizeInBits(GEP->getType());
  SmallMapVector<Value *, APInt, 4> VariableOffsets;

  assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
         "Don't look for constant values if the offset has already been "
         "determined to be unknown.");

  if (!GEP->collectOffset(DL, BitWidth, VariableOffsets, ConstantOffset)) {

             << (VariableOffsets.empty() ? "" : "not") << " constant "

  Union.addToAll(ConstantOffset.getSExtValue());

  for (const auto &VI : VariableOffsets) {
    auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
    if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {

    if (PotentialConstantsAA->undefIsContained())

    auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
    if (AssumedSet.empty())

    for (const auto &ConstOffset : AssumedSet) {
      auto CopyPerOffset = Union;
      CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
                             VI.second.getZExtValue());
      Product.merge(CopyPerOffset);

  UsrOI = std::move(Union);
ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &A) {
  using namespace AA::PointerInfo;
  const DataLayout &DL = A.getDataLayout();
  Value &AssociatedValue = getAssociatedValue();

  DenseMap<Value *, OffsetInfo> OffsetInfoMap;
  OffsetInfoMap[&AssociatedValue].insert(0);

  auto HandlePassthroughUser = [&](Value *Usr, Value *CurPtr, bool &Follow) {
           "CurPtr does not exist in the map!");

    auto &UsrOI = OffsetInfoMap[Usr];
    auto &PtrOI = OffsetInfoMap[CurPtr];
    assert(!PtrOI.isUnassigned() &&
           "Cannot pass through if the input Ptr was not visited!");

  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    User *Usr = U.getUser();
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Analyze " << *CurPtr << " in " << *Usr
           "The current pointer offset should have been seeded!");
    assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
           "Current pointer should be assigned");

      return HandlePassthroughUser(Usr, CurPtr, Follow);

      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled constant user " << *CE

      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (UsrOI.isUnknown())

      if (PtrOI.isUnknown()) {

      Follow = collectConstantsForGEP(A, DL, UsrOI, PtrOI, GEP);

      return HandlePassthroughUser(Usr, CurPtr, Follow);

      if (RI->getFunction() == getAssociatedFunction()) {
        auto &PtrOI = OffsetInfoMap[CurPtr];
        Changed |= setReachesReturn(PtrOI);

      auto &UsrOI = PhiIt->second;
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (PtrOI.isUnknown()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand offset unknown "
                          << *CurPtr << " in " << *PHI << "\n");
        Follow = !UsrOI.isUnknown();

      if (UsrOI == PtrOI) {
        assert(!PtrOI.isUnassigned() &&
               "Cannot assign if the current Ptr was not visited!");
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant (so far)");

        auto It = OffsetInfoMap.find(CurPtrBase);
        if (It == OffsetInfoMap.end()) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand is too complex "
                            << *CurPtr << " in " << *PHI
                            << " (base: " << *CurPtrBase << ")\n");

            A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
                *PHI->getFunction());

        auto BaseOI = It->getSecond();
        BaseOI.addToAll(Offset.getZExtValue());
        if (IsFirstPHIUser || BaseOI == UsrOI) {
          LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant " << *CurPtr
                            << " in " << *Usr << "\n");
          return HandlePassthroughUser(Usr, CurPtr, Follow);

            dbgs() << "[AAPointerInfo] PHI operand pointer offset mismatch "
                   << *CurPtr << " in " << *PHI << "\n");

      if (!handleAccess(A, *LoadI, nullptr, AK,
                        OffsetInfoMap[CurPtr].Offsets, Changed,
      return II->isAssumeLikeIntrinsic();

      } while (FromI && FromI != ToI);

      auto IsValidAssume = [&](IntrinsicInst &IntrI) {
        if (IntrI.getIntrinsicID() != Intrinsic::assume)
        if (IntrI.getParent() == BB) {
          if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
          if ((*PredIt) != BB)
          if (SuccBB == IntrBB)
        if (IsImpactedInRange(LoadI->getNextNode(), BB->getTerminator()))
        if (IsImpactedInRange(&IntrBB->front(), &IntrI))

      std::pair<Value *, IntrinsicInst *> Assumption;
      for (const Use &LoadU : LoadI->uses()) {
          if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
          for (const Use &CmpU : CmpI->uses()) {
              if (!IsValidAssume(*IntrI))
              int Idx = CmpI->getOperandUse(0) == LoadU;
              Assumption = {CmpI->getOperand(Idx), IntrI};
        if (Assumption.first)

      if (!Assumption.first || !Assumption.second)

                        << *Assumption.second << ": " << *LoadI
                        << " == " << *Assumption.first << "\n");
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (Assumption.first)
            A.getAssumedSimplified(*Assumption.first, *this,
      return handleAccess(
          A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
          OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());

      for (auto *OtherOp : OtherOps) {
        if (OtherOp == CurPtr) {
                 << "[AAPointerInfo] Escaping use in store like instruction " << I

      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
        Content = A.getAssumedSimplified(
      return handleAccess(A, I, Content, AK, OffsetInfoMap[CurPtr].Offsets,

      return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
                             *StoreI->getValueOperand()->getType(),
                             {StoreI->getValueOperand()}, AccessKind::AK_W);
      return HandleStoreLike(*RMWI, nullptr, *RMWI->getValOperand()->getType(),
                             {RMWI->getValOperand()}, AccessKind::AK_RW);
      return HandleStoreLike(
          *CXI, nullptr, *CXI->getNewValOperand()->getType(),
          {CXI->getCompareOperand(), CXI->getNewValOperand()},
          A.getInfoCache().getTargetLibraryInfoForFunction(*CB->getFunction());

      const auto *CSArgPI = A.getAAFor<AAPointerInfo>(
      Changed = translateAndAddState(A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
      if (!CSArgPI->reachesReturn())
        return isValidState();

      if (!Callee || Callee->arg_size() <= ArgNo)
      bool UsedAssumedInformation = false;
      auto ReturnedValue = A.getAssumedSimplified(
      auto *Arg = Callee->getArg(ArgNo);
      if (ReturnedArg && Arg != ReturnedArg)
      bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
      const auto *CSRetPI = A.getAAFor<AAPointerInfo>(
      OffsetInfo OI = OffsetInfoMap[CurPtr];
      CSArgPI->addReturnedOffsetsTo(OI);
          translateAndAddState(A, *CSRetPI, OI, *CB, IsRetMustAcc) | Changed;
      return isValidState();

      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Call user not handled " << *CB

    LLVM_DEBUG(dbgs() << "[AAPointerInfo] User not handled " << *Usr << "\n");

  auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
    assert(OffsetInfoMap.count(OldU) && "Old use should be known already!");
    assert(!OffsetInfoMap[OldU].isUnassigned() && "Old use should be assigned");
    if (OffsetInfoMap.count(NewU)) {
      if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
        dbgs() << "[AAPointerInfo] Equivalent use callback failed: "
               << OffsetInfoMap[NewU] << " vs " << OffsetInfoMap[OldU]
      return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
    return HandlePassthroughUser(NewU.get(), OldU.get(), Unused);

  if (!A.checkForAllUses(UsePred, *this, AssociatedValue,
                         true, EquivalentUseCB)) {
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Check for all uses failed, abort!\n");
    return indicatePessimisticFixpoint();

    dbgs() << "Accesses by bin after update:\n";
struct AAPointerInfoReturned final : AAPointerInfoImpl {
  AAPointerInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

    return indicatePessimisticFixpoint();

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoArgument final : AAPointerInfoFloating {
  AAPointerInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
  AAPointerInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}
    using namespace AA::PointerInfo;

      if (auto Length = MI->getLengthInBytes())
        LengthVal = Length->getSExtValue();
      unsigned ArgNo = getIRPosition().getCallSiteArgNo();
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled memory intrinsic "
        return indicatePessimisticFixpoint();
          ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
          Changed | addAccess(A, {0, LengthVal}, *MI, nullptr, Kind, nullptr);

        dbgs() << "Accesses by bin after update:\n";

    Argument *Arg = getAssociatedArgument();
          A.getAAFor<AAPointerInfo>(*this, ArgPos, DepClassTy::REQUIRED);
      if (ArgAA && ArgAA->getState().isValidState())
        return translateAndAddStateFromCallee(A, *ArgAA,
      return indicatePessimisticFixpoint();

    bool IsKnownNoCapture;
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
      return indicatePessimisticFixpoint();

    bool IsKnown = false;
      return ChangeStatus::UNCHANGED;
        ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
  AAPointerInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
struct AANoUnwindImpl : AANoUnwind {
  AANoUnwindImpl(const IRPosition &IRP, Attributor &A) : AANoUnwind(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nounwind" : "may-unwind";

        (unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call, (unsigned)Instruction::CleanupRet,
        (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};

      if (!I.mayThrow(true))

      bool IsKnownNoUnwind;

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes,
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
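// Deduction for nounwind: iterate over the potentially unwinding opcodes
// listed above; an instruction that may throw only keeps the attribute alive
// if it is a call site whose callee is itself known or assumed nounwind.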
struct AANoUnwindFunction final : public AANoUnwindImpl {
  AANoUnwindFunction(const IRPosition &IRP, Attributor &A)
      : AANoUnwindImpl(IRP, A) {}

struct AANoUnwindCallSite final
    : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
  AANoUnwindCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP, A) {}
  case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
  case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
  case Intrinsic::nvvm_barrier0_and:
  case Intrinsic::nvvm_barrier0_or:
  case Intrinsic::nvvm_barrier0_popc:
  case Intrinsic::amdgcn_s_barrier:
    if (ExecutedAligned)

  switch (I->getOpcode()) {
  case Instruction::AtomicRMW:
  case Instruction::Store:
  case Instruction::Load:
           "New atomic operations need to be known in the attributor.");

    return !MI->isVolatile();

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nosync" : "may-sync";

      if (I.mayReadOrWriteMemory())

    bool UsedAssumedInformation = false;
    if (!A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *this,
                                            UsedAssumedInformation) ||
        !A.checkForAllCallLikeInstructions(CheckForNoSync, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

struct AANoSyncFunction final : public AANoSyncImpl {
  AANoSyncFunction(const IRPosition &IRP, Attributor &A)
      : AANoSyncImpl(IRP, A) {}

struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
  AANoSyncCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP, A) {}
struct AANoFreeImpl : public AANoFree {
  AANoFreeImpl(const IRPosition &IRP, Attributor &A) : AANoFree(IRP, A) {}

                                        DepClassTy::NONE, IsKnown));

                                               DepClassTy::REQUIRED, IsKnown);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForNoFree, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nofree" : "may-free";

struct AANoFreeFunction final : public AANoFreeImpl {
  AANoFreeFunction(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
  AANoFreeCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP, A) {}

struct AANoFreeFloating : AANoFreeImpl {
  AANoFreeFloating(const IRPosition &IRP, Attributor &A)
      : AANoFreeImpl(IRP, A) {}

    const IRPosition &IRP = getIRPosition();
                                       DepClassTy::OPTIONAL, IsKnown))
      return ChangeStatus::UNCHANGED;

    Value &AssociatedValue = getIRPosition().getAssociatedValue();
    auto Pred = [&](const Use &U, bool &Follow) -> bool {
                                           DepClassTy::REQUIRED, IsKnown);

    if (!A.checkForAllUses(Pred, *this, AssociatedValue))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANoFreeArgument final : AANoFreeFloating {
  AANoFreeArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

struct AANoFreeCallSiteArgument final : AANoFreeFloating {
  AANoFreeCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

    Argument *Arg = getAssociatedArgument();
      return indicatePessimisticFixpoint();
                                       DepClassTy::REQUIRED, IsKnown))
      return ChangeStatus::UNCHANGED;
    return indicatePessimisticFixpoint();

struct AANoFreeReturned final : AANoFreeFloating {
  AANoFreeReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {

  void trackStatistics() const override {}

struct AANoFreeCallSiteReturned final : AANoFreeFloating {
  AANoFreeCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoFreeFloating(IRP, A) {}

    return ChangeStatus::UNCHANGED;
                               bool IgnoreSubsumingPositions) {
  AttrKinds.push_back(Attribute::NonNull);
    AttrKinds.push_back(Attribute::Dereferenceable);
  if (A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))

  if (!Fn->isDeclaration()) {

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
              Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
            UsedAssumedInformation, false, true))

                     Attribute::NonNull)});

static int64_t getKnownNonNullAndDerefBytesForUse(
    Attributor &A, const AbstractAttribute &QueryingAA, Value &AssociatedValue,
    const Use *U, const Instruction *I, bool &IsNonNull, bool &TrackUse) {

  const Value *UseV = U->get();

  const DataLayout &DL = A.getInfoCache().getDL();

          U, {Attribute::NonNull, Attribute::Dereferenceable})) {

    bool IsKnownNonNull;
    IsNonNull |= IsKnownNonNull;
    return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;

  if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
      Loc->Size.isScalable() || I->isVolatile())

  if (Base && Base == &AssociatedValue) {
    int64_t DerefBytes = Loc->Size.getValue() + Offset;
    return std::max(int64_t(0), DerefBytes);

    int64_t DerefBytes = Loc->Size.getValue();
    return std::max(int64_t(0), DerefBytes);
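// getKnownNonNullAndDerefBytesForUse derives, from a single use, both whether
// the value must be non-null at that point and how many bytes are known to be
// dereferenceable, e.g. from attributes on a call site argument or from a
// non-volatile memory access through the pointer.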
struct AANonNullImpl : AANonNull {
  AANonNullImpl(const IRPosition &IRP, Attributor &A) : AANonNull(IRP, A) {}

    Value &V = *getAssociatedValue().stripPointerCasts();
      indicatePessimisticFixpoint();

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);

  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANonNull::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    getKnownNonNullAndDerefBytesForUse(A, *this, getAssociatedValue(), U, I,
                                       IsNonNull, TrackUse);
    State.setKnown(IsNonNull);

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";

struct AANonNullFloating : public AANonNullImpl {
  AANonNullFloating(const IRPosition &IRP, Attributor &A)
      : AANonNullImpl(IRP, A) {}

    auto CheckIRP = [&](const IRPosition &IRP) {
      bool IsKnownNonNull;
          A, *this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);

    bool UsedAssumedInformation = false;
    Value *AssociatedValue = &getAssociatedValue();
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
        Values.size() != 1 || Values.front().getValue() != AssociatedValue;

          return AA::hasAssumedIRAttr<Attribute::NonNull>(
              A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
        return ChangeStatus::UNCHANGED;
                                          DepClassTy::OPTIONAL, IsKnown) &&
                                          DepClassTy::OPTIONAL, IsKnown))
        return ChangeStatus::UNCHANGED;

      if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
        return indicatePessimisticFixpoint();
      return ChangeStatus::UNCHANGED;

    for (const auto &VAC : Values)
        return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANonNullReturned final
    : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
                                   false, AANonNull::IRAttributeKind, false> {
  AANonNullReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nonnull" : "may-null";

struct AANonNullArgument final
    : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
  AANonNullArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP, A) {}

struct AANonNullCallSiteArgument final : AANonNullFloating {
  AANonNullCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANonNullFloating(IRP, A) {}

struct AANonNullCallSiteReturned final
    : AACalleeToCallSite<AANonNull, AANonNullImpl> {
  AANonNullCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP, A) {}
struct AAMustProgressImpl : public AAMustProgress {
  AAMustProgressImpl(const IRPosition &IRP, Attributor &A)
      : AAMustProgress(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "mustprogress" : "may-not-progress";

struct AAMustProgressFunction final : AAMustProgressImpl {
  AAMustProgressFunction(const IRPosition &IRP, Attributor &A)
      : AAMustProgressImpl(IRP, A) {}

            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
        return indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;

    auto CheckForMustProgress = [&](AbstractCallSite ACS) {
      bool IsKnownMustProgress;
          A, this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckForMustProgress, *this,
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {

struct AAMustProgressCallSite final : AAMustProgressImpl {
  AAMustProgressCallSite(const IRPosition &IRP, Attributor &A)
      : AAMustProgressImpl(IRP, A) {}

    bool IsKnownMustProgress;
            A, this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

  void trackStatistics() const override {
struct AANoRecurseImpl : public AANoRecurse {
  AANoRecurseImpl(const IRPosition &IRP, Attributor &A) : AANoRecurse(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "norecurse" : "may-recurse";

struct AANoRecurseFunction final : AANoRecurseImpl {
  AANoRecurseFunction(const IRPosition &IRP, Attributor &A)
      : AANoRecurseImpl(IRP, A) {}

    auto CallSitePred = [&](AbstractCallSite ACS) {
      bool IsKnownNoRecurse;
              DepClassTy::NONE, IsKnownNoRecurse))
      return IsKnownNoRecurse;
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(CallSitePred, *this, true,
                               UsedAssumedInformation)) {
      if (!UsedAssumedInformation)
        indicateOptimisticFixpoint();
      return ChangeStatus::UNCHANGED;

    const AAInterFnReachability *EdgeReachability =
        A.getAAFor<AAInterFnReachability>(*this, getIRPosition(),
                                          DepClassTy::REQUIRED);
    if (EdgeReachability && EdgeReachability->canReach(A, *getAnchorScope()))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

struct AANoRecurseCallSite final
    : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
  AANoRecurseCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP, A) {}
struct AANonConvergentImpl : public AANonConvergent {
  AANonConvergentImpl(const IRPosition &IRP, Attributor &A)
      : AANonConvergent(IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "non-convergent" : "may-be-convergent";

struct AANonConvergentFunction final : AANonConvergentImpl {
  AANonConvergentFunction(const IRPosition &IRP, Attributor &A)
      : AANonConvergentImpl(IRP, A) {}

    auto CalleeIsNotConvergent = [&](Instruction &Inst) {
      if (!Callee || Callee->isIntrinsic()) {
      if (Callee->isDeclaration()) {
        return !Callee->hasFnAttribute(Attribute::Convergent);

      const auto *ConvergentAA = A.getAAFor<AANonConvergent>(
      return ConvergentAA && ConvergentAA->isAssumedNotConvergent();

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *this,
                                           UsedAssumedInformation)) {
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;

    if (isKnownNotConvergent() &&
        A.hasAttr(getIRPosition(), Attribute::Convergent)) {
      A.removeAttrs(getIRPosition(), {Attribute::Convergent});
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
struct AAUndefinedBehaviorImpl : public AAUndefinedBehavior {
  AAUndefinedBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehavior(IRP, A) {}

    const size_t UBPrevSize = KnownUBInsts.size();
    const size_t NoUBPrevSize = AssumedNoUBInsts.size();

      if (I.isVolatile() && I.mayWriteToMemory())

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))

             "Expected pointer operand of memory accessing instruction");

      std::optional<Value *> SimplifiedPtrOp =
          stopOnUndefOrAssumed(A, PtrOp, &I);
      if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
      const Value *PtrOpVal = *SimplifiedPtrOp;

        AssumedNoUBInsts.insert(&I);

        AssumedNoUBInsts.insert(&I);
        KnownUBInsts.insert(&I);

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))

      if (BrInst->isUnconditional())

      std::optional<Value *> SimplifiedCond =
          stopOnUndefOrAssumed(A, BrInst->getCondition(), BrInst);
      if (!SimplifiedCond || !*SimplifiedCond)
      AssumedNoUBInsts.insert(&I);

      if (AssumedNoUBInsts.count(&I) || KnownUBInsts.count(&I))

      for (unsigned idx = 0; idx < CB.arg_size(); idx++) {
        if (idx >= Callee->arg_size())

        bool IsKnownNoUndef;
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
        if (!IsKnownNoUndef)
        bool UsedAssumedInformation = false;
        std::optional<Value *> SimplifiedVal =
        if (UsedAssumedInformation)
        if (SimplifiedVal && !*SimplifiedVal)
          KnownUBInsts.insert(&I);

        bool IsKnownNonNull;
            A, this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
          KnownUBInsts.insert(&I);

      std::optional<Value *> SimplifiedRetValue =
          stopOnUndefOrAssumed(A, RI.getReturnValue(), &I);
      if (!SimplifiedRetValue || !*SimplifiedRetValue)

        bool IsKnownNonNull;
          KnownUBInsts.insert(&I);

    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(InspectMemAccessInstForUB, *this,
                              {Instruction::Load, Instruction::Store,
                               Instruction::AtomicCmpXchg,
                               Instruction::AtomicRMW},
                              UsedAssumedInformation,
    A.checkForAllInstructions(InspectBrInstForUB, *this, {Instruction::Br},
                              UsedAssumedInformation,
    A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *this,
                                      UsedAssumedInformation);

    if (!getAnchorScope()->getReturnType()->isVoidTy()) {
      if (!A.isAssumedDead(ReturnIRP, this, nullptr, UsedAssumedInformation)) {
        bool IsKnownNoUndef;
            A, this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
          A.checkForAllInstructions(InspectReturnInstForUB, *this,
                                    {Instruction::Ret}, UsedAssumedInformation,

    if (NoUBPrevSize != AssumedNoUBInsts.size() ||
        UBPrevSize != KnownUBInsts.size())
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
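  // An instruction stays in AssumedNoUBInsts while its simplified operand is
  // still optimistically assumed, and moves to KnownUBInsts once undef/poison
  // (or null where nonnull is required) is established; updateImpl reports a
  // change whenever either set grew.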
  bool isKnownToCauseUB(Instruction *I) const override {
    return KnownUBInsts.count(I);

  bool isAssumedToCauseUB(Instruction *I) const override {
    switch (I->getOpcode()) {
    case Instruction::Load:
    case Instruction::Store:
    case Instruction::AtomicCmpXchg:
    case Instruction::AtomicRMW:
      return !AssumedNoUBInsts.count(I);
    case Instruction::Br: {
      if (BrInst->isUnconditional())
      return !AssumedNoUBInsts.count(I);

    if (KnownUBInsts.empty())
      return ChangeStatus::UNCHANGED;
    for (Instruction *I : KnownUBInsts)
      A.changeToUnreachableAfterManifest(I);
    return ChangeStatus::CHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "undefined-behavior" : "no-ub";

  SmallPtrSet<Instruction *, 8> KnownUBInsts;

  SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;

  std::optional<Value *> stopOnUndefOrAssumed(Attributor &A, Value *V,
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimplifiedV =
    if (!UsedAssumedInformation) {
        KnownUBInsts.insert(I);
        return std::nullopt;
      KnownUBInsts.insert(I);
      return std::nullopt;

struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
  AAUndefinedBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAUndefinedBehaviorImpl(IRP, A) {}

  void trackStatistics() const override {
    STATS_DECL(UndefinedBehaviorInstruction, Instruction,
               "Number of instructions known to have UB");
        KnownUBInsts.size();
static bool mayContainUnboundedCycle(Function &F, Attributor &A) {
  ScalarEvolution *SE =
      A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(F);
  LoopInfo *LI = A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(F);

  for (scc_iterator<Function *> SCCI = scc_begin(&F); !SCCI.isAtEnd(); ++SCCI)
    if (SCCI.hasCycle())

  for (auto *L : LI->getLoopsInPreorder()) {
struct AAWillReturnImpl : public AAWillReturn {
  AAWillReturnImpl(const IRPosition &IRP, Attributor &A)
      : AAWillReturn(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  bool isImpliedByMustprogressAndReadonly(Attributor &A, bool KnownOnly) {
    if (!A.hasAttr(getIRPosition(), {Attribute::MustProgress}))

    return IsKnown || !KnownOnly;

    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

              A, this, IPos, DepClassTy::REQUIRED, IsKnown)) {

      bool IsKnownNoRecurse;
          A, this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);

    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckForWillReturn, *this,
                                           UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "willreturn" : "may-noreturn";

struct AAWillReturnFunction final : AAWillReturnImpl {
  AAWillReturnFunction(const IRPosition &IRP, Attributor &A)
      : AAWillReturnImpl(IRP, A) {}

    AAWillReturnImpl::initialize(A);

    assert(F && "Did expect an anchor function");
    if (F->isDeclaration() || mayContainUnboundedCycle(*F, A))
      indicatePessimisticFixpoint();

struct AAWillReturnCallSite final
    : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
  AAWillReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP, A) {}

    if (isImpliedByMustprogressAndReadonly(A, false))
      return ChangeStatus::UNCHANGED;

    return AACalleeToCallSite::updateImpl(A);
  const ToTy *To = nullptr;

    if (!ES || ES->empty()) {
      ExclusionSet = nullptr;
    } else if (MakeUnique) {
      ExclusionSet = A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);

    if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
    return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);

#define DefineKeys(ToTy)                                                       \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey =                  \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getEmptyKey(),                \
              DenseMapInfo<const ToTy *>::getEmptyKey());                      \
  ReachabilityQueryInfo<ToTy>                                                  \
      DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey =              \
          ReachabilityQueryInfo<ToTy>(                                         \
              DenseMapInfo<const Instruction *>::getTombstoneKey(),            \
              DenseMapInfo<const ToTy *>::getTombstoneKey());
template <typename BaseTy, typename ToTy>
struct CachedReachabilityAA : public BaseTy {
  using RQITy = ReachabilityQueryInfo<ToTy>;

  CachedReachabilityAA(const IRPosition &IRP, Attributor &A) : BaseTy(IRP, A) {}

  bool isQueryAA() const override { return true; }

    for (unsigned u = 0, e = QueryVector.size(); u < e; ++u) {
      RQITy *RQI = QueryVector[u];
      if (RQI->Result == RQITy::Reachable::No &&
        Changed = ChangeStatus::CHANGED;

                               bool IsTemporaryRQI) = 0;

  bool rememberResult(Attributor &A, typename RQITy::Reachable Result,
                      RQITy &RQI, bool UsedExclusionSet, bool IsTemporaryRQI) {
      QueryCache.erase(&RQI);

    if (Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
      RQITy PlainRQI(RQI.From, RQI.To);
      if (!QueryCache.count(&PlainRQI)) {
        RQITy *RQIPtr = new (A.Allocator) RQITy(RQI.From, RQI.To);
        QueryVector.push_back(RQIPtr);
        QueryCache.insert(RQIPtr);

    if (IsTemporaryRQI && Result != RQITy::Reachable::Yes && UsedExclusionSet) {
      assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
             "Did not expect empty set!");
      RQITy *RQIPtr = new (A.Allocator)
          RQITy(A, *RQI.From, *RQI.To, RQI.ExclusionSet, true);
      assert(RQIPtr->Result == RQITy::Reachable::No && "Already reachable?");
      assert(!QueryCache.count(RQIPtr));
      QueryVector.push_back(RQIPtr);
      QueryCache.insert(RQIPtr);

    if (Result == RQITy::Reachable::No && IsTemporaryRQI)
      A.registerForUpdate(*this);
    return Result == RQITy::Reachable::Yes;

  const std::string getAsStr(Attributor *A) const override {
    return "#queries(" + std::to_string(QueryVector.size()) + ")";

  bool checkQueryCache(Attributor &A, RQITy &StackRQI,
                       typename RQITy::Reachable &Result) {
    if (!this->getState().isValidState()) {
      Result = RQITy::Reachable::Yes;

    if (StackRQI.ExclusionSet) {
      RQITy PlainRQI(StackRQI.From, StackRQI.To);
      auto It = QueryCache.find(&PlainRQI);
      if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
        Result = RQITy::Reachable::No;

    auto It = QueryCache.find(&StackRQI);
    if (It != QueryCache.end()) {

    QueryCache.insert(&StackRQI);

  DenseSet<RQITy *> QueryCache;
struct AAIntraFnReachabilityFunction final
    : public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
  using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;

  AAIntraFnReachabilityFunction(const IRPosition &IRP, Attributor &A)
    DT = A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(

  bool isAssumedReachable(
      Attributor &A, const Instruction &From, const Instruction &To,
    auto *NonConstThis = const_cast<AAIntraFnReachabilityFunction *>(this);

    RQITy StackRQI(A, From, To, ExclusionSet, false);
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI,
    return Result == RQITy::Reachable::Yes;

        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
            [&](const auto &DeadEdge) {
              return LivenessAA->isEdgeDead(DeadEdge.first,
              return LivenessAA->isAssumedDead(BB);
      return ChangeStatus::UNCHANGED;

    return Base::updateImpl(A);

                       bool IsTemporaryRQI) override {
    bool UsedExclusionSet = false;
      while (IP && IP != &To) {
        if (ExclusionSet && IP != Origin && ExclusionSet->count(IP)) {
          UsedExclusionSet = true;

    const BasicBlock *FromBB = RQI.From->getParent();
    const BasicBlock *ToBB = RQI.To->getParent();
           "Not an intra-procedural query!");

    if (FromBB == ToBB &&
        WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,

    if (!WillReachInBlock(ToBB->front(), *RQI.To, RQI.ExclusionSet))
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

    SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
    if (RQI.ExclusionSet)
      for (auto *I : *RQI.ExclusionSet)
        if (I->getFunction() == Fn)
          ExclusionBlocks.insert(I->getParent());

    if (ExclusionBlocks.count(FromBB) &&
      return rememberResult(A, RQITy::Reachable::No, RQI, true, IsTemporaryRQI);

        A.getAAFor<AAIsDead>(*this, getIRPosition(), DepClassTy::OPTIONAL);
    if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
      DeadBlocks.insert(ToBB);
      return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

    SmallPtrSet<const BasicBlock *, 16> Visited;
    DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
    while (!Worklist.empty()) {
      if (!Visited.insert(BB).second)
      for (const BasicBlock *SuccBB : successors(BB)) {
        if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
          LocalDeadEdges.insert({BB, SuccBB});
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
          return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
        if (ExclusionBlocks.count(SuccBB)) {
          UsedExclusionSet = true;

    DeadEdges.insert_range(LocalDeadEdges);
    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,

  void trackStatistics() const override {}

  DenseSet<const BasicBlock *> DeadBlocks;

  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;

  const DominatorTree *DT = nullptr;
3763 bool IgnoreSubsumingPositions) {
3764 assert(ImpliedAttributeKind == Attribute::NoAlias &&
3765 "Unexpected attribute kind");
3771 IgnoreSubsumingPositions =
true;
3782 if (
A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
3783 IgnoreSubsumingPositions, Attribute::NoAlias))
3793 "Noalias is a pointer attribute");
3796 const std::string getAsStr(
Attributor *
A)
const override {
3797 return getAssumed() ?
"noalias" :
"may-alias";
3802struct AANoAliasFloating final : AANoAliasImpl {
3803 AANoAliasFloating(
const IRPosition &IRP, Attributor &
A)
3804 : AANoAliasImpl(IRP,
A) {}
3809 return indicatePessimisticFixpoint();
3813 void trackStatistics()
const override {
/// NoAlias attribute for an argument.
struct AANoAliasArgument final
    : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
  using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
  AANoAliasArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::update(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // We have to make sure no-alias on the argument does not break
    // synchronization when this is a callback argument. If the anchor function
    // is assumed nosync, delegate to the base update.
    bool IsKnownNoSync;
    if (AA::hasAssumedIRAttr<Attribute::NoSync>(
            A, this, IRPosition::function_scope(getIRPosition()),
            DepClassTy::OPTIONAL, IsKnownNoSync))
      return Base::updateImpl(A);

    // If the argument is read-only, no-alias cannot break synchronization.
    bool IsKnown;
    if (AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown))
      return Base::updateImpl(A);

    // If the argument is never passed through callbacks, no-alias cannot break
    // synchronization either.
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(
            [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *this,
            true, UsedAssumedInformation))
      return Base::updateImpl(A);

    // TODO: Add no-alias without breaking synchronization, e.g. by introducing
    //       fake uses.
    return indicatePessimisticFixpoint();
  }
};
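// Illustrative example (not from the sources): given
//
//   define internal void @callee(ptr %p) { ... }
//   ...
//   call void @callee(ptr noalias %q)
//
// AANoAliasArgument can propagate `noalias` from all call sites to %p,
// provided doing so cannot change synchronization behavior (hence the nosync,
// read-only, and no-callback checks above).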
/// NoAlias attribute for call site arguments.
struct AANoAliasCallSiteArgument final : AANoAliasImpl {
  AANoAliasCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// Determine if the underlying value may alias with the call site argument
  /// \p OtherArgNo of \p CB.
  bool mayAliasWithArgument(Attributor &A, AAResults *&AAR,
                            const AAMemoryBehavior &MemBehaviorAA,
                            const CallBase &CB, unsigned OtherArgNo) {
    // We do not need to worry about aliasing with the underlying IRP.
    if (this->getCalleeArgNo() == (int)OtherArgNo)
      return false;

    // If it is not a pointer or pointer vector we do not alias.
    const Value *ArgOp = CB.getArgOperand(OtherArgNo);
    if (!ArgOp->getType()->isPtrOrPtrVectorTy())
      return false;

    auto *CBArgMemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
        *this, IRPosition::callsite_argument(CB, OtherArgNo), DepClassTy::NONE);

    // If the argument is readnone, there is no read-write aliasing.
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // If the argument is readonly and the underlying value is readonly, there
    // is no read-write aliasing either.
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
        MemBehaviorAA.isAssumedReadOnly()) {
      A.recordDependence(MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // We have to utilize actual alias analysis queries so we need the object.
    if (!AAR)
      AAR = A.getInfoCache().getAnalysisResultForFunction<AAManager>(
          *getAnchorScope());

    // Try to rule it out at the call site.
    bool IsAliasing = !AAR || !AAR->isNoAlias(&getAssociatedValue(), ArgOp);
    LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Check alias between "
                         "callsite arguments: "
                      << getAssociatedValue() << " " << *ArgOp << " => "
                      << (IsAliasing ? "" : "no-") << "alias \n");

    return IsAliasing;
  }
3916 bool isKnownNoAliasDueToNoAliasPreservation(
3917 Attributor &
A, AAResults *&AAR,
const AAMemoryBehavior &MemBehaviorAA) {
    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
3946 bool IsKnownNoCapture;
3949 DepClassTy::OPTIONAL, IsKnownNoCapture))
3955 A, *UserI, *getCtxI(), *
this,
nullptr,
3956 [ScopeFn](
const Function &Fn) {
return &Fn != ScopeFn; }))
      LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Unknown user: " << *UserI
                        << "\n");
3975 bool IsKnownNoCapture;
3976 const AANoCapture *NoCaptureAA =
nullptr;
3978 A,
this, VIRP, DepClassTy::NONE, IsKnownNoCapture,
false, &NoCaptureAA);
    if (!IsAssumedNoCapture &&
        (!NoCaptureAA || !NoCaptureAA->isAssumedNoCaptureMaybeReturned())) {
      if (!A.checkForAllUses(UsePred, *this, getAssociatedValue())) {
        LLVM_DEBUG(
            dbgs() << "[AANoAliasCSArg] " << getAssociatedValue()
                   << " cannot be noalias as it is potentially captured\n");
        return false;
      }
    }
    if (NoCaptureAA)
      A.recordDependence(*NoCaptureAA, *this, DepClassTy::OPTIONAL);
    // Check there is no aliasing with any other call site argument.
    for (unsigned OtherArgNo = 0; OtherArgNo < CB.arg_size(); OtherArgNo++)
      if (mayAliasWithArgument(A, AAR, MemBehaviorAA, CB, OtherArgNo))
        return false;

    return true;
  }
  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If the argument is readnone we are done as there are no accesses via the
    // argument.
    auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, getIRPosition(), DepClassTy::NONE);
    if (MemBehaviorAA && MemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;
    }

    bool IsKnownNoAlias;
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
            A, this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AANoAlias] " << getAssociatedValue()
                        << " is not no-alias at the definition\n");
      return indicatePessimisticFixpoint();
    }

    AAResults *AAR = nullptr;
    if (MemBehaviorAA &&
        isKnownNoAliasDueToNoAliasPreservation(A, AAR, *MemBehaviorAA)) {
      LLVM_DEBUG(
          dbgs() << "[AANoAlias] No-Alias deduced via no-alias preservation\n");
      return ChangeStatus::UNCHANGED;
    }

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(noalias) }
};
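// Summary of the call-site-argument deduction above: a call site argument is
// `noalias` if (1) the passed value is no-alias at its definition, (2) it is
// not captured before the call, and (3) alias analysis can rule out aliasing
// with every other pointer argument of the same call.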
/// NoAlias attribute for function return value.
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckReturnValue = [&](Value &RV) -> bool {
      // A returned value is no-alias if it is itself no-alias at its
      // definition and not captured (except, possibly, through the return).
      const IRPosition &RVPos = IRPosition::value(RV);
      bool IsKnownNoAlias;
      if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
              A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
        return false;

      bool IsKnownNoCapture;
      const AANoCapture *NoCaptureAA = nullptr;
      bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
          A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
          &NoCaptureAA);
      return IsAssumedNoCapture ||
             (NoCaptureAA && NoCaptureAA->isAssumedNoCaptureMaybeReturned());
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
};
/// NoAlias attribute deduction for a call site return value.
struct AANoAliasCallSiteReturned final
    : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(noalias) }
};
struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }
  /// Check if all uses are assumed dead.
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // Callers might not check the type, void has no uses.
    if (V.getType()->isVoidTy() || V.use_empty())
      return true;

    // If the value lives in a function we do not process, give up.
    if (auto *I = dyn_cast<Instruction>(&V))
      if (!A.isRunOn(*I->getFunction()))
        return false;

    // If we replace the value with a constant there are no uses to check.
    bool UsedAssumedInformation = false;
    std::optional<Constant *> C =
        A.getAssumedConstant(V, *this, UsedAssumedInformation);
    if (!C || *C)
      return true;

    // Every remaining use has to be assumed dead by the dedicated use-liveness
    // query; no use is followed transitively.
    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    return A.checkForAllUses(UsePred, *this, V, /* CheckBBLivenessOnly */ false,
                             DepClassTy::REQUIRED,
                             /* IgnoreDroppableUses */ false);
  }

  /// Determine if \p I is assumed to be side-effect free.
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    // ... (handling of trivially dead, non-call instructions elided in this
    //      excerpt) ...
    auto *CB = dyn_cast_or_null<CallBase>(I);
    if (!CB)
      return false;
    const IRPosition &CallIRP = IRPosition::callsite_function(*CB);

    bool IsKnownNoUnwind;
    if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      return false;

    bool IsKnown;
    return AA::isAssumedReadOnly(A, CallIRP, *this, IsKnown);
  }
};
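// A value is "dead" in the AAIsDeadValue* sense if computing it is free of
// side effects (including unwinding) and every use of it is itself assumed
// dead. Such values can be deleted or replaced during manifest.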
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}
4179 AAIsDeadValueImpl::initialize(
A);
4182 indicatePessimisticFixpoint();
4187 if (!isAssumedSideEffectFree(
A,
I)) {
4189 indicatePessimisticFixpoint();
4191 removeAssumedBits(HAS_NO_EFFECT);
4195 bool isDeadFence(Attributor &
A, FenceInst &FI) {
4196 const auto *ExecDomainAA =
A.lookupAAFor<AAExecutionDomain>(
4198 if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
4200 A.recordDependence(*ExecDomainAA, *
this, DepClassTy::OPTIONAL);
4204 bool isDeadStore(Attributor &
A, StoreInst &SI,
4205 SmallSetVector<Instruction *, 8> *AssumeOnlyInst =
nullptr) {
4207 if (
SI.isVolatile())
4213 bool UsedAssumedInformation =
false;
4214 if (!AssumeOnlyInst) {
4215 PotentialCopies.clear();
4217 UsedAssumedInformation)) {
4220 <<
"[AAIsDead] Could not determine potential copies of store!\n");
4224 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Store has " << PotentialCopies.size()
4225 <<
" potential copies.\n");
4227 InformationCache &InfoCache =
A.getInfoCache();
4230 UsedAssumedInformation))
4234 auto &UserI = cast<Instruction>(*U.getUser());
4235 if (InfoCache.isOnlyUsedByAssume(UserI)) {
4237 AssumeOnlyInst->insert(&UserI);
4240 return A.isAssumedDead(U,
this,
nullptr, UsedAssumedInformation);
4246 <<
" is assumed live!\n");
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (isa_and_nonnull<StoreInst>(I) && isValidState())
      return "assumed-dead-store";
    if (isa_and_nonnull<FenceInst>(I) && isValidState())
      return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr(A);
  }
4267 if (!isDeadStore(
A, *SI))
4268 return indicatePessimisticFixpoint();
4270 if (!isDeadFence(
A, *FI))
4271 return indicatePessimisticFixpoint();
4273 if (!isAssumedSideEffectFree(
A,
I))
4274 return indicatePessimisticFixpoint();
4275 if (!areAllUsesAssumedDead(
A, getAssociatedValue()))
4276 return indicatePessimisticFixpoint();
4281 bool isRemovableStore()
const override {
4282 return isAssumed(IS_REMOVABLE) &&
isa<StoreInst>(&getAssociatedValue());
4287 Value &
V = getAssociatedValue();
4294 SmallSetVector<Instruction *, 8> AssumeOnlyInst;
4295 bool IsDead = isDeadStore(
A, *SI, &AssumeOnlyInst);
4298 A.deleteAfterManifest(*
I);
4299 for (
size_t i = 0; i < AssumeOnlyInst.
size(); ++i) {
4301 for (
auto *Usr : AOI->
users())
4303 A.deleteAfterManifest(*AOI);
4309 A.deleteAfterManifest(*FI);
4313 A.deleteAfterManifest(*
I);
4321 void trackStatistics()
const override {
4327 SmallSetVector<Value *, 4> PotentialCopies;
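// For stores, AAIsDeadFloating collects the locations the stored value may be
// copied to (PotentialCopies above). The store is removable only if every such
// copy and every non-assume user is assumed dead; users that only feed
// llvm.assume are deleted together with the store during manifest.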
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}
4336 Argument &Arg = *getAssociatedArgument();
4337 if (
A.isValidFunctionSignatureRewrite(Arg, {}))
4338 if (
A.registerFunctionSignatureRewrite(
4342 return ChangeStatus::CHANGED;
4344 return ChangeStatus::UNCHANGED;
struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
  }
4368 Argument *Arg = getAssociatedArgument();
4370 return indicatePessimisticFixpoint();
4372 auto *ArgAA =
A.getAAFor<AAIsDead>(*
this, ArgPos, DepClassTy::REQUIRED);
4374 return indicatePessimisticFixpoint();
4383 "Expected undef values to be filtered out!");
4385 if (
A.changeUseAfterManifest(U, UV))
4386 return ChangeStatus::CHANGED;
4387 return ChangeStatus::UNCHANGED;
struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    if (isa<UndefValue>(getAssociatedValue()))
      indicatePessimisticFixpoint();
    // Track side-effect freedom as a secondary state.
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }
  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      STATS_DECLTRACK_CSRET_ATTR(IsDead)
    else
      STATS_DECLTRACK_CSRET_ATTR(UnusedResult)
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};
struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}
4453 bool UsedAssumedInformation =
false;
4454 A.checkForAllInstructions([](Instruction &) {
return true; }, *
this,
4455 {Instruction::Ret}, UsedAssumedInformation);
4457 auto PredForCallSite = [&](AbstractCallSite ACS) {
4458 if (ACS.isCallbackCall() || !ACS.getInstruction())
4460 return areAllUsesAssumedDead(
A, *ACS.getInstruction());
4463 if (!
A.checkForAllCallSites(PredForCallSite, *
this,
true,
4464 UsedAssumedInformation))
4465 return indicatePessimisticFixpoint();
4467 return ChangeStatus::UNCHANGED;
4473 bool AnyChange =
false;
4474 UndefValue &UV = *
UndefValue::get(getAssociatedFunction()->getReturnType());
4481 bool UsedAssumedInformation =
false;
4482 A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
4483 UsedAssumedInformation);
4484 return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}
4497 assert(
F &&
"Did expect an anchor function");
4498 if (!isAssumedDeadInternalFunction(
A)) {
4499 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4500 assumeLive(
A,
F->getEntryBlock());
4504 bool isAssumedDeadInternalFunction(Attributor &
A) {
4505 if (!getAnchorScope()->hasLocalLinkage())
4507 bool UsedAssumedInformation =
false;
4508 return A.checkForAllCallSites([](AbstractCallSite) {
return false; }, *
this,
4509 true, UsedAssumedInformation);
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }
4522 assert(getState().isValidState() &&
4523 "Attempted to manifest an invalid state!");
4528 if (AssumedLiveBlocks.empty()) {
4529 A.deleteAfterManifest(
F);
4530 return ChangeStatus::CHANGED;
4536 bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(
F);
4538 KnownDeadEnds.set_union(ToBeExploredFrom);
4539 for (
const Instruction *DeadEndI : KnownDeadEnds) {
4543 bool IsKnownNoReturn;
4551 A.registerInvokeWithDeadSuccessor(
const_cast<InvokeInst &
>(*
II));
4553 A.changeToUnreachableAfterManifest(
4554 const_cast<Instruction *
>(DeadEndI->getNextNode()));
4555 HasChanged = ChangeStatus::CHANGED;
4558 STATS_DECL(AAIsDead, BasicBlock,
"Number of dead basic blocks deleted.");
4559 for (BasicBlock &BB :
F)
4560 if (!AssumedLiveBlocks.count(&BB)) {
4561 A.deleteAfterManifest(BB);
4563 HasChanged = ChangeStatus::CHANGED;
4572 bool isEdgeDead(
const BasicBlock *From,
const BasicBlock *To)
const override {
4575 "Used AAIsDead of the wrong function");
4576 return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
4580 void trackStatistics()
const override {}
4583 bool isAssumedDead()
const override {
return false; }
4586 bool isKnownDead()
const override {
return false; }
4589 bool isAssumedDead(
const BasicBlock *BB)
const override {
4591 "BB must be in the same anchor scope function.");
4595 return !AssumedLiveBlocks.count(BB);
4599 bool isKnownDead(
const BasicBlock *BB)
const override {
4600 return getKnown() && isAssumedDead(BB);
4604 bool isAssumedDead(
const Instruction *
I)
const override {
4605 assert(
I->getParent()->getParent() == getAnchorScope() &&
4606 "Instruction must be in the same anchor scope function.");
4613 if (!AssumedLiveBlocks.count(
I->getParent()))
4619 if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
4627 bool isKnownDead(
const Instruction *
I)
const override {
4628 return getKnown() && isAssumedDead(
I);
4633 bool assumeLive(Attributor &
A,
const BasicBlock &BB) {
4634 if (!AssumedLiveBlocks.insert(&BB).second)
4641 for (
const Instruction &
I : BB)
4644 if (
F->hasLocalLinkage())
4645 A.markLiveInternalFunction(*
F);
  /// Instructions to continue CFG exploration from in the next update.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;
  /// Instructions known to not transfer control to their successors.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;
  /// All assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;
  /// All assumed live basic blocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};
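// Function liveness is computed optimistically: only the entry block is
// assumed live initially, and liveness is propagated along edges whose
// terminators are not known dead ends. Blocks never reached this way are
// removed during manifest, and known dead ends are followed by unreachable.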
4664identifyAliveSuccessors(Attributor &
A,
const CallBase &CB,
4665 AbstractAttribute &AA,
4666 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4669 bool IsKnownNoReturn;
4672 return !IsKnownNoReturn;
4681identifyAliveSuccessors(Attributor &
A,
const InvokeInst &
II,
4682 AbstractAttribute &AA,
4683 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4684 bool UsedAssumedInformation =
4690 if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*
II.getFunction())) {
4691 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4695 bool IsKnownNoUnwind;
4698 UsedAssumedInformation |= !IsKnownNoUnwind;
4700 AliveSuccessors.
push_back(&
II.getUnwindDest()->front());
4703 return UsedAssumedInformation;
4707identifyAliveSuccessors(Attributor &
A,
const BranchInst &BI,
4708 AbstractAttribute &AA,
4709 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4710 bool UsedAssumedInformation =
false;
4714 std::optional<Constant *>
C =
4715 A.getAssumedConstant(*BI.
getCondition(), AA, UsedAssumedInformation);
4725 UsedAssumedInformation =
false;
4728 return UsedAssumedInformation;
4732identifyAliveSuccessors(Attributor &
A,
const SwitchInst &SI,
4733 AbstractAttribute &AA,
4734 SmallVectorImpl<const Instruction *> &AliveSuccessors) {
4735 bool UsedAssumedInformation =
false;
4739 UsedAssumedInformation)) {
4741 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4746 if (Values.
empty() ||
4747 (Values.
size() == 1 &&
4750 return UsedAssumedInformation;
4753 Type &Ty = *
SI.getCondition()->getType();
4754 SmallPtrSet<ConstantInt *, 8>
Constants;
4755 auto CheckForConstantInt = [&](
Value *
V) {
4763 if (!
all_of(Values, [&](AA::ValueAndContext &VAC) {
4764 return CheckForConstantInt(
VAC.getValue());
4766 for (
const BasicBlock *SuccBB :
successors(
SI.getParent()))
4768 return UsedAssumedInformation;
4771 unsigned MatchedCases = 0;
4772 for (
const auto &CaseIt :
SI.cases()) {
4773 if (
Constants.count(CaseIt.getCaseValue())) {
4775 AliveSuccessors.
push_back(&CaseIt.getCaseSuccessor()->front());
4782 AliveSuccessors.
push_back(&
SI.getDefaultDest()->front());
4783 return UsedAssumedInformation;
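// The identifyAliveSuccessors overloads above return true if assumed (rather
// than known) information was used, e.g. a branch or switch condition that is
// only assumed constant. Such instructions are queued for re-exploration until
// the information becomes known or the assumption is retracted.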
4789 if (AssumedLiveBlocks.empty()) {
4790 if (isAssumedDeadInternalFunction(
A))
4794 ToBeExploredFrom.insert(&
F->getEntryBlock().front());
4795 assumeLive(
A,
F->getEntryBlock());
4799 LLVM_DEBUG(
dbgs() <<
"[AAIsDead] Live [" << AssumedLiveBlocks.size() <<
"/"
4800 << getAnchorScope()->
size() <<
"] BBs and "
4801 << ToBeExploredFrom.size() <<
" exploration points and "
4802 << KnownDeadEnds.size() <<
" known dead ends\n");
4807 ToBeExploredFrom.end());
4808 decltype(ToBeExploredFrom) NewToBeExploredFrom;
4811 while (!Worklist.
empty()) {
4818 I =
I->getNextNode();
4820 AliveSuccessors.
clear();
4822 bool UsedAssumedInformation =
false;
4823 switch (
I->getOpcode()) {
4827 "Expected non-terminators to be handled already!");
4828 for (
const BasicBlock *SuccBB :
successors(
I->getParent()))
4831 case Instruction::Call:
4833 *
this, AliveSuccessors);
4835 case Instruction::Invoke:
4837 *
this, AliveSuccessors);
4839 case Instruction::Br:
4841 *
this, AliveSuccessors);
4843 case Instruction::Switch:
4845 *
this, AliveSuccessors);
4849 if (UsedAssumedInformation) {
4850 NewToBeExploredFrom.insert(
I);
4851 }
else if (AliveSuccessors.
empty() ||
4852 (
I->isTerminator() &&
4853 AliveSuccessors.
size() <
I->getNumSuccessors())) {
4854 if (KnownDeadEnds.insert(
I))
4859 << AliveSuccessors.
size() <<
" UsedAssumedInformation: "
4860 << UsedAssumedInformation <<
"\n");
4862 for (
const Instruction *AliveSuccessor : AliveSuccessors) {
4863 if (!
I->isTerminator()) {
4864 assert(AliveSuccessors.size() == 1 &&
4865 "Non-terminator expected to have a single successor!");
4869 auto Edge = std::make_pair(
I->getParent(), AliveSuccessor->getParent());
4870 if (AssumedLiveEdges.insert(
Edge).second)
4872 if (assumeLive(
A, *AliveSuccessor->getParent()))
4879 if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
4880 llvm::any_of(NewToBeExploredFrom, [&](
const Instruction *
I) {
4881 return !ToBeExploredFrom.count(I);
4884 ToBeExploredFrom = std::move(NewToBeExploredFrom);
4893 if (ToBeExploredFrom.empty() &&
4894 getAnchorScope()->
size() == AssumedLiveBlocks.size() &&
4895 llvm::all_of(KnownDeadEnds, [](
const Instruction *DeadEndI) {
4896 return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
4898 return indicatePessimisticFixpoint();
4903struct AAIsDeadCallSite final : AAIsDeadFunction {
4904 AAIsDeadCallSite(
const IRPosition &IRP, Attributor &
A)
4905 : AAIsDeadFunction(IRP,
A) {}
4914 "supported for call sites yet!");
4919 return indicatePessimisticFixpoint();
4923 void trackStatistics()
const override {}
struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;
4937 Value &
V = *getAssociatedValue().stripPointerCasts();
4939 A.getAttrs(getIRPosition(),
4940 {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
4943 takeKnownDerefBytesMaximum(Attr.getValueAsInt());
4946 bool IsKnownNonNull;
4948 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);
4950 bool CanBeNull, CanBeFreed;
4951 takeKnownDerefBytesMaximum(
V.getPointerDereferenceableBytes(
4952 A.getDataLayout(), CanBeNull, CanBeFreed));
4954 if (Instruction *CtxI = getCtxI())
4955 followUsesInMBEC(*
this,
A, getState(), *CtxI);
  /// See AbstractAttribute::getState().
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }
4965 void addAccessedBytesForUse(Attributor &
A,
const Use *U,
const Instruction *
I,
4966 DerefState &State) {
4967 const Value *UseV =
U->get();
4972 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
I->isVolatile())
4977 Loc->Ptr,
Offset,
A.getDataLayout(),
true);
4978 if (
Base &&
Base == &getAssociatedValue())
4979 State.addAccessedBytes(
Offset, Loc->Size.getValue());
4983 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
4984 AADereferenceable::StateType &State) {
4985 bool IsNonNull =
false;
4986 bool TrackUse =
false;
4987 int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
4988 A, *
this, getAssociatedValue(), U,
I, IsNonNull, TrackUse);
4989 LLVM_DEBUG(
dbgs() <<
"[AADereferenceable] Deref bytes: " << DerefBytes
4990 <<
" for instruction " << *
I <<
"\n");
4992 addAccessedBytesForUse(
A, U,
I, State);
4993 State.takeKnownDerefBytesMaximum(DerefBytes);
5000 bool IsKnownNonNull;
5002 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5003 if (IsAssumedNonNull &&
5004 A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
5005 A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
5006 return ChangeStatus::CHANGED;
5011 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5012 SmallVectorImpl<Attribute> &Attrs)
const override {
5014 bool IsKnownNonNull;
5016 A,
this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
5017 if (IsAssumedNonNull)
5018 Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
5019 Ctx, getAssumedDereferenceableBytes()));
5021 Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
5022 Ctx, getAssumedDereferenceableBytes()));
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (!getAssumedDereferenceableBytes())
      return "unknown-dereferenceable";
    bool IsKnownNonNull;
    bool IsAssumedNonNull = false;
    if (A)
      IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
          *A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    return std::string("dereferenceable") +
           (IsAssumedNonNull ? "" : "_or_null") +
           (isAssumedGlobal() ? "_globally" : "") + "<" +
           std::to_string(getKnownDereferenceableBytes()) + "-" +
           std::to_string(getAssumedDereferenceableBytes()) + ">" +
           (!A ? " [non-null is unknown]" : "");
  }
struct AADereferenceableFloating : AADereferenceableImpl {
  AADereferenceableFloating(const IRPosition &IRP, Attributor &A)
      : AADereferenceableImpl(IRP, A) {}
5051 bool UsedAssumedInformation =
false;
5053 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5055 Values.
push_back({getAssociatedValue(), getCtxI()});
5058 Stripped = Values.
size() != 1 ||
5059 Values.
front().getValue() != &getAssociatedValue();
5062 const DataLayout &
DL =
A.getDataLayout();
5065 auto VisitValueCB = [&](
const Value &
V) ->
bool {
5067 DL.getIndexSizeInBits(
V.getType()->getPointerAddressSpace());
5068 APInt
Offset(IdxWidth, 0);
5073 const auto *AA =
A.getAAFor<AADereferenceable>(
5075 int64_t DerefBytes = 0;
5076 if (!AA || (!Stripped &&
this == AA)) {
5079 bool CanBeNull, CanBeFreed;
5081 Base->getPointerDereferenceableBytes(
DL, CanBeNull, CanBeFreed);
5082 T.GlobalState.indicatePessimisticFixpoint();
5085 DerefBytes =
DS.DerefBytesState.getAssumed();
5086 T.GlobalState &=
DS.GlobalState;
5092 int64_t OffsetSExt =
Offset.getSExtValue();
5096 T.takeAssumedDerefBytesMinimum(
5097 std::max(int64_t(0), DerefBytes - OffsetSExt));
5102 T.takeKnownDerefBytesMaximum(
5103 std::max(int64_t(0), DerefBytes - OffsetSExt));
5104 T.indicatePessimisticFixpoint();
5105 }
else if (OffsetSExt > 0) {
5111 T.indicatePessimisticFixpoint();
5115 return T.isValidState();
5118 for (
const auto &VAC : Values)
5119 if (!VisitValueCB(*
VAC.getValue()))
5120 return indicatePessimisticFixpoint();
5126 void trackStatistics()
const override {
5132struct AADereferenceableReturned final
5133 : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
5135 AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
5136 AADereferenceableReturned(
const IRPosition &IRP, Attributor &
A)
5140 void trackStatistics()
const override {
5146struct AADereferenceableArgument final
5147 : AAArgumentFromCallSiteArguments<AADereferenceable,
5148 AADereferenceableImpl> {
5150 AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
5151 AADereferenceableArgument(
const IRPosition &IRP, Attributor &
A)
5155 void trackStatistics()
const override {
5161struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
5162 AADereferenceableCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5163 : AADereferenceableFloating(IRP,
A) {}
5166 void trackStatistics()
const override {
5172struct AADereferenceableCallSiteReturned final
5173 : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
5174 using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
5175 AADereferenceableCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5179 void trackStatistics()
const override {
5189static unsigned getKnownAlignForUse(Attributor &
A, AAAlign &QueryingAA,
5190 Value &AssociatedValue,
const Use *U,
5191 const Instruction *
I,
bool &TrackUse) {
5200 if (
GEP->hasAllConstantIndices())
5205 switch (
II->getIntrinsicID()) {
5206 case Intrinsic::ptrmask: {
5208 const auto *ConstVals =
A.getAAFor<AAPotentialConstantValues>(
5210 const auto *AlignAA =
A.getAAFor<AAAlign>(
5212 if (ConstVals && ConstVals->isValidState() && ConstVals->isAtFixpoint()) {
5213 unsigned ShiftValue = std::min(ConstVals->getAssumedMinTrailingZeros(),
5215 Align ConstAlign(UINT64_C(1) << ShiftValue);
5216 if (ConstAlign >= AlignAA->getKnownAlign())
5217 return Align(1).value();
5220 return AlignAA->getKnownAlign().
value();
5238 MA = MaybeAlign(AlignAA->getKnownAlign());
5241 const DataLayout &
DL =
A.getDataLayout();
5242 const Value *UseV =
U->get();
5244 if (
SI->getPointerOperand() == UseV)
5245 MA =
SI->getAlign();
5247 if (LI->getPointerOperand() == UseV)
5248 MA = LI->getAlign();
5250 if (AI->getPointerOperand() == UseV)
5251 MA = AI->getAlign();
5253 if (AI->getPointerOperand() == UseV)
5254 MA = AI->getAlign();
5260 unsigned Alignment = MA->value();
5264 if (
Base == &AssociatedValue) {
5269 uint32_t
gcd = std::gcd(uint32_t(
abs((int32_t)
Offset)), Alignment);
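// Example of the use-based alignment reasoning above (illustrative only): if a
// pointer is accessed with `align 16` at a constant offset of 8 bytes from the
// associated value, the alignment deduced for the associated value from that
// use is gcd(8, 16) = 8.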
struct AAAlignImpl : AAAlign {
  AAAlignImpl(const IRPosition &IRP, Attributor &A) : AAAlign(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(), {Attribute::Alignment}, Attrs);
    for (const Attribute &Attr : Attrs)
      takeKnownMaximum(Attr.getValueAsInt());

    Value &V = *getAssociatedValue().stripPointerCasts();
    takeKnownMaximum(V.getPointerAlignment(A.getDataLayout()).value());

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }
5299 Value &AssociatedValue = getAssociatedValue();
5301 return ChangeStatus::UNCHANGED;
5303 for (
const Use &U : AssociatedValue.
uses()) {
5305 if (
SI->getPointerOperand() == &AssociatedValue)
5306 if (
SI->getAlign() < getAssumedAlign()) {
5308 "Number of times alignment added to a store");
5309 SI->setAlignment(getAssumedAlign());
5310 InstrChanged = ChangeStatus::CHANGED;
5313 if (LI->getPointerOperand() == &AssociatedValue)
5314 if (LI->getAlign() < getAssumedAlign()) {
5315 LI->setAlignment(getAssumedAlign());
5317 "Number of times alignment added to a load");
5318 InstrChanged = ChangeStatus::CHANGED;
5321 if (RMW->getPointerOperand() == &AssociatedValue) {
5322 if (RMW->getAlign() < getAssumedAlign()) {
5324 "Number of times alignment added to atomicrmw");
5326 RMW->setAlignment(getAssumedAlign());
5327 InstrChanged = ChangeStatus::CHANGED;
5331 if (CAS->getPointerOperand() == &AssociatedValue) {
5332 if (CAS->getAlign() < getAssumedAlign()) {
5334 "Number of times alignment added to cmpxchg");
5335 CAS->setAlignment(getAssumedAlign());
5336 InstrChanged = ChangeStatus::CHANGED;
5344 Align InheritAlign =
5345 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5346 if (InheritAlign >= getAssumedAlign())
5347 return InstrChanged;
5348 return Changed | InstrChanged;
5356 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5357 SmallVectorImpl<Attribute> &Attrs)
const override {
5358 if (getAssumedAlign() > 1)
5360 Attribute::getWithAlignment(Ctx,
Align(getAssumedAlign())));
5364 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
5365 AAAlign::StateType &State) {
5366 bool TrackUse =
false;
5368 unsigned int KnownAlign =
5369 getKnownAlignForUse(
A, *
this, getAssociatedValue(), U,
I, TrackUse);
5370 State.takeKnownMaximum(KnownAlign);
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "align<" + std::to_string(getKnownAlign().value()) + "-" +
           std::to_string(getAssumedAlign().value()) + ">";
  }
};
/// Align attribute for a floating value.
struct AAAlignFloating : AAAlignImpl {
  AAAlignFloating(const IRPosition &IRP, Attributor &A) : AAAlignImpl(IRP, A) {}
5388 const DataLayout &
DL =
A.getDataLayout();
5391 bool UsedAssumedInformation =
false;
5393 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
5395 Values.
push_back({getAssociatedValue(), getCtxI()});
5398 Stripped = Values.
size() != 1 ||
5399 Values.
front().getValue() != &getAssociatedValue();
5403 auto VisitValueCB = [&](
Value &
V) ->
bool {
5407 DepClassTy::REQUIRED);
5408 if (!AA || (!Stripped &&
this == AA)) {
5410 unsigned Alignment = 1;
5423 Alignment =
V.getPointerAlignment(
DL).value();
5426 T.takeKnownMaximum(Alignment);
5427 T.indicatePessimisticFixpoint();
5430 const AAAlign::StateType &
DS = AA->
getState();
5433 return T.isValidState();
5436 for (
const auto &VAC : Values) {
5437 if (!VisitValueCB(*
VAC.getValue()))
5438 return indicatePessimisticFixpoint();
5451struct AAAlignReturned final
5452 : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
5453 using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
5454 AAAlignReturned(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5461struct AAAlignArgument final
5462 : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
5463 using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
5464 AAAlignArgument(
const IRPosition &IRP, Attributor &
A) :
Base(IRP,
A) {}
5471 if (
A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
5472 return ChangeStatus::UNCHANGED;
5473 return Base::manifest(
A);
5480struct AAAlignCallSiteArgument final : AAAlignFloating {
5481 AAAlignCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5482 : AAAlignFloating(IRP,
A) {}
5489 if (Argument *Arg = getAssociatedArgument())
5490 if (
A.getInfoCache().isInvolvedInMustTailCall(*Arg))
5491 return ChangeStatus::UNCHANGED;
5493 Align InheritAlign =
5494 getAssociatedValue().getPointerAlignment(
A.getDataLayout());
5495 if (InheritAlign >= getAssumedAlign())
5496 Changed = ChangeStatus::UNCHANGED;
5503 if (Argument *Arg = getAssociatedArgument()) {
5506 const auto *ArgAlignAA =
A.getAAFor<AAAlign>(
5509 takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
5519struct AAAlignCallSiteReturned final
5520 : AACalleeToCallSite<AAAlign, AAAlignImpl> {
5521 using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
5522 AAAlignCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5528 switch (
II->getIntrinsicID()) {
5529 case Intrinsic::ptrmask: {
5533 const auto *ConstVals =
A.getAAFor<AAPotentialConstantValues>(
5535 if (ConstVals && ConstVals->isValidState()) {
5536 unsigned ShiftValue =
5537 std::min(ConstVals->getAssumedMinTrailingZeros(),
5538 Value::MaxAlignmentExponent);
5539 Alignment =
Align(UINT64_C(1) << ShiftValue);
5543 const auto *AlignAA =
5545 DepClassTy::REQUIRED);
5547 Alignment = std::max(AlignAA->getAssumedAlign(), Alignment);
5554 std::min(this->getAssumedAlign(), Alignment).value());
5561 return Base::updateImpl(
A);
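// For llvm.ptrmask the alignment of the result can exceed that of the input
// pointer: if the mask has at least N trailing zero bits, the result is
// (1 << N)-aligned (capped at Value::MaxAlignmentExponent), and this is
// combined with the alignment deduced for the pointer operand above.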
struct AANoReturnImpl : public AANoReturn {
  AANoReturnImpl(const IRPosition &IRP, Attributor &A) : AANoReturn(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noreturn" : "may-return";
  }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override {
    // The function is noreturn as long as no (live) return instruction exists.
    auto CheckForNoReturn = [](Instruction &) { return false; };
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoReturn, *this,
                                   {(unsigned)Instruction::Ret},
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }
};

struct AANoReturnFunction final : AANoReturnImpl {
  AANoReturnFunction(const IRPosition &IRP, Attributor &A)
      : AANoReturnImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(noreturn) }
};

/// NoReturn attribute deduction for a call site.
struct AANoReturnCallSite final
    : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
  AANoReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoReturn, AANoReturnImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(noreturn) }
};
struct AAInstanceInfoImpl : public AAInstanceInfo {
  AAInstanceInfoImpl(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfo(IRP, A) {}
5627 Value &
V = getAssociatedValue();
5629 if (
C->isThreadDependent())
5630 indicatePessimisticFixpoint();
5632 indicateOptimisticFixpoint();
5638 indicateOptimisticFixpoint();
5643 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
5646 indicatePessimisticFixpoint();
5656 Value &
V = getAssociatedValue();
5659 Scope =
I->getFunction();
5662 if (!
Scope->hasLocalLinkage())
5666 return indicateOptimisticFixpoint();
5668 bool IsKnownNoRecurse;
5674 auto UsePred = [&](
const Use &
U,
bool &Follow) {
5689 if (!Callee || !
Callee->hasLocalLinkage())
5693 const auto *ArgInstanceInfoAA =
A.getAAFor<AAInstanceInfo>(
5695 DepClassTy::OPTIONAL);
5696 if (!ArgInstanceInfoAA ||
5697 !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
5702 A, *CB, *Scope, *
this,
nullptr,
5703 [Scope](
const Function &Fn) {
return &Fn !=
Scope; }))
5710 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
5712 auto *
Ptr =
SI->getPointerOperand()->stripPointerCasts();
5720 if (!
A.checkForAllUses(UsePred, *
this, V,
true,
5721 DepClassTy::OPTIONAL,
5722 true, EquivalentUseCB))
5723 return indicatePessimisticFixpoint();
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedUniqueForAnalysis() ? "<unique [fAa]>" : "<unknown>";
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};
5738struct AAInstanceInfoFloating : AAInstanceInfoImpl {
5739 AAInstanceInfoFloating(
const IRPosition &IRP, Attributor &
A)
5740 : AAInstanceInfoImpl(IRP,
A) {}
5744struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
5745 AAInstanceInfoArgument(
const IRPosition &IRP, Attributor &
A)
5746 : AAInstanceInfoFloating(IRP,
A) {}
5750struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
5751 AAInstanceInfoCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
5752 : AAInstanceInfoImpl(IRP,
A) {}
5760 Argument *Arg = getAssociatedArgument();
5762 return indicatePessimisticFixpoint();
5765 A.getAAFor<AAInstanceInfo>(*
this, ArgPos, DepClassTy::REQUIRED);
5767 return indicatePessimisticFixpoint();
5773struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
5774 AAInstanceInfoReturned(
const IRPosition &IRP, Attributor &
A)
5775 : AAInstanceInfoImpl(IRP,
A) {
5791struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
5792 AAInstanceInfoCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
5793 : AAInstanceInfoFloating(IRP,
A) {}
5800 bool IgnoreSubsumingPositions) {
5801 assert(ImpliedAttributeKind == Attribute::Captures &&
5802 "Unexpected attribute kind");
5812 V.getType()->getPointerAddressSpace() == 0)) {
5817 A.getAttrs(IRP, {Attribute::Captures}, Attrs,
5827 {Attribute::Captures, Attribute::ByVal}, Attrs,
5864 bool ReadOnly =
F.onlyReadsMemory();
5865 bool NoThrow =
F.doesNotThrow();
5866 bool IsVoidReturn =
F.getReturnType()->isVoidTy();
5867 if (ReadOnly && NoThrow && IsVoidReturn) {
5880 if (NoThrow && IsVoidReturn)
5885 if (!NoThrow || ArgNo < 0 ||
5886 !
F.getAttributes().hasAttrSomewhere(Attribute::Returned))
5889 for (
unsigned U = 0, E =
F.arg_size(); U < E; ++U)
5890 if (
F.hasParamAttribute(U, Attribute::Returned)) {
5891 if (U ==
unsigned(ArgNo))
5918 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
5919 SmallVectorImpl<Attribute> &Attrs)
const override {
5920 if (!isAssumedNoCaptureMaybeReturned())
5923 if (isArgumentPosition()) {
5924 if (isAssumedNoCapture())
5925 Attrs.emplace_back(Attribute::get(Ctx, Attribute::Captures));
5927 Attrs.emplace_back(Attribute::get(Ctx,
"no-capture-maybe-returned"));
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (isKnownNoCapture())
      return "known not-captured";
    if (isAssumedNoCapture())
      return "assumed not-captured";
    if (isKnownNoCaptureMaybeReturned())
      return "known not-captured-maybe-returned";
    if (isAssumedNoCaptureMaybeReturned())
      return "assumed not-captured-maybe-returned";
    return "assumed-captured";
  }
5946 bool checkUse(Attributor &
A, AANoCapture::StateType &State,
const Use &U,
5949 LLVM_DEBUG(
dbgs() <<
"[AANoCapture] Check use: " << *
U.get() <<
" in "
5955 return isCapturedIn(State,
true,
true,
5962 return isCapturedIn(State,
true,
true,
5968 return isCapturedIn(State,
false,
false,
5970 return isCapturedIn(State,
true,
true,
5978 return isCapturedIn(State,
true,
true,
5985 bool IsKnownNoCapture;
5986 const AANoCapture *ArgNoCaptureAA =
nullptr;
5988 A,
this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
5990 if (IsAssumedNoCapture)
5991 return isCapturedIn(State,
false,
false,
5995 return isCapturedIn(State,
false,
false,
6000 return isCapturedIn(State,
true,
true,
6007 static bool isCapturedIn(AANoCapture::StateType &State,
bool CapturedInMem,
6008 bool CapturedInInt,
bool CapturedInRet) {
6009 LLVM_DEBUG(
dbgs() <<
" - captures [Mem " << CapturedInMem <<
"|Int "
6010 << CapturedInInt <<
"|Ret " << CapturedInRet <<
"]\n");
6022 const IRPosition &IRP = getIRPosition();
6026 return indicatePessimisticFixpoint();
6033 return indicatePessimisticFixpoint();
6041 T.addKnownBits(NOT_CAPTURED_IN_MEM);
6043 addKnownBits(NOT_CAPTURED_IN_MEM);
6050 auto CheckReturnedArgs = [&](
bool &UsedAssumedInformation) {
6054 UsedAssumedInformation))
6056 bool SeenConstant =
false;
6057 for (
const AA::ValueAndContext &VAC : Values) {
6061 SeenConstant =
true;
6063 VAC.getValue() == getAssociatedArgument())
6069 bool IsKnownNoUnwind;
6072 bool IsVoidTy =
F->getReturnType()->isVoidTy();
6073 bool UsedAssumedInformation =
false;
6074 if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
6075 T.addKnownBits(NOT_CAPTURED_IN_RET);
6076 if (
T.isKnown(NOT_CAPTURED_IN_MEM))
6078 if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
6079 addKnownBits(NOT_CAPTURED_IN_RET);
6080 if (isKnown(NOT_CAPTURED_IN_MEM))
6081 return indicateOptimisticFixpoint();
6086 auto UseCheck = [&](
const Use &
U,
bool &Follow) ->
bool {
6095 return checkUse(
A,
T, U, Follow);
6098 if (!
A.checkForAllUses(UseCheck, *
this, *V))
6099 return indicatePessimisticFixpoint();
6102 auto Assumed = S.getAssumed();
6103 S.intersectAssumedBits(
T.getAssumed());
6104 if (!isAssumedNoCaptureMaybeReturned())
6105 return indicatePessimisticFixpoint();
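// Capture tracking distinguishes three escape routes for a pointer: into
// memory (NOT_CAPTURED_IN_MEM), into an integer (NOT_CAPTURED_IN_INT), and via
// the return value (NOT_CAPTURED_IN_RET). Full no-capture requires ruling out
// all three; "no-capture-maybe-returned" tolerates the return-value route.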
6111struct AANoCaptureArgument final : AANoCaptureImpl {
6112 AANoCaptureArgument(
const IRPosition &IRP, Attributor &
A)
6113 : AANoCaptureImpl(IRP,
A) {}
6120struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
6121 AANoCaptureCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6122 : AANoCaptureImpl(IRP,
A) {}
6130 Argument *Arg = getAssociatedArgument();
6132 return indicatePessimisticFixpoint();
6134 bool IsKnownNoCapture;
6135 const AANoCapture *ArgAA =
nullptr;
6137 A,
this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture,
false,
6139 return ChangeStatus::UNCHANGED;
6141 return indicatePessimisticFixpoint();
6146 void trackStatistics()
const override {
6152struct AANoCaptureFloating final : AANoCaptureImpl {
6153 AANoCaptureFloating(
const IRPosition &IRP, Attributor &
A)
6154 : AANoCaptureImpl(IRP,
A) {}
6157 void trackStatistics()
const override {
6163struct AANoCaptureReturned final : AANoCaptureImpl {
6164 AANoCaptureReturned(
const IRPosition &IRP, Attributor &
A)
6165 : AANoCaptureImpl(IRP,
A) {
6180 void trackStatistics()
const override {}
6184struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
6185 AANoCaptureCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6186 : AANoCaptureImpl(IRP,
A) {}
6192 determineFunctionCaptureCapabilities(getIRPosition(), *
F, *
this);
6196 void trackStatistics()
const override {
6213 dbgs() <<
"[ValueSimplify] is assumed to be "
6216 dbgs() <<
"[ValueSimplify] is assumed to be <none>\n";
6228 if (getAssociatedValue().
getType()->isVoidTy())
6229 indicatePessimisticFixpoint();
6230 if (
A.hasSimplificationCallback(getIRPosition()))
6231 indicatePessimisticFixpoint();
6235 const std::string getAsStr(Attributor *
A)
const override {
6237 dbgs() <<
"SAV: " << (bool)SimplifiedAssociatedValue <<
" ";
6238 if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
6239 dbgs() <<
"SAV: " << **SimplifiedAssociatedValue <<
" ";
6241 return isValidState() ? (isAtFixpoint() ?
"simplified" :
"maybe-simple")
6246 void trackStatistics()
const override {}
6249 std::optional<Value *>
6250 getAssumedSimplifiedValue(Attributor &
A)
const override {
6251 return SimplifiedAssociatedValue;
6258 static Value *ensureType(Attributor &
A,
Value &V,
Type &Ty, Instruction *CtxI,
6262 if (CtxI &&
V.getType()->canLosslesslyBitCastTo(&Ty))
6264 : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6273 static Value *reproduceInst(Attributor &
A,
6274 const AbstractAttribute &QueryingAA,
6275 Instruction &
I,
Type &Ty, Instruction *CtxI,
6277 assert(CtxI &&
"Cannot reproduce an instruction without context!");
6278 if (
Check && (
I.mayReadFromMemory() ||
6283 Value *NewOp = reproduceValue(
A, QueryingAA, *
Op, Ty, CtxI,
Check, VMap);
6285 assert(
Check &&
"Manifest of new value unexpectedly failed!");
6307 static Value *reproduceValue(Attributor &
A,
6308 const AbstractAttribute &QueryingAA,
Value &V,
6309 Type &Ty, Instruction *CtxI,
bool Check,
6311 if (
const auto &NewV = VMap.
lookup(&V))
6313 bool UsedAssumedInformation =
false;
6314 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
6316 if (!SimpleV.has_value())
6320 EffectiveV = *SimpleV;
6325 return ensureType(
A, *EffectiveV, Ty, CtxI,
Check);
6327 if (
Value *NewV = reproduceInst(
A, QueryingAA, *
I, Ty, CtxI,
Check, VMap))
6328 return ensureType(
A, *NewV, Ty, CtxI,
Check);
6334 Value *manifestReplacementValue(Attributor &
A, Instruction *CtxI)
const {
6335 Value *NewV = SimplifiedAssociatedValue
6336 ? *SimplifiedAssociatedValue
6338 if (NewV && NewV != &getAssociatedValue()) {
6342 if (reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6344 return reproduceValue(
A, *
this, *NewV, *getAssociatedType(), CtxI,
6352 bool checkAndUpdate(Attributor &
A,
const AbstractAttribute &QueryingAA,
6353 const IRPosition &IRP,
bool Simplify =
true) {
6354 bool UsedAssumedInformation =
false;
6357 QueryingValueSimplified =
A.getAssumedSimplified(
6359 return unionAssumed(QueryingValueSimplified);
6363 template <
typename AAType>
bool askSimplifiedValueFor(Attributor &
A) {
6364 if (!getAssociatedValue().
getType()->isIntegerTy())
6369 A.getAAFor<AAType>(*
this, getIRPosition(), DepClassTy::NONE);
6373 std::optional<Constant *> COpt = AA->getAssumedConstant(
A);
6376 SimplifiedAssociatedValue = std::nullopt;
6377 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6380 if (
auto *
C = *COpt) {
6381 SimplifiedAssociatedValue =
C;
6382 A.recordDependence(*AA, *
this, DepClassTy::OPTIONAL);
6388 bool askSimplifiedValueForOtherAAs(Attributor &
A) {
6389 if (askSimplifiedValueFor<AAValueConstantRange>(
A))
6391 if (askSimplifiedValueFor<AAPotentialConstantValues>(
A))
6399 for (
auto &U : getAssociatedValue().uses()) {
6404 IP =
PHI->getIncomingBlock(U)->getTerminator();
6405 if (
auto *NewV = manifestReplacementValue(
A, IP)) {
6407 <<
" -> " << *NewV <<
" :: " << *
this <<
"\n");
6408 if (
A.changeUseAfterManifest(U, *NewV))
6409 Changed = ChangeStatus::CHANGED;
6413 return Changed | AAValueSimplify::manifest(
A);
6418 SimplifiedAssociatedValue = &getAssociatedValue();
6419 return AAValueSimplify::indicatePessimisticFixpoint();
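// SimplifiedAssociatedValue encodes the simplification state: std::nullopt
// while nothing is known yet, a concrete Value * once a replacement candidate
// has been found, and the associated value itself at a pessimistic fixpoint,
// i.e. when no simplification is possible.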
struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
  AAValueSimplifyArgument(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}
6428 AAValueSimplifyImpl::initialize(
A);
6429 if (
A.hasAttr(getIRPosition(),
6430 {Attribute::InAlloca, Attribute::Preallocated,
6431 Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
6433 indicatePessimisticFixpoint();
6440 Argument *Arg = getAssociatedArgument();
6446 return indicatePessimisticFixpoint();
6449 auto Before = SimplifiedAssociatedValue;
6451 auto PredForCallSite = [&](AbstractCallSite ACS) {
6452 const IRPosition &ACSArgPos =
6463 bool UsedAssumedInformation =
false;
6464 std::optional<Constant *> SimpleArgOp =
6465 A.getAssumedConstant(ACSArgPos, *
this, UsedAssumedInformation);
6472 return unionAssumed(*SimpleArgOp);
6477 bool UsedAssumedInformation =
false;
6478 if (hasCallBaseContext() &&
6479 getCallBaseContext()->getCalledOperand() == Arg->
getParent())
6481 AbstractCallSite(&getCallBaseContext()->getCalledOperandUse()));
6483 Success =
A.checkForAllCallSites(PredForCallSite, *
this,
true,
6484 UsedAssumedInformation);
6487 if (!askSimplifiedValueForOtherAAs(
A))
6488 return indicatePessimisticFixpoint();
6491 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6492 : ChangeStatus ::CHANGED;
6496 void trackStatistics()
const override {
6501struct AAValueSimplifyReturned : AAValueSimplifyImpl {
6502 AAValueSimplifyReturned(
const IRPosition &IRP, Attributor &
A)
6503 : AAValueSimplifyImpl(IRP,
A) {}
6506 std::optional<Value *>
6507 getAssumedSimplifiedValue(Attributor &
A)
const override {
6508 if (!isValidState())
6510 return SimplifiedAssociatedValue;
6515 auto Before = SimplifiedAssociatedValue;
6519 return checkAndUpdate(
6524 bool UsedAssumedInformation =
false;
6525 if (!
A.checkForAllInstructions(ReturnInstCB, *
this, {Instruction::Ret},
6526 UsedAssumedInformation))
6527 if (!askSimplifiedValueForOtherAAs(
A))
6528 return indicatePessimisticFixpoint();
6531 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6532 : ChangeStatus ::CHANGED;
6538 return ChangeStatus::UNCHANGED;
6542 void trackStatistics()
const override {
struct AAValueSimplifyFloating : AAValueSimplifyImpl {
  AAValueSimplifyFloating(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}
6553 AAValueSimplifyImpl::initialize(
A);
6554 Value &
V = getAnchorValue();
6558 indicatePessimisticFixpoint();
6563 auto Before = SimplifiedAssociatedValue;
6564 if (!askSimplifiedValueForOtherAAs(
A))
6565 return indicatePessimisticFixpoint();
6568 return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
6569 : ChangeStatus ::CHANGED;
6573 void trackStatistics()
const override {
6578struct AAValueSimplifyFunction : AAValueSimplifyImpl {
6579 AAValueSimplifyFunction(
const IRPosition &IRP, Attributor &
A)
6580 : AAValueSimplifyImpl(IRP,
A) {}
6584 SimplifiedAssociatedValue =
nullptr;
6585 indicateOptimisticFixpoint();
6590 "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
6593 void trackStatistics()
const override {
6598struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
6599 AAValueSimplifyCallSite(
const IRPosition &IRP, Attributor &
A)
6600 : AAValueSimplifyFunction(IRP,
A) {}
6602 void trackStatistics()
const override {
6607struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
6608 AAValueSimplifyCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
6609 : AAValueSimplifyImpl(IRP,
A) {}
6612 AAValueSimplifyImpl::initialize(
A);
6613 Function *Fn = getAssociatedFunction();
6614 assert(Fn &&
"Did expect an associted function");
6615 for (Argument &Arg : Fn->
args()) {
6620 checkAndUpdate(
A, *
this, IRP))
6621 indicateOptimisticFixpoint();
6623 indicatePessimisticFixpoint();
6631 return indicatePessimisticFixpoint();
6634 void trackStatistics()
const override {
6639struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
6640 AAValueSimplifyCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
6641 : AAValueSimplifyFloating(IRP,
A) {}
6647 auto *FloatAA =
A.lookupAAFor<AAValueSimplify>(
6649 if (FloatAA && FloatAA->getState().isValidState())
6652 if (
auto *NewV = manifestReplacementValue(
A, getCtxI())) {
6654 ->getArgOperandUse(getCallSiteArgNo());
6655 if (
A.changeUseAfterManifest(U, *NewV))
6656 Changed = ChangeStatus::CHANGED;
6659 return Changed | AAValueSimplify::manifest(
A);
6662 void trackStatistics()
const override {
6670struct AAHeapToStackFunction final :
public AAHeapToStack {
6672 struct AllocationInfo {
6684 } Status = STACK_DUE_TO_USE;
6688 bool HasPotentiallyFreeingUnknownUses =
false;
6692 bool MoveAllocaIntoEntry =
true;
6695 SmallSetVector<CallBase *, 1> PotentialFreeCalls{};
6698 struct DeallocationInfo {
6706 bool MightFreeUnknownObjects =
false;
6709 SmallSetVector<CallBase *, 1> PotentialAllocationCalls{};
6712 AAHeapToStackFunction(
const IRPosition &IRP, Attributor &
A)
6713 : AAHeapToStack(IRP,
A) {}
  ~AAHeapToStackFunction() override {
    // Ensure we call the destructors so we release any memory allocated in the
    // sets.
    for (auto &It : AllocationInfos)
      It.second->~AllocationInfo();
    for (auto &It : DeallocationInfos)
      It.second->~DeallocationInfo();
  }
6725 AAHeapToStack::initialize(
A);
6728 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6735 DeallocationInfos[CB] =
new (
A.Allocator) DeallocationInfo{CB, FreedOp};
6742 auto *I8Ty = Type::getInt8Ty(CB->
getParent()->getContext());
6744 AllocationInfo *AI =
new (
A.Allocator) AllocationInfo{CB};
6745 AllocationInfos[CB] = AI;
6747 TLI->getLibFunc(*CB, AI->LibraryFunctionId);
6753 bool UsedAssumedInformation =
false;
6754 bool Success =
A.checkForAllCallLikeInstructions(
6755 AllocationIdentifierCB, *
this, UsedAssumedInformation,
6759 assert(
Success &&
"Did not expect the call base visit callback to fail!");
6762 [](
const IRPosition &,
const AbstractAttribute *,
6763 bool &) -> std::optional<Value *> {
return nullptr; };
6764 for (
const auto &It : AllocationInfos)
6767 for (
const auto &It : DeallocationInfos)
  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
    for (const auto &It : AllocationInfos) {
      if (It.second->Status == AllocationInfo::INVALID)
        ++NumInvalidMallocs;
      else
        ++NumH2SMallocs;
    }
    return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) + "/" +
           std::to_string(NumInvalidMallocs);
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECL(
        MallocCalls, Function,
        "Number of malloc/calloc/aligned_alloc calls converted to allocas");
    for (const auto &It : AllocationInfos)
      if (It.second->Status != AllocationInfo::INVALID)
        ++BUILD_STAT_NAME(MallocCalls, Function);
  }
6794 bool isAssumedHeapToStack(
const CallBase &CB)
const override {
6796 if (AllocationInfo *AI =
6797 AllocationInfos.lookup(
const_cast<CallBase *
>(&CB)))
6798 return AI->Status != AllocationInfo::INVALID;
6802 bool isAssumedHeapToStackRemovedFree(CallBase &CB)
const override {
6803 if (!isValidState())
6806 for (
const auto &It : AllocationInfos) {
6807 AllocationInfo &AI = *It.second;
6808 if (AI.Status == AllocationInfo::INVALID)
6811 if (AI.PotentialFreeCalls.count(&CB))
6819 assert(getState().isValidState() &&
6820 "Attempted to manifest an invalid state!");
6824 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6826 for (
auto &It : AllocationInfos) {
6827 AllocationInfo &AI = *It.second;
6828 if (AI.Status == AllocationInfo::INVALID)
6831 for (CallBase *FreeCall : AI.PotentialFreeCalls) {
6832 LLVM_DEBUG(
dbgs() <<
"H2S: Removing free call: " << *FreeCall <<
"\n");
6833 A.deleteAfterManifest(*FreeCall);
6834 HasChanged = ChangeStatus::CHANGED;
6837 LLVM_DEBUG(
dbgs() <<
"H2S: Removing malloc-like call: " << *AI.CB
6840 auto Remark = [&](OptimizationRemark
OR) {
6842 if (TLI->getLibFunc(*AI.CB, IsAllocShared))
6843 if (IsAllocShared == LibFunc___kmpc_alloc_shared)
6844 return OR <<
"Moving globalized variable to the stack.";
6845 return OR <<
"Moving memory allocation from the heap to the stack.";
6847 if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
6848 A.emitRemark<OptimizationRemark>(AI.CB,
"OMP110",
Remark);
6850 A.emitRemark<OptimizationRemark>(AI.CB,
"HeapToStack",
Remark);
6852 const DataLayout &
DL =
A.getInfoCache().getDL();
6854 std::optional<APInt> SizeAPI =
getSize(
A, *
this, AI);
6856 Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
6858 LLVMContext &Ctx = AI.CB->getContext();
6859 ObjectSizeOpts Opts;
6860 ObjectSizeOffsetEvaluator Eval(
DL, TLI, Ctx, Opts);
6861 SizeOffsetValue SizeOffsetPair = Eval.compute(AI.CB);
6868 ?
F->getEntryBlock().begin()
6869 : AI.CB->getIterator();
6872 if (MaybeAlign RetAlign = AI.CB->getRetAlign())
6873 Alignment = std::max(Alignment, *RetAlign);
6875 std::optional<APInt> AlignmentAPI = getAPInt(
A, *
this, *Align);
6876 assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
6877 "Expected an alignment during manifest!");
6879 std::max(Alignment,
assumeAligned(AlignmentAPI->getZExtValue()));
6883 unsigned AS =
DL.getAllocaAddrSpace();
6885 new AllocaInst(Type::getInt8Ty(
F->getContext()), AS,
Size, Alignment,
6886 AI.CB->getName() +
".h2s", IP);
6888 if (Alloca->
getType() != AI.CB->getType())
6889 Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
6890 Alloca, AI.CB->getType(),
"malloc_cast", AI.CB->getIterator());
6892 auto *I8Ty = Type::getInt8Ty(
F->getContext());
6895 "Must be able to materialize initial memory state of allocation");
6900 auto *NBB =
II->getNormalDest();
6902 A.deleteAfterManifest(*AI.CB);
6904 A.deleteAfterManifest(*AI.CB);
6913 Builder.CreateMemSet(Alloca, InitVal,
Size, std::nullopt);
6915 HasChanged = ChangeStatus::CHANGED;
6921 std::optional<APInt> getAPInt(Attributor &
A,
const AbstractAttribute &AA,
6923 bool UsedAssumedInformation =
false;
6924 std::optional<Constant *> SimpleV =
6925 A.getAssumedConstant(V, AA, UsedAssumedInformation);
6927 return APInt(64, 0);
6929 return CI->getValue();
6930 return std::nullopt;
6933 std::optional<APInt>
getSize(Attributor &
A,
const AbstractAttribute &AA,
6934 AllocationInfo &AI) {
6935 auto Mapper = [&](
const Value *
V) ->
const Value * {
6936 bool UsedAssumedInformation =
false;
6937 if (std::optional<Constant *> SimpleV =
6938 A.getAssumedConstant(*V, AA, UsedAssumedInformation))
6945 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6951 MapVector<CallBase *, AllocationInfo *> AllocationInfos;
6955 MapVector<CallBase *, DeallocationInfo *> DeallocationInfos;
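// AAHeapToStackFunction tracks every allocation-like call (AllocationInfos)
// and every deallocation-like call (DeallocationInfos) in the function. An
// allocation is rewritten into an alloca during manifest only if its size and
// alignment are known and small enough, all frees of it can be removed, and no
// use lets the pointer escape to another thread or an unknown callee.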
6960ChangeStatus AAHeapToStackFunction::updateImpl(Attributor &
A) {
6963 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6965 const auto *LivenessAA =
6968 MustBeExecutedContextExplorer *Explorer =
6969 A.getInfoCache().getMustBeExecutedContextExplorer();
6971 bool StackIsAccessibleByOtherThreads =
6972 A.getInfoCache().stackIsAccessibleByOtherThreads();
6975 A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(*F);
6976 std::optional<bool> MayContainIrreducibleControl;
6978 if (&
F->getEntryBlock() == &BB)
6980 if (!MayContainIrreducibleControl.has_value())
6982 if (*MayContainIrreducibleControl)
6991 bool HasUpdatedFrees =
false;
6993 auto UpdateFrees = [&]() {
6994 HasUpdatedFrees =
true;
6996 for (
auto &It : DeallocationInfos) {
6997 DeallocationInfo &DI = *It.second;
7000 if (DI.MightFreeUnknownObjects)
7004 bool UsedAssumedInformation =
false;
7005 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
7012 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
7013 DI.MightFreeUnknownObjects =
true;
7026 DI.MightFreeUnknownObjects =
true;
7030 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
7032 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
7034 DI.MightFreeUnknownObjects =
true;
7038 DI.PotentialAllocationCalls.insert(ObjCB);
  auto FreeCheck = [&](AllocationInfo &AI) {
    // If the stack is not accessible by other threads, the allocation can only
    // be moved if the function is (assumed) nosync.
    if (!StackIsAccessibleByOtherThreads) {
      bool IsKnownNoSync;
      if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
              A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoSync)) {
        LLVM_DEBUG(
            dbgs() << "[H2S] found an escaping use, stack is not accessible by "
                      "other threads and function is not nosync:\n");
        return false;
      }
    }
    if (!HasUpdatedFrees)
      UpdateFrees();

    if (AI.PotentialFreeCalls.size() != 1) {
      LLVM_DEBUG(dbgs() << "[H2S] did not find one free call but "
                        << AI.PotentialFreeCalls.size() << "\n");
      return false;
    }
    CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
    DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
    if (!DI) {
      LLVM_DEBUG(
          dbgs() << "[H2S] unique free call was not known as deallocation call "
                 << *UniqueFree << "\n");
      return false;
    }
    if (DI->MightFreeUnknownObjects) {
      LLVM_DEBUG(
          dbgs() << "[H2S] unique free call might free unknown allocations\n");
      return false;
    }
    if (DI->PotentialAllocationCalls.empty())
      return true;
    if (DI->PotentialAllocationCalls.size() > 1) {
      LLVM_DEBUG(dbgs() << "[H2S] unique free call might free "
                        << DI->PotentialAllocationCalls.size()
                        << " different allocations\n");
      return false;
    }
    if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
      LLVM_DEBUG(
          dbgs()
          << "[H2S] unique free call not known to free this allocation but "
          << **DI->PotentialAllocationCalls.begin() << "\n");
      return false;
    }

    // __kmpc_alloc_shared and __kmpc_free_shared are matched by construction.
    if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
      Instruction *CtxI = isa<InvokeInst>(AI.CB) ? AI.CB : AI.CB->getNextNode();
      if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
        LLVM_DEBUG(dbgs() << "[H2S] unique free call might not be executed "
                             "with the allocation "
                          << *UniqueFree << "\n");
        return false;
      }
    }
    return true;
  };
  auto UsesCheck = [&](AllocationInfo &AI) {
    bool ValidUsesOnly = true;

    auto Pred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());
      if (auto *SI = dyn_cast<StoreInst>(UserI)) {
        if (SI->getValueOperand() == U.get()) {
          LLVM_DEBUG(dbgs()
                     << "[H2S] escaping store to memory: " << *UserI << "\n");
          ValidUsesOnly = false;
        }
        return true;
      }
      if (auto *CB = dyn_cast<CallBase>(UserI)) {
        if (DeallocationInfos.count(CB)) {
          AI.PotentialFreeCalls.insert(CB);
          return true;
        }
        // Check capture and free behavior of the call site argument.
        bool IsKnownNoCapture;
        // ... (nocapture / nofree queries for the call site argument) ...
        if (!IsAssumedNoCapture ||
            (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
             !IsAssumedNoFree)) {
          AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;

          // Emit a missed remark if this is missed OpenMP globalization.
          auto Remark = [&](OptimizationRemarkMissed ORM) {
            return ORM
                   << "Could not move globalized variable to the stack. "
                      "Variable is potentially captured in call. Mark "
                      "parameter as `__attribute__((noescape))` to override.";
          };

          if (ValidUsesOnly &&
              AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
            A.emitRemark<OptimizationRemarkMissed>(CB, "OMP113", Remark);

          ValidUsesOnly = false;
        }
        return true;
      }
      // ...
      ValidUsesOnly = false;
      return true;
    };
    if (!A.checkForAllUses(Pred, *this, *AI.CB,
                           /* CheckBBLivenessOnly */ false, /* ... */
                           [&](const Use &OldU, const Use &NewU) {
                             auto *SI = dyn_cast<StoreInst>(OldU.getUser());
                             return !SI || StackIsAccessibleByOtherThreads ||
                                    AA::isAssumedThreadLocalObject(
                                        A, *SI->getPointerOperand(), *this);
                           }))
      return false;
    return ValidUsesOnly;
  };
  for (auto &It : AllocationInfos) {
    AllocationInfo &AI = *It.second;
    if (AI.Status == AllocationInfo::INVALID)
      continue;

    if (Value *Align = getAllocAlignment(AI.CB, TLI)) {
      std::optional<APInt> APAlign = getAPInt(A, *this, *Align);
      if (!APAlign) {
        LLVM_DEBUG(dbgs() << "[H2S] Unknown allocation alignment: " << *AI.CB
                          << "\n");
        AI.Status = AllocationInfo::INVALID;
        Changed = ChangeStatus::CHANGED;
        continue;
      }
      if (APAlign->ugt(llvm::Value::MaximumAlignment) ||
          !APAlign->isPowerOf2()) {
        LLVM_DEBUG(dbgs() << "[H2S] Invalid allocation alignment: " << APAlign
                          << "\n");
        AI.Status = AllocationInfo::INVALID;
        Changed = ChangeStatus::CHANGED;
        continue;
      }
    }

    std::optional<APInt> Size = getSize(A, *this, AI);
    if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
        MaxHeapToStackSize != -1) {
      if (!Size || Size->ugt(MaxHeapToStackSize)) {
        LLVM_DEBUG({
          if (!Size)
            dbgs() << "[H2S] Unknown allocation size: " << *AI.CB << "\n";
          else
            dbgs() << "[H2S] Allocation size too large: " << *AI.CB << " vs. "
                   << MaxHeapToStackSize << "\n";
        });
        AI.Status = AllocationInfo::INVALID;
        Changed = ChangeStatus::CHANGED;
        continue;
      }
    }

    switch (AI.Status) {
    case AllocationInfo::STACK_DUE_TO_USE:
      if (UsesCheck(AI))
        break;
      AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
      [[fallthrough]];
    case AllocationInfo::STACK_DUE_TO_FREE:
      if (FreeCheck(AI))
        break;
      AI.Status = AllocationInfo::INVALID;
      Changed = ChangeStatus::CHANGED;
      break;
    case AllocationInfo::INVALID:
      llvm_unreachable("Invalid allocations should never reach this point!");
    }

    // Check if we still think we can move the allocation into the entry block.
    bool IsGlobalizedLocal =
        AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
    if (AI.MoveAllocaIntoEntry &&
        (!Size.has_value() ||
         (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
      AI.MoveAllocaIntoEntry = false;
  }

  return Changed;
}
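/// ----------------------- Privatizable Pointers ------------------------------
/// The AAPrivatizablePtr deduction below determines whether a pointer argument
/// can be replaced by a private, per-call-site copy of the pointed-to memory.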
struct AAPrivatizablePtrImpl : public AAPrivatizablePtr {
  AAPrivatizablePtrImpl(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtr(IRP, A), PrivatizableType(std::nullopt) {}

  ChangeStatus indicatePessimisticFixpoint() override {
    AAPrivatizablePtr::indicatePessimisticFixpoint();
    PrivatizableType = nullptr;
    return ChangeStatus::CHANGED;
  }

  /// Identify the type we can choose for a private copy of the underlying
  /// memory. std::nullopt means it is not clear yet, nullptr means there is
  /// none.
  virtual std::optional<Type *> identifyPrivatizableType(Attributor &A) = 0;

  /// Return a privatizable type that encloses both T0 and T1.
  std::optional<Type *> combineTypes(std::optional<Type *> T0,
                                     std::optional<Type *> T1) {
    if (!T0)
      return T1;
    if (!T1)
      return T0;
    if (T0 == T1)
      return T0;
    return nullptr;
  }

  std::optional<Type *> getPrivatizableType() const override {
    return PrivatizableType;
  }

  const std::string getAsStr(Attributor *A) const override {
    return isAssumedPrivatizablePtr() ? "[priv]" : "[no-priv]";
  }

protected:
  std::optional<Type *> PrivatizableType;
};
struct AAPrivatizablePtrArgument final : public AAPrivatizablePtrImpl {
  AAPrivatizablePtrArgument(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrImpl(IRP, A) {}

  /// See AAPrivatizablePtrImpl::identifyPrivatizableType(...)
  std::optional<Type *> identifyPrivatizableType(Attributor &A) override {
    // If this is a byval argument and we know all the call sites (so we can
    // rewrite them), there is no need to check them explicitly.
    bool UsedAssumedInformation = false;
    SmallVector<Attribute, 1> Attrs;
    A.getAttrs(getIRPosition(), {Attribute::ByVal}, Attrs,
               /* IgnoreSubsumingPositions */ true);
    if (!Attrs.empty() &&
        A.checkForAllCallSites([](AbstractCallSite ACS) { return true; }, *this,
                               true, UsedAssumedInformation))
      return Attrs[0].getValueAsType();

    std::optional<Type *> Ty;
    unsigned ArgNo = getIRPosition().getCallSiteArgNo();

    // Make sure all call sites agree on a type of the privatizable pointer.
    auto CallSiteCheck = [&](AbstractCallSite ACS) {
      IRPosition ACSArgPos = IRPosition::callsite_argument(ACS, ArgNo);
      // ...
      const auto *PrivCSArgAA =
          A.getAAFor<AAPrivatizablePtr>(*this, ACSArgPos, DepClassTy::REQUIRED);
      if (!PrivCSArgAA)
        return false;
      std::optional<Type *> CSTy = PrivCSArgAA->getPrivatizableType();

      LLVM_DEBUG({
        dbgs() << "[AAPrivatizablePtr] ACSPos: " << ACSArgPos << ", CSTy: ";
        if (CSTy && *CSTy)
          (*CSTy)->print(dbgs());
        else
          dbgs() << "<nullptr>";
      });

      Ty = combineTypes(Ty, CSTy);

      LLVM_DEBUG({
        dbgs() << " : New Type: ";
        if (Ty && *Ty)
          (*Ty)->print(dbgs());
        else
          dbgs() << "<nullptr>";
        dbgs() << "\n";
      });

      return !Ty || *Ty;
    };

    if (!A.checkForAllCallSites(CallSiteCheck, *this, true,
                                UsedAssumedInformation))
      return nullptr;
    return Ty;
  }
  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    PrivatizableType = identifyPrivatizableType(A);
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    if (!*PrivatizableType)
      return indicatePessimisticFixpoint();

    // The dependence is optional so we don't give up once we give up on the
    // alignment.
    A.getAAFor<AAAlign>(*this, IRPosition::value(getAssociatedValue()),
                        DepClassTy::OPTIONAL);

    // Avoid types that are not densely packed (unless passed byval).
    if (!A.hasAttr(getIRPosition(), Attribute::ByVal) &&
        !isDenselyPacked(*PrivatizableType, A.getDataLayout()))
      return indicatePessimisticFixpoint();

    // Collect the types that will replace the privatizable type in the
    // function signature.
    SmallVector<Type *, 16> ReplacementTypes;
    identifyReplacementTypes(*PrivatizableType, ReplacementTypes);

    // Verify callee and caller agree on how the promoted argument would be
    // passed.
    Function &Fn = *getIRPosition().getAnchorScope();
    const auto *TTI =
        A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(Fn);
    if (!TTI) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Missing TTI for function "
                        << Fn.getName() << "\n");
      return indicatePessimisticFixpoint();
    }

    auto CallSiteCheck = [&](AbstractCallSite ACS) {
      CallBase *CB = ACS.getInstruction();
      return TTI->areTypesABICompatible(
          CB->getCaller(),
          dyn_cast_if_present<Function>(CB->getCalledOperand()),
          ReplacementTypes);
    };
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallSites(CallSiteCheck, *this, true,
                                UsedAssumedInformation)) {
      LLVM_DEBUG(
          dbgs() << "[AAPrivatizablePtr] ABI incompatibility detected for "
                 << Fn.getName() << "\n");
      return indicatePessimisticFixpoint();
    }

    // Register a rewrite of the argument.
    Argument *Arg = getAssociatedArgument();
    if (!A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Rewrite not valid\n");
      return indicatePessimisticFixpoint();
    }
7446 auto IsCompatiblePrivArgOfCallback = [&](CallBase &CB) {
7449 for (
const Use *U : CallbackUses) {
7450 AbstractCallSite CBACS(U);
7451 assert(CBACS && CBACS.isCallbackCall());
7452 for (Argument &CBArg : CBACS.getCalledFunction()->args()) {
7453 int CBArgNo = CBACS.getCallArgOperandNo(CBArg);
7457 <<
"[AAPrivatizablePtr] Argument " << *Arg
7458 <<
"check if can be privatized in the context of its parent ("
7460 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7462 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7463 <<
")\n[AAPrivatizablePtr] " << CBArg <<
" : "
7464 << CBACS.getCallArgOperand(CBArg) <<
" vs "
7466 <<
"[AAPrivatizablePtr] " << CBArg <<
" : "
7467 << CBACS.getCallArgOperandNo(CBArg) <<
" vs " << ArgNo <<
"\n";
7470 if (CBArgNo !=
int(ArgNo))
7472 const auto *CBArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7474 if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
7475 auto CBArgPrivTy = CBArgPrivAA->getPrivatizableType();
7478 if (*CBArgPrivTy == PrivatizableType)
7483 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7484 <<
" cannot be privatized in the context of its parent ("
7486 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7488 << CBArgNo <<
"@" << CBACS.getCalledFunction()->getName()
7489 <<
").\n[AAPrivatizablePtr] for which the argument "
7490 "privatization is not compatible.\n";
7500 auto IsCompatiblePrivArgOfDirectCS = [&](AbstractCallSite ACS) {
7504 "Expected a direct call operand for callback call operand");
7509 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7510 <<
" check if be privatized in the context of its parent ("
7512 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7514 << DCArgNo <<
"@" << DCCallee->
getName() <<
").\n";
7517 if (
unsigned(DCArgNo) < DCCallee->
arg_size()) {
7518 const auto *DCArgPrivAA =
A.getAAFor<AAPrivatizablePtr>(
7520 DepClassTy::REQUIRED);
7521 if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
7522 auto DCArgPrivTy = DCArgPrivAA->getPrivatizableType();
7525 if (*DCArgPrivTy == PrivatizableType)
7531 dbgs() <<
"[AAPrivatizablePtr] Argument " << *Arg
7532 <<
" cannot be privatized in the context of its parent ("
7534 <<
")\n[AAPrivatizablePtr] because it is an argument in a "
7537 <<
").\n[AAPrivatizablePtr] for which the argument "
7538 "privatization is not compatible.\n";
7546 auto IsCompatiblePrivArgOfOtherCallSite = [&](AbstractCallSite ACS) {
7550 return IsCompatiblePrivArgOfDirectCS(ACS);
7554 if (!
A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *
this,
true,
7555 UsedAssumedInformation))
7556 return indicatePessimisticFixpoint();
7558 return ChangeStatus::UNCHANGED;
  /// Given a privatizable type in \p PrivType, collect the constituent types
  /// in \p ReplacementTypes.
  static void
  identifyReplacementTypes(Type *PrivType,
                           SmallVectorImpl<Type *> &ReplacementTypes) {
    assert(PrivType && "Expected privatizable type!");

    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
        ReplacementTypes.push_back(PrivStructType->getElementType(u));
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      ReplacementTypes.append(PrivArrayType->getNumElements(),
                              PrivArrayType->getElementType());
    } else {
      ReplacementTypes.push_back(PrivType);
    }
  }

  /// Initialize \p Base according to the type \p PrivType at position \p IP,
  /// using the values passed as arguments of \p F starting at \p ArgNo.
  static void createInitialization(Type *PrivType, Value &Base, Function &F,
                                   unsigned ArgNo, BasicBlock::iterator IP) {
    assert(PrivType && "Expected privatizable type!");

    const DataLayout &DL = F.getDataLayout();

    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      const StructLayout *PrivStructLayout = DL.getStructLayout(PrivStructType);
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
        // ... (Ptr = address of struct element u within Base) ...
        new StoreInst(F.getArg(ArgNo + u), Ptr, IP);
      }
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      Type *PointeeTy = PrivArrayType->getElementType();
      uint64_t PointeeTySize = DL.getTypeStoreSize(PointeeTy);
      for (unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
        // ... (Ptr = address of array element u within Base) ...
        new StoreInst(F.getArg(ArgNo + u), Ptr, IP);
      }
    } else {
      new StoreInst(F.getArg(ArgNo), &Base, IP);
    }
  }
  /// Extract values from \p Base according to the type \p PrivType at the
  /// call site \p ACS. The extracted values are appended to
  /// \p ReplacementValues.
  void createReplacementValues(Align Alignment, Type *PrivType,
                               AbstractCallSite ACS, Value *Base,
                               SmallVectorImpl<Value *> &ReplacementValues) {
    assert(PrivType && "Expected privatizable type!");

    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      const StructLayout *PrivStructLayout = DL.getStructLayout(PrivStructType);
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
        Type *PointeeTy = PrivStructType->getElementType(u);
        // ... (L = load of PointeeTy from the element address) ...
        L->setAlignment(Alignment);
        ReplacementValues.push_back(L);
      }
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      Type *PointeeTy = PrivArrayType->getElementType();
      uint64_t PointeeTySize = DL.getTypeStoreSize(PointeeTy);
      for (unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
        // ... (L = load of PointeeTy from the element address) ...
        L->setAlignment(Alignment);
        ReplacementValues.push_back(L);
      }
    } else {
      // ... (L = load of PrivType from Base) ...
      L->setAlignment(Alignment);
      ReplacementValues.push_back(L);
    }
  }
  /// See AbstractAttribute::manifest(...)
  ChangeStatus manifest(Attributor &A) override {
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    assert(*PrivatizableType && "Expected privatizable type!");

    // Collect all tail calls in the function as we cannot allow new allocas to
    // escape into tail recursion.
    SmallVector<CallInst *, 16> TailCalls;
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
            [&](Instruction &I) {
              CallInst &CI = cast<CallInst>(I);
              if (CI.isTailCall())
                TailCalls.push_back(&CI);
              return true;
            },
            *this, {Instruction::Call}, UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;

    Argument *Arg = getAssociatedArgument();
    // Query AAAlign attribute for alignment of associated argument to
    // determine the best alignment of loads.
    const auto *AlignAA =
        A.getAAFor<AAAlign>(*this, IRPosition::value(*Arg), DepClassTy::NONE);

    // Callback to repair the associated function. A new alloca is placed at
    // the beginning and initialized with the values passed through arguments.
    // The new alloca replaces the use of the old pointer argument.
    Attributor::ArgumentReplacementInfo::CalleeRepairCBTy FnRepairCB =
        [=](const Attributor::ArgumentReplacementInfo &ARI,
            Function &ReplacementFn, Function::arg_iterator ArgIt) {
          BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
          BasicBlock::iterator IP = EntryBB.getFirstInsertionPt();
          const DataLayout &DL = IP->getDataLayout();
          unsigned AS = DL.getAllocaAddrSpace();
          Instruction *AI = new AllocaInst(*PrivatizableType, AS,
                                           Arg->getName() + ".priv", IP);
          createInitialization(*PrivatizableType, *AI, ReplacementFn,
                               ArgIt->getArgNo(), IP);

          if (AI->getType() != Arg->getType())
            AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
                AI, Arg->getType(), "", IP);
          Arg->replaceAllUsesWith(AI);

          for (CallInst *CI : TailCalls)
            CI->setTailCall(false);
        };

    // Callback to repair a call site of the associated function. The elements
    // of the privatizable type are loaded prior to the call and passed to the
    // new function version.
    Attributor::ArgumentReplacementInfo::ACSRepairCBTy ACSRepairCB =
        [=](const Attributor::ArgumentReplacementInfo &ARI,
            AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOperands) {
          // ...
          createReplacementValues(
              AlignAA ? AlignAA->getAssumedAlign() : Align(0),
              *PrivatizableType, ACS,
              ACS.getCallArgOperand(ARI.getReplacedArg().getArgNo()),
              NewArgOperands);
        };

    // Collect the types that will replace the privatizable type in the
    // function signature.
    SmallVector<Type *, 16> ReplacementTypes;
    identifyReplacementTypes(*PrivatizableType, ReplacementTypes);

    // Register a rewrite of the argument.
    if (A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
                                           std::move(FnRepairCB),
                                           std::move(ACSRepairCB)))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(privatizable_ptr)
  }
};
7736struct AAPrivatizablePtrFloating :
public AAPrivatizablePtrImpl {
7737 AAPrivatizablePtrFloating(
const IRPosition &IRP, Attributor &
A)
7738 : AAPrivatizablePtrImpl(IRP,
A) {}
7743 indicatePessimisticFixpoint();
7748 "updateImpl will not be called");
7752 std::optional<Type *> identifyPrivatizableType(Attributor &
A)
override {
7755 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] No underlying object found!\n");
7762 return AI->getAllocatedType();
7764 auto *PrivArgAA =
A.getAAFor<AAPrivatizablePtr>(
7766 if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
7767 return PrivArgAA->getPrivatizableType();
7770 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] Underlying object neither valid "
7771 "alloca nor privatizable argument: "
7777 void trackStatistics()
const override {
7782struct AAPrivatizablePtrCallSiteArgument final
7783 :
public AAPrivatizablePtrFloating {
7784 AAPrivatizablePtrCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
7785 : AAPrivatizablePtrFloating(IRP,
A) {}
7789 if (
A.hasAttr(getIRPosition(), Attribute::ByVal))
7790 indicateOptimisticFixpoint();
7795 PrivatizableType = identifyPrivatizableType(
A);
7796 if (!PrivatizableType)
7797 return ChangeStatus::UNCHANGED;
7798 if (!*PrivatizableType)
7799 return indicatePessimisticFixpoint();
7801 const IRPosition &IRP = getIRPosition();
7802 bool IsKnownNoCapture;
7804 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
7805 if (!IsAssumedNoCapture) {
7806 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might be captured!\n");
7807 return indicatePessimisticFixpoint();
7810 bool IsKnownNoAlias;
7812 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
7813 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer might alias!\n");
7814 return indicatePessimisticFixpoint();
7819 LLVM_DEBUG(
dbgs() <<
"[AAPrivatizablePtr] pointer is written!\n");
7820 return indicatePessimisticFixpoint();
7823 return ChangeStatus::UNCHANGED;
7827 void trackStatistics()
const override {
7832struct AAPrivatizablePtrCallSiteReturned final
7833 :
public AAPrivatizablePtrFloating {
7834 AAPrivatizablePtrCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
7835 : AAPrivatizablePtrFloating(IRP,
A) {}
7840 indicatePessimisticFixpoint();
7844 void trackStatistics()
const override {
7849struct AAPrivatizablePtrReturned final :
public AAPrivatizablePtrFloating {
7850 AAPrivatizablePtrReturned(
const IRPosition &IRP, Attributor &
A)
7851 : AAPrivatizablePtrFloating(IRP,
A) {}
7856 indicatePessimisticFixpoint();
7860 void trackStatistics()
const override {
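/// -------------------- Memory Behavior Attributes ----------------------------
/// The AAMemoryBehavior deduction below infers readnone/readonly/writeonly for
/// functions, call sites, arguments, and floating values.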
struct AAMemoryBehaviorImpl : public AAMemoryBehavior {
  AAMemoryBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehavior(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    intersectAssumedBits(BEST_STATE);
    getKnownStateFromValue(A, getIRPosition(), getState());
    AAMemoryBehavior::initialize(A);
  }

  /// Return the memory behavior information encoded in the IR for \p IRP.
  static void getKnownStateFromValue(Attributor &A, const IRPosition &IRP,
                                     BitIntegerState &State,
                                     bool IgnoreSubsumingPositions = false) {
    SmallVector<Attribute, 2> Attrs;
    A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
    for (const Attribute &Attr : Attrs) {
      switch (Attr.getKindAsEnum()) {
      case Attribute::ReadNone:
        State.addKnownBits(NO_ACCESSES);
        break;
      case Attribute::ReadOnly:
        State.addKnownBits(NO_WRITES);
        break;
      case Attribute::WriteOnly:
        State.addKnownBits(NO_READS);
        break;
      default:
        llvm_unreachable("Unexpected attribute!");
      }
    }

    if (auto *I = dyn_cast<Instruction>(&IRP.getAnchorValue())) {
      if (!I->mayReadFromMemory())
        State.addKnownBits(NO_READS);
      if (!I->mayWriteToMemory())
        State.addKnownBits(NO_WRITES);
    }
  }

  /// See AbstractAttribute::getDeducedAttributes(...).
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    if (isAssumedReadNone())
      Attrs.push_back(Attribute::get(Ctx, Attribute::ReadNone));
    else if (isAssumedReadOnly())
      Attrs.push_back(Attribute::get(Ctx, Attribute::ReadOnly));
    else if (isAssumedWriteOnly())
      Attrs.push_back(Attribute::get(Ctx, Attribute::WriteOnly));
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();

    if (A.hasAttr(IRP, Attribute::ReadNone,
                  /* IgnoreSubsumingPositions */ true))
      return ChangeStatus::UNCHANGED;

    // Check if we would improve the existing attributes first.
    SmallVector<Attribute, 4> DeducedAttrs;
    getDeducedAttributes(A, IRP.getAnchorValue().getContext(), DeducedAttrs);
    if (llvm::all_of(DeducedAttrs, [&](const Attribute &Attr) {
          return A.hasAttr(IRP, Attr.getKindAsEnum(),
                           /* IgnoreSubsumingPositions */ true);
        }))
      return ChangeStatus::UNCHANGED;

    // Clear existing attributes so the deduced ones can be manifested.
    A.removeAttrs(IRP, AttrKinds);
    if (isAssumedReadOnly() || isAssumedReadNone())
      A.removeAttrs(IRP, Attribute::Writable);

    return IRAttribute::manifest(A);
  }

  /// See AbstractState::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (isAssumedReadNone())
      return "readnone";
    if (isAssumedReadOnly())
      return "readonly";
    if (isAssumedWriteOnly())
      return "writeonly";
    return "may-read/write";
  }

  /// The set of IR attributes AAMemoryBehavior deals with.
  static const Attribute::AttrKind AttrKinds[3];
};

const Attribute::AttrKind AAMemoryBehaviorImpl::AttrKinds[] = {
    Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
7970struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
7971 AAMemoryBehaviorFloating(
const IRPosition &IRP, Attributor &
A)
7972 : AAMemoryBehaviorImpl(IRP,
A) {}
7978 void trackStatistics()
const override {
7983 else if (isAssumedWriteOnly())
7990 bool followUsersOfUseIn(Attributor &
A,
const Use &U,
7991 const Instruction *UserI);
7994 void analyzeUseIn(Attributor &
A,
const Use &U,
const Instruction *UserI);
7998struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
7999 AAMemoryBehaviorArgument(
const IRPosition &IRP, Attributor &
A)
8000 : AAMemoryBehaviorFloating(IRP,
A) {}
8004 intersectAssumedBits(BEST_STATE);
8005 const IRPosition &IRP = getIRPosition();
8009 bool HasByVal =
A.hasAttr(IRP, {Attribute::ByVal},
8011 getKnownStateFromValue(
A, IRP, getState(),
8018 return ChangeStatus::UNCHANGED;
8022 if (
A.hasAttr(getIRPosition(),
8023 {Attribute::InAlloca, Attribute::Preallocated})) {
8024 removeKnownBits(NO_WRITES);
8025 removeAssumedBits(NO_WRITES);
8027 A.removeAttrs(getIRPosition(), AttrKinds);
8028 return AAMemoryBehaviorFloating::manifest(
A);
8032 void trackStatistics()
const override {
8037 else if (isAssumedWriteOnly())
8042struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
8043 AAMemoryBehaviorCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
8044 : AAMemoryBehaviorArgument(IRP,
A) {}
8050 Argument *Arg = getAssociatedArgument();
8052 indicatePessimisticFixpoint();
8056 addKnownBits(NO_WRITES);
8057 removeKnownBits(NO_READS);
8058 removeAssumedBits(NO_READS);
8060 AAMemoryBehaviorArgument::initialize(
A);
8061 if (getAssociatedFunction()->isDeclaration())
8062 indicatePessimisticFixpoint();
8071 Argument *Arg = getAssociatedArgument();
8074 A.getAAFor<AAMemoryBehavior>(*
this, ArgPos, DepClassTy::REQUIRED);
8076 return indicatePessimisticFixpoint();
8081 void trackStatistics()
const override {
8086 else if (isAssumedWriteOnly())
8092struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
8093 AAMemoryBehaviorCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
8094 : AAMemoryBehaviorFloating(IRP,
A) {}
8098 AAMemoryBehaviorImpl::initialize(
A);
8103 return ChangeStatus::UNCHANGED;
8107 void trackStatistics()
const override {}
8111struct AAMemoryBehaviorFunction final :
public AAMemoryBehaviorImpl {
8112 AAMemoryBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
8113 : AAMemoryBehaviorImpl(IRP,
A) {}
8129 else if (isAssumedWriteOnly())
8132 A.removeAttrs(getIRPosition(), AttrKinds);
8135 for (Argument &Arg :
F.args())
8137 return A.manifestAttrs(getIRPosition(),
8138 Attribute::getWithMemoryEffects(
F.getContext(), ME));
8142 void trackStatistics()
const override {
8147 else if (isAssumedWriteOnly())
8153struct AAMemoryBehaviorCallSite final
8154 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
8155 AAMemoryBehaviorCallSite(
const IRPosition &IRP, Attributor &
A)
8156 : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl>(IRP,
A) {}
8167 else if (isAssumedWriteOnly())
8170 A.removeAttrs(getIRPosition(), AttrKinds);
8173 for (Use &U : CB.
args())
8175 Attribute::Writable);
8176 return A.manifestAttrs(
8177 getIRPosition(), Attribute::getWithMemoryEffects(CB.
getContext(), ME));
8181 void trackStatistics()
const override {
8186 else if (isAssumedWriteOnly())
8191ChangeStatus AAMemoryBehaviorFunction::updateImpl(Attributor &
A) {
8194 auto AssumedState = getAssumed();
8201 const auto *MemBehaviorAA =
A.getAAFor<AAMemoryBehavior>(
8203 if (MemBehaviorAA) {
8204 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8205 return !isAtFixpoint();
8210 if (
I.mayReadFromMemory())
8211 removeAssumedBits(NO_READS);
8212 if (
I.mayWriteToMemory())
8213 removeAssumedBits(NO_WRITES);
8214 return !isAtFixpoint();
8217 bool UsedAssumedInformation =
false;
8218 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8219 UsedAssumedInformation))
8220 return indicatePessimisticFixpoint();
8226ChangeStatus AAMemoryBehaviorFloating::updateImpl(Attributor &
A) {
8228 const IRPosition &IRP = getIRPosition();
8239 const auto *FnMemAA =
8242 FnMemAssumedState = FnMemAA->getAssumed();
8243 S.addKnownBits(FnMemAA->getKnown());
8244 if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
8250 auto AssumedState = S.getAssumed();
8256 bool IsKnownNoCapture;
8257 const AANoCapture *ArgNoCaptureAA =
nullptr;
8262 if (!IsAssumedNoCapture &&
8264 S.intersectAssumedBits(FnMemAssumedState);
8270 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
8272 LLVM_DEBUG(
dbgs() <<
"[AAMemoryBehavior] Use: " << *U <<
" in " << *UserI
8280 Follow = followUsersOfUseIn(
A, U, UserI);
8284 analyzeUseIn(
A, U, UserI);
8286 return !isAtFixpoint();
8289 if (!
A.checkForAllUses(UsePred, *
this, getAssociatedValue()))
8290 return indicatePessimisticFixpoint();
8296bool AAMemoryBehaviorFloating::followUsersOfUseIn(Attributor &
A,
const Use &U,
8297 const Instruction *UserI) {
8315 if (
U.get()->getType()->isPointerTy()) {
8317 bool IsKnownNoCapture;
8326void AAMemoryBehaviorFloating::analyzeUseIn(Attributor &
A,
const Use &U,
8327 const Instruction *UserI) {
8334 case Instruction::Load:
8336 removeAssumedBits(NO_READS);
8339 case Instruction::Store:
8344 removeAssumedBits(NO_WRITES);
8346 indicatePessimisticFixpoint();
8349 case Instruction::Call:
8350 case Instruction::CallBr:
8351 case Instruction::Invoke: {
8358 indicatePessimisticFixpoint();
8365 removeAssumedBits(NO_READS);
8372 if (
U.get()->getType()->isPointerTy())
8376 const auto *MemBehaviorAA =
8382 intersectAssumedBits(MemBehaviorAA->
getAssumed());
8390 removeAssumedBits(NO_READS);
8392 removeAssumedBits(NO_WRITES);
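/// -------------------- Memory Locations Attributes ---------------------------
/// The AAMemoryLocation deduction below infers which memory locations
/// (argument, global, inaccessible, ...) may be accessed, which is later used
/// to build `memory(...)` attributes.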
8404 return "all memory";
8407 std::string S =
"memory:";
8413 S +=
"internal global,";
8415 S +=
"external global,";
8419 S +=
"inaccessible,";
8433 AccessKind2Accesses.fill(
nullptr);
8436 ~AAMemoryLocationImpl()
override {
8439 for (AccessSet *AS : AccessKind2Accesses)
8446 intersectAssumedBits(BEST_STATE);
8447 getKnownStateFromValue(
A, getIRPosition(), getState());
8448 AAMemoryLocation::initialize(
A);
8452 static void getKnownStateFromValue(Attributor &
A,
const IRPosition &IRP,
8453 BitIntegerState &State,
8454 bool IgnoreSubsumingPositions =
false) {
8463 bool UseArgMemOnly =
true;
8465 if (AnchorFn &&
A.isRunOn(*AnchorFn))
8469 A.getAttrs(IRP, {Attribute::Memory},
Attrs, IgnoreSubsumingPositions);
8478 State.
addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM,
true,
true));
8483 State.
addKnownBits(inverseLocation(NO_ARGUMENT_MEM,
true,
true));
8487 A.manifestAttrs(IRP,
8488 Attribute::getWithMemoryEffects(
8497 NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM,
true,
true));
8501 A.manifestAttrs(IRP,
8502 Attribute::getWithMemoryEffects(
8512 void getDeducedAttributes(Attributor &
A, LLVMContext &Ctx,
8513 SmallVectorImpl<Attribute> &Attrs)
const override {
8520 else if (isAssumedInaccessibleMemOnly())
8521 Attrs.push_back(Attribute::getWithMemoryEffects(
8523 else if (isAssumedArgMemOnly())
8526 else if (isAssumedInaccessibleOrArgMemOnly())
8527 Attrs.push_back(Attribute::getWithMemoryEffects(
8537 const IRPosition &IRP = getIRPosition();
8541 if (DeducedAttrs.
size() != 1)
8542 return ChangeStatus::UNCHANGED;
8545 return A.manifestAttrs(IRP, Attribute::getWithMemoryEffects(
8550 bool checkForAllAccessesToMemoryKind(
8552 MemoryLocationsKind)>
8554 MemoryLocationsKind RequestedMLK)
const override {
8555 if (!isValidState())
8558 MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
8559 if (AssumedMLK == NO_LOCATIONS)
8563 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
8564 CurMLK *= 2, ++Idx) {
8565 if (CurMLK & RequestedMLK)
8568 if (
const AccessSet *
Accesses = AccessKind2Accesses[Idx])
8569 for (
const AccessInfo &AI : *
Accesses)
8570 if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
8583 MemoryLocationsKind KnownMLK = getKnown();
8585 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
8586 if (!(CurMLK & KnownMLK))
8587 updateStateAndAccessesMap(getState(), CurMLK,
I,
nullptr,
Changed,
8588 getAccessKindFromInst(
I));
8589 return AAMemoryLocation::indicatePessimisticFixpoint();
8609 bool operator()(
const AccessInfo &
LHS,
const AccessInfo &
RHS)
const {
8613 return LHS.Ptr <
RHS.Ptr;
8614 if (
LHS.Kind !=
RHS.Kind)
8615 return LHS.Kind <
RHS.Kind;
8622 using AccessSet = SmallSet<AccessInfo, 2, AccessInfo>;
8623 std::array<AccessSet *, llvm::ConstantLog2<VALID_STATE>()>
8624 AccessKind2Accesses;
8629 categorizeArgumentPointerLocations(Attributor &
A, CallBase &CB,
8630 AAMemoryLocation::StateType &AccessedLocs,
8635 categorizeAccessedLocations(Attributor &
A, Instruction &
I,
bool &
Changed);
8638 AccessKind getAccessKindFromInst(
const Instruction *
I) {
8641 AK =
I->mayReadFromMemory() ? READ :
NONE;
8650 void updateStateAndAccessesMap(AAMemoryLocation::StateType &State,
8651 MemoryLocationsKind MLK,
const Instruction *
I,
8660 if (MLK == NO_UNKOWN_MEM)
8662 State.removeAssumedBits(MLK);
8667 void categorizePtrValue(Attributor &
A,
const Instruction &
I,
const Value &
Ptr,
8668 AAMemoryLocation::StateType &State,
bool &
Changed,
8669 unsigned AccessAS = 0);
8675void AAMemoryLocationImpl::categorizePtrValue(
8676 Attributor &
A,
const Instruction &
I,
const Value &
Ptr,
8678 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize pointer locations for "
8682 auto Pred = [&](
Value &Obj) {
8685 MemoryLocationsKind MLK = NO_LOCATIONS;
8704 MLK = NO_ARGUMENT_MEM;
8710 if (GVar->isConstant())
8713 if (GV->hasLocalLinkage())
8714 MLK = NO_GLOBAL_INTERNAL_MEM;
8716 MLK = NO_GLOBAL_EXTERNAL_MEM;
8724 bool IsKnownNoAlias;
8728 MLK = NO_MALLOCED_MEM;
8730 MLK = NO_UNKOWN_MEM;
8732 MLK = NO_UNKOWN_MEM;
8735 assert(MLK != NO_LOCATIONS &&
"No location specified!");
8736 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Ptr value can be categorized: "
8737 << Obj <<
" -> " << getMemoryLocationsAsStr(MLK) <<
"\n");
8739 getAccessKindFromInst(&
I));
8744 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
8748 dbgs() <<
"[AAMemoryLocation] Pointer locations not categorized\n");
8749 updateStateAndAccessesMap(
State, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8750 getAccessKindFromInst(&
I));
8755 dbgs() <<
"[AAMemoryLocation] Accessed locations with pointer locations: "
8759void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
8762 for (
unsigned ArgNo = 0,
E = CB.
arg_size(); ArgNo <
E; ++ArgNo) {
8771 const auto *ArgOpMemLocationAA =
8774 if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
8779 categorizePtrValue(
A, CB, *ArgOp, AccessedLocs,
Changed);
8784AAMemoryLocationImpl::categorizeAccessedLocations(Attributor &
A, Instruction &
I,
8786 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Categorize accessed locations for "
8790 AccessedLocs.intersectAssumedBits(NO_LOCATIONS);
8795 const auto *CBMemLocationAA =
A.getAAFor<AAMemoryLocation>(
8798 <<
" [" << CBMemLocationAA <<
"]\n");
8799 if (!CBMemLocationAA) {
8800 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
8801 Changed, getAccessKindFromInst(&
I));
8802 return NO_UNKOWN_MEM;
8805 if (CBMemLocationAA->isAssumedReadNone())
8806 return NO_LOCATIONS;
8808 if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
8809 updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &
I,
nullptr,
8810 Changed, getAccessKindFromInst(&
I));
8811 return AccessedLocs.getAssumed();
8814 uint32_t CBAssumedNotAccessedLocs =
8815 CBMemLocationAA->getAssumedNotAccessedLocation();
8818 uint32_t CBAssumedNotAccessedLocsNoArgMem =
8819 CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;
8821 for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
8822 if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
8824 updateStateAndAccessesMap(AccessedLocs, CurMLK, &
I,
nullptr,
Changed,
8825 getAccessKindFromInst(&
I));
8830 bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
8831 if (HasGlobalAccesses) {
8834 updateStateAndAccessesMap(AccessedLocs, MLK, &
I,
Ptr,
Changed,
8835 getAccessKindFromInst(&
I));
8838 if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
8839 AccessPred, inverseLocation(NO_GLOBAL_MEM,
false,
false)))
8840 return AccessedLocs.getWorstState();
8844 dbgs() <<
"[AAMemoryLocation] Accessed state before argument handling: "
8845 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8848 bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
8850 categorizeArgumentPointerLocations(
A, *CB, AccessedLocs,
Changed);
8853 dbgs() <<
"[AAMemoryLocation] Accessed state after argument handling: "
8854 << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) <<
"\n");
8856 return AccessedLocs.getAssumed();
8861 dbgs() <<
"[AAMemoryLocation] Categorize memory access with pointer: "
8862 <<
I <<
" [" << *
Ptr <<
"]\n");
8864 Ptr->getType()->getPointerAddressSpace());
8865 return AccessedLocs.getAssumed();
8868 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Failed to categorize instruction: "
8870 updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &
I,
nullptr,
Changed,
8871 getAccessKindFromInst(&
I));
8872 return AccessedLocs.getAssumed();
8876struct AAMemoryLocationFunction final :
public AAMemoryLocationImpl {
8877 AAMemoryLocationFunction(
const IRPosition &IRP, Attributor &
A)
8878 : AAMemoryLocationImpl(IRP,
A) {}
8883 const auto *MemBehaviorAA =
8884 A.getAAFor<AAMemoryBehavior>(*
this, getIRPosition(), DepClassTy::NONE);
8887 return indicateOptimisticFixpoint();
8889 "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
8890 A.recordDependence(*MemBehaviorAA, *
this, DepClassTy::OPTIONAL);
8891 return ChangeStatus::UNCHANGED;
8895 auto AssumedState = getAssumed();
8899 MemoryLocationsKind MLK = categorizeAccessedLocations(
A,
I,
Changed);
8900 LLVM_DEBUG(
dbgs() <<
"[AAMemoryLocation] Accessed locations for " <<
I
8901 <<
": " << getMemoryLocationsAsStr(MLK) <<
"\n");
8902 removeAssumedBits(inverseLocation(MLK,
false,
false));
8905 return getAssumedNotAccessedLocation() != VALID_STATE;
8908 bool UsedAssumedInformation =
false;
8909 if (!
A.checkForAllReadWriteInstructions(CheckRWInst, *
this,
8910 UsedAssumedInformation))
8911 return indicatePessimisticFixpoint();
8913 Changed |= AssumedState != getAssumed();
8914 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8918 void trackStatistics()
const override {
8921 else if (isAssumedArgMemOnly())
8923 else if (isAssumedInaccessibleMemOnly())
8925 else if (isAssumedInaccessibleOrArgMemOnly())
8931struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
8932 AAMemoryLocationCallSite(
const IRPosition &IRP, Attributor &
A)
8933 : AAMemoryLocationImpl(IRP,
A) {}
8944 A.getAAFor<AAMemoryLocation>(*
this, FnPos, DepClassTy::REQUIRED);
8946 return indicatePessimisticFixpoint();
8950 updateStateAndAccessesMap(getState(), MLK,
I,
Ptr,
Changed,
8951 getAccessKindFromInst(
I));
8954 if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
8955 return indicatePessimisticFixpoint();
8956 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
8960 void trackStatistics()
const override {
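/// ------------------ denormal-fp-math Attribute ------------------------------
/// The AADenormalFPMath deduction below propagates the denormal-fp-math and
/// denormal-fp-math-f32 modes from callers into the analyzed function.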
struct AADenormalFPMathImpl : public AADenormalFPMath {
  AADenormalFPMathImpl(const IRPosition &IRP, Attributor &A)
      : AADenormalFPMath(IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    std::string Str("AADenormalFPMath[");
    raw_string_ostream OS(Str);

    DenormalState Known = getKnown();
    if (Known.Mode.isValid())
      OS << "denormal-fp-math=" << Known.Mode;
    else
      OS << "invalid";

    if (Known.ModeF32.isValid())
      OS << " denormal-fp-math-f32=" << Known.ModeF32;
    OS << ']';
    return Str;
  }
};

struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
  AADenormalFPMathFunction(const IRPosition &IRP, Attributor &A)
      : AADenormalFPMathImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    const Function *F = getAnchorScope();
    DenormalMode Mode = F->getDenormalModeRaw();
    DenormalMode ModeF32 = F->getDenormalModeF32Raw();
    // ...
    Known = DenormalState{Mode, ModeF32};
  }

  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Change = ChangeStatus::UNCHANGED;

    auto CheckCallSite = [=, &Change, &A](AbstractCallSite CS) {
      Function *Caller = CS.getInstruction()->getFunction();
      LLVM_DEBUG(dbgs() << "[AADenormalFPMath] Call " << Caller->getName()
                        << "->" << getAssociatedFunction()->getName() << '\n');

      const auto *CallerInfo = A.getAAFor<AADenormalFPMath>(
          *this, IRPosition::function(*Caller), DepClassTy::REQUIRED);
      if (!CallerInfo)
        return false;

      Change = Change | clampStateAndIndicateChange(this->getState(),
                                                    CallerInfo->getState());
      return true;
    };

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckCallSite, *this, true, AllCallSitesKnown))
      return indicatePessimisticFixpoint();

    if (Change == ChangeStatus::CHANGED && isModeFixed())
      indicateFixpoint();
    return Change;
  }

  ChangeStatus manifest(Attributor &A) override {
    LLVMContext &Ctx = getAssociatedFunction()->getContext();

    SmallVector<Attribute, 2> AttrToAdd;
    SmallVector<StringRef, 2> AttrToRemove;
    if (Known.Mode == DenormalMode::getDefault()) {
      AttrToRemove.push_back("denormal-fp-math");
    } else {
      AttrToAdd.push_back(
          Attribute::get(Ctx, "denormal-fp-math", Known.Mode.str()));
    }

    if (Known.ModeF32 != Known.Mode) {
      AttrToAdd.push_back(
          Attribute::get(Ctx, "denormal-fp-math-f32", Known.ModeF32.str()));
    } else {
      AttrToRemove.push_back("denormal-fp-math-f32");
    }

    auto &IRP = getIRPosition();
    return A.removeAttrs(IRP, AttrToRemove) |
           A.manifestAttrs(IRP, AttrToAdd, /*ForceReplace=*/true);
  }

  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(denormal_fp_math)
  }
};
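/// ------------------ Value Constant Range Attribute --------------------------
/// The AAValueConstantRange deduction below tracks a constant range for
/// integer values, combining Attributor information with SCEV and
/// LazyValueInfo results.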
9072struct AAValueConstantRangeImpl : AAValueConstantRange {
9073 using StateType = IntegerRangeState;
9074 AAValueConstantRangeImpl(
const IRPosition &IRP, Attributor &
A)
9075 : AAValueConstantRange(IRP,
A) {}
9079 if (
A.hasSimplificationCallback(getIRPosition())) {
9080 indicatePessimisticFixpoint();
9085 intersectKnown(getConstantRangeFromSCEV(
A, getCtxI()));
9088 intersectKnown(getConstantRangeFromLVI(
A, getCtxI()));
9092 const std::string getAsStr(Attributor *
A)
const override {
9094 llvm::raw_string_ostream OS(Str);
9096 getKnown().print(OS);
9098 getAssumed().print(OS);
9105 const SCEV *getSCEV(Attributor &
A,
const Instruction *
I =
nullptr)
const {
9106 if (!getAnchorScope())
9109 ScalarEvolution *SE =
9110 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9113 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
9119 const SCEV *S = SE->
getSCEV(&getAssociatedValue());
9128 ConstantRange getConstantRangeFromSCEV(Attributor &
A,
9129 const Instruction *
I =
nullptr)
const {
9130 if (!getAnchorScope())
9133 ScalarEvolution *SE =
9134 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
9137 const SCEV *S = getSCEV(
A,
I);
9147 getConstantRangeFromLVI(Attributor &
A,
9148 const Instruction *CtxI =
nullptr)
const {
9149 if (!getAnchorScope())
9152 LazyValueInfo *LVI =
9153 A.getInfoCache().getAnalysisResultForFunction<LazyValueAnalysis>(
9168 bool isValidCtxInstructionForOutsideAnalysis(Attributor &
A,
9169 const Instruction *CtxI,
9170 bool AllowAACtxI)
const {
9171 if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
9183 InformationCache &InfoCache =
A.getInfoCache();
9184 const DominatorTree *DT =
9195 getKnownConstantRange(Attributor &
A,
9196 const Instruction *CtxI =
nullptr)
const override {
9197 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9201 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9202 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9203 return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
9208 getAssumedConstantRange(Attributor &
A,
9209 const Instruction *CtxI =
nullptr)
const override {
9214 if (!isValidCtxInstructionForOutsideAnalysis(
A, CtxI,
9216 return getAssumed();
9218 ConstantRange LVIR = getConstantRangeFromLVI(
A, CtxI);
9219 ConstantRange SCEVR = getConstantRangeFromSCEV(
A, CtxI);
9220 return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
9225 getMDNodeForConstantRange(
Type *Ty, LLVMContext &Ctx,
9226 const ConstantRange &AssumedConstantRange) {
9228 Ty, AssumedConstantRange.
getLower())),
9230 Ty, AssumedConstantRange.
getUpper()))};
9235 static bool isBetterRange(
const ConstantRange &Assumed,
9236 const Instruction &
I) {
9240 std::optional<ConstantRange> Known;
9244 }
else if (MDNode *KnownRanges =
I.getMetadata(LLVMContext::MD_range)) {
9250 if (KnownRanges->getNumOperands() > 2)
9253 ConstantInt *
Lower =
9255 ConstantInt *
Upper =
9258 Known.emplace(
Lower->getValue(),
Upper->getValue());
9260 return !Known || (*Known != Assumed && Known->contains(Assumed));
9265 setRangeMetadataIfisBetterRange(Instruction *
I,
9266 const ConstantRange &AssumedConstantRange) {
9267 if (isBetterRange(AssumedConstantRange, *
I)) {
9268 I->setMetadata(LLVMContext::MD_range,
9269 getMDNodeForConstantRange(
I->getType(),
I->getContext(),
9270 AssumedConstantRange));
9277 setRangeRetAttrIfisBetterRange(Attributor &
A,
const IRPosition &IRP,
9279 const ConstantRange &AssumedConstantRange) {
9280 if (isBetterRange(AssumedConstantRange, *
I)) {
9281 A.manifestAttrs(IRP,
9282 Attribute::get(
I->getContext(), Attribute::Range,
9283 AssumedConstantRange),
9293 ConstantRange AssumedConstantRange = getAssumedConstantRange(
A);
9296 auto &
V = getAssociatedValue();
9300 assert(
I == getCtxI() &&
"Should not annotate an instruction which is "
9301 "not the context instruction");
9303 if (setRangeMetadataIfisBetterRange(
I, AssumedConstantRange))
9304 Changed = ChangeStatus::CHANGED;
9306 if (setRangeRetAttrIfisBetterRange(
A, getIRPosition(),
I,
9307 AssumedConstantRange))
9308 Changed = ChangeStatus::CHANGED;
9316struct AAValueConstantRangeArgument final
9317 : AAArgumentFromCallSiteArguments<
9318 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9320 using Base = AAArgumentFromCallSiteArguments<
9321 AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
9323 AAValueConstantRangeArgument(
const IRPosition &IRP, Attributor &
A)
9327 void trackStatistics()
const override {
9332struct AAValueConstantRangeReturned
9333 : AAReturnedFromReturnedValues<AAValueConstantRange,
9334 AAValueConstantRangeImpl,
9335 AAValueConstantRangeImpl::StateType,
9338 AAReturnedFromReturnedValues<AAValueConstantRange,
9339 AAValueConstantRangeImpl,
9340 AAValueConstantRangeImpl::StateType,
9342 AAValueConstantRangeReturned(
const IRPosition &IRP, Attributor &
A)
9347 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9348 indicatePessimisticFixpoint();
9352 void trackStatistics()
const override {
9357struct AAValueConstantRangeFloating : AAValueConstantRangeImpl {
9358 AAValueConstantRangeFloating(
const IRPosition &IRP, Attributor &
A)
9359 : AAValueConstantRangeImpl(IRP,
A) {}
9363 AAValueConstantRangeImpl::initialize(
A);
9367 Value &
V = getAssociatedValue();
9370 unionAssumed(ConstantRange(
C->getValue()));
9371 indicateOptimisticFixpoint();
9377 unionAssumed(ConstantRange(APInt(
getBitWidth(), 0)));
9378 indicateOptimisticFixpoint();
9390 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9401 indicatePessimisticFixpoint();
9404 << getAssociatedValue() <<
"\n");
9407 bool calculateBinaryOperator(
9408 Attributor &
A, BinaryOperator *BinOp, IntegerRangeState &
T,
9409 const Instruction *CtxI,
9410 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9415 bool UsedAssumedInformation =
false;
9416 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9419 if (!SimplifiedLHS.has_value())
9421 if (!*SimplifiedLHS)
9423 LHS = *SimplifiedLHS;
9425 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9428 if (!SimplifiedRHS.has_value())
9430 if (!*SimplifiedRHS)
9432 RHS = *SimplifiedRHS;
9438 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9440 DepClassTy::REQUIRED);
9444 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9446 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9448 DepClassTy::REQUIRED);
9452 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9454 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9456 T.unionAssumed(AssumedRange);
9460 return T.isValidState();
9463 bool calculateCastInst(
9464 Attributor &
A, CastInst *CastI, IntegerRangeState &
T,
9465 const Instruction *CtxI,
9466 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9472 bool UsedAssumedInformation =
false;
9473 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9476 if (!SimplifiedOpV.has_value())
9478 if (!*SimplifiedOpV)
9480 OpV = *SimplifiedOpV;
9485 auto *OpAA =
A.getAAFor<AAValueConstantRange>(
9487 DepClassTy::REQUIRED);
9491 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9493 return T.isValidState();
9497 calculateCmpInst(Attributor &
A, CmpInst *CmpI, IntegerRangeState &
T,
9498 const Instruction *CtxI,
9499 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9504 bool UsedAssumedInformation =
false;
9505 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9508 if (!SimplifiedLHS.has_value())
9510 if (!*SimplifiedLHS)
9512 LHS = *SimplifiedLHS;
9514 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9517 if (!SimplifiedRHS.has_value())
9519 if (!*SimplifiedRHS)
9521 RHS = *SimplifiedRHS;
9527 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9529 DepClassTy::REQUIRED);
9533 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9535 DepClassTy::REQUIRED);
9539 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9540 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9543 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9546 bool MustTrue =
false, MustFalse =
false;
9548 auto AllowedRegion =
9551 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9557 assert((!MustTrue || !MustFalse) &&
9558 "Either MustTrue or MustFalse should be false!");
9561 T.unionAssumed(ConstantRange(APInt( 1, 1)));
9563 T.unionAssumed(ConstantRange(APInt( 1, 0)));
9565 T.unionAssumed(ConstantRange( 1,
true));
9567 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9568 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9569 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9573 return T.isValidState();
9585 bool UsedAssumedInformation =
false;
9586 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9589 if (!SimplifiedOpV.has_value())
9591 if (!*SimplifiedOpV)
9593 Value *VPtr = *SimplifiedOpV;
9596 const auto *AA =
A.getAAFor<AAValueConstantRange>(
9598 DepClassTy::REQUIRED);
9602 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9606 return T.isValidState();
9611 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9614 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9617 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9623 T.indicatePessimisticFixpoint();
9630 for (
const AAValueConstantRange *QueriedAA : QuerriedAAs) {
9631 if (QueriedAA !=
this)
9634 if (
T.getAssumed() == getState().getAssumed())
9636 T.indicatePessimisticFixpoint();
9639 return T.isValidState();
9642 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9643 return indicatePessimisticFixpoint();
9648 return ChangeStatus::UNCHANGED;
9649 if (++NumChanges > MaxNumChanges) {
9650 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9651 <<
" but only " << MaxNumChanges
9652 <<
" are allowed to avoid cyclic reasoning.");
9653 return indicatePessimisticFixpoint();
9655 return ChangeStatus::CHANGED;
9659 void trackStatistics()
const override {
9668 static constexpr int MaxNumChanges = 5;
9671struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
9672 AAValueConstantRangeFunction(
const IRPosition &IRP, Attributor &
A)
9673 : AAValueConstantRangeImpl(IRP,
A) {}
9677 llvm_unreachable(
"AAValueConstantRange(Function|CallSite)::updateImpl will "
9685struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
9686 AAValueConstantRangeCallSite(
const IRPosition &IRP, Attributor &
A)
9687 : AAValueConstantRangeFunction(IRP,
A) {}
9693struct AAValueConstantRangeCallSiteReturned
9694 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9695 AAValueConstantRangeImpl::StateType,
9697 AAValueConstantRangeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
9698 : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
9699 AAValueConstantRangeImpl::StateType,
9706 if (std::optional<ConstantRange>
Range = CI->getRange())
9707 intersectKnown(*
Range);
9710 AAValueConstantRangeImpl::initialize(
A);
9714 void trackStatistics()
const override {
9718struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
9719 AAValueConstantRangeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
9720 : AAValueConstantRangeFloating(IRP,
A) {}
9724 return ChangeStatus::UNCHANGED;
9728 void trackStatistics()
const override {
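/// ------------------ Potential Values Attribute -------------------------------
/// The AAPotentialConstantValues deduction below tracks a small set of possible
/// constant values (and undef) for an integer value.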
9737struct AAPotentialConstantValuesImpl : AAPotentialConstantValues {
9740 AAPotentialConstantValuesImpl(
const IRPosition &IRP, Attributor &
A)
9741 : AAPotentialConstantValues(IRP,
A) {}
9745 if (
A.hasSimplificationCallback(getIRPosition()))
9746 indicatePessimisticFixpoint();
9748 AAPotentialConstantValues::initialize(
A);
9751 bool fillSetWithConstantValues(Attributor &
A,
const IRPosition &IRP, SetTy &S,
9752 bool &ContainsUndef,
bool ForSelf) {
9754 bool UsedAssumedInformation =
false;
9756 UsedAssumedInformation)) {
9763 auto *PotentialValuesAA =
A.getAAFor<AAPotentialConstantValues>(
9764 *
this, IRP, DepClassTy::REQUIRED);
9765 if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
9767 ContainsUndef = PotentialValuesAA->getState().undefIsContained();
9768 S = PotentialValuesAA->getState().getAssumedSet();
9775 ContainsUndef =
false;
9776 for (
auto &It : Values) {
9778 ContainsUndef =
true;
9784 S.insert(CI->getValue());
9786 ContainsUndef &= S.empty();
9792 const std::string getAsStr(Attributor *
A)
const override {
9794 llvm::raw_string_ostream OS(Str);
9801 return indicatePessimisticFixpoint();
9805struct AAPotentialConstantValuesArgument final
9806 : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9807 AAPotentialConstantValuesImpl,
9808 PotentialConstantIntValuesState> {
9809 using Base = AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
9810 AAPotentialConstantValuesImpl,
9812 AAPotentialConstantValuesArgument(
const IRPosition &IRP, Attributor &
A)
9816 void trackStatistics()
const override {
9821struct AAPotentialConstantValuesReturned
9822 : AAReturnedFromReturnedValues<AAPotentialConstantValues,
9823 AAPotentialConstantValuesImpl> {
9824 using Base = AAReturnedFromReturnedValues<AAPotentialConstantValues,
9825 AAPotentialConstantValuesImpl>;
9826 AAPotentialConstantValuesReturned(
const IRPosition &IRP, Attributor &
A)
9830 if (!
A.isFunctionIPOAmendable(*getAssociatedFunction()))
9831 indicatePessimisticFixpoint();
9832 Base::initialize(
A);
9836 void trackStatistics()
const override {
9841struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
9842 AAPotentialConstantValuesFloating(
const IRPosition &IRP, Attributor &
A)
9843 : AAPotentialConstantValuesImpl(IRP,
A) {}
9847 AAPotentialConstantValuesImpl::initialize(
A);
9851 Value &
V = getAssociatedValue();
9854 unionAssumed(
C->getValue());
9855 indicateOptimisticFixpoint();
9860 unionAssumedWithUndef();
9861 indicateOptimisticFixpoint();
9871 indicatePessimisticFixpoint();
9874 << getAssociatedValue() <<
"\n");
9877 static bool calculateICmpInst(
const ICmpInst *ICI,
const APInt &
LHS,
9882 static APInt calculateCastInst(
const CastInst *CI,
const APInt &Src,
9883 uint32_t ResultBitWidth) {
9888 case Instruction::Trunc:
9889 return Src.trunc(ResultBitWidth);
9890 case Instruction::SExt:
9891 return Src.sext(ResultBitWidth);
9892 case Instruction::ZExt:
9893 return Src.zext(ResultBitWidth);
9894 case Instruction::BitCast:
9899 static APInt calculateBinaryOperator(
const BinaryOperator *BinOp,
9900 const APInt &
LHS,
const APInt &
RHS,
9901 bool &SkipOperation,
bool &Unsupported) {
9908 switch (BinOpcode) {
9912 case Instruction::Add:
9914 case Instruction::Sub:
9916 case Instruction::Mul:
9918 case Instruction::UDiv:
9920 SkipOperation =
true;
9924 case Instruction::SDiv:
9926 SkipOperation =
true;
9930 case Instruction::URem:
9932 SkipOperation =
true;
9936 case Instruction::SRem:
9938 SkipOperation =
true;
9942 case Instruction::Shl:
9944 case Instruction::LShr:
9946 case Instruction::AShr:
9948 case Instruction::And:
9950 case Instruction::Or:
9952 case Instruction::Xor:
9957 bool calculateBinaryOperatorAndTakeUnion(
const BinaryOperator *BinOp,
9958 const APInt &
LHS,
const APInt &
RHS) {
9959 bool SkipOperation =
false;
9962 calculateBinaryOperator(BinOp,
LHS,
RHS, SkipOperation, Unsupported);
9967 unionAssumed(Result);
9968 return isValidState();
9971 ChangeStatus updateWithICmpInst(Attributor &
A, ICmpInst *ICI) {
9972 auto AssumedBefore = getAssumed();
9976 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
9977 SetTy LHSAAPVS, RHSAAPVS;
9979 LHSContainsUndef,
false) ||
9981 RHSContainsUndef,
false))
9982 return indicatePessimisticFixpoint();
9985 bool MaybeTrue =
false, MaybeFalse =
false;
9987 if (LHSContainsUndef && RHSContainsUndef) {
9990 unionAssumedWithUndef();
9991 }
else if (LHSContainsUndef) {
9992 for (
const APInt &R : RHSAAPVS) {
9993 bool CmpResult = calculateICmpInst(ICI, Zero, R);
9994 MaybeTrue |= CmpResult;
9995 MaybeFalse |= !CmpResult;
9996 if (MaybeTrue & MaybeFalse)
9997 return indicatePessimisticFixpoint();
9999 }
else if (RHSContainsUndef) {
10000 for (
const APInt &L : LHSAAPVS) {
10001 bool CmpResult = calculateICmpInst(ICI, L, Zero);
10002 MaybeTrue |= CmpResult;
10003 MaybeFalse |= !CmpResult;
10004 if (MaybeTrue & MaybeFalse)
10005 return indicatePessimisticFixpoint();
10008 for (
const APInt &L : LHSAAPVS) {
10009 for (
const APInt &R : RHSAAPVS) {
10010 bool CmpResult = calculateICmpInst(ICI, L, R);
10011 MaybeTrue |= CmpResult;
10012 MaybeFalse |= !CmpResult;
10013 if (MaybeTrue & MaybeFalse)
10014 return indicatePessimisticFixpoint();
10019 unionAssumed(APInt( 1, 1));
10021 unionAssumed(APInt( 1, 0));
10022 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10023 : ChangeStatus::CHANGED;
10026 ChangeStatus updateWithSelectInst(Attributor &
A, SelectInst *SI) {
10027 auto AssumedBefore = getAssumed();
10031 bool UsedAssumedInformation =
false;
10032 std::optional<Constant *>
C =
A.getAssumedConstant(
10033 *
SI->getCondition(), *
this, UsedAssumedInformation);
10036 bool OnlyLeft =
false, OnlyRight =
false;
10037 if (
C && *
C && (*C)->isOneValue())
10039 else if (
C && *
C && (*C)->isZeroValue())
10042 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10043 SetTy LHSAAPVS, RHSAAPVS;
10046 LHSContainsUndef,
false))
10047 return indicatePessimisticFixpoint();
10051 RHSContainsUndef,
false))
10052 return indicatePessimisticFixpoint();
10054 if (OnlyLeft || OnlyRight) {
10056 auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
10057 auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;
10060 unionAssumedWithUndef();
10062 for (
const auto &It : *OpAA)
10066 }
else if (LHSContainsUndef && RHSContainsUndef) {
10068 unionAssumedWithUndef();
10070 for (
const auto &It : LHSAAPVS)
10072 for (
const auto &It : RHSAAPVS)
10075 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10076 : ChangeStatus::CHANGED;
10079 ChangeStatus updateWithCastInst(Attributor &
A, CastInst *CI) {
10080 auto AssumedBefore = getAssumed();
10082 return indicatePessimisticFixpoint();
10087 bool SrcContainsUndef =
false;
10090 SrcContainsUndef,
false))
10091 return indicatePessimisticFixpoint();
10093 if (SrcContainsUndef)
10094 unionAssumedWithUndef();
10096 for (
const APInt &S : SrcPVS) {
10097 APInt
T = calculateCastInst(CI, S, ResultBitWidth);
10101 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10102 : ChangeStatus::CHANGED;
10105 ChangeStatus updateWithBinaryOperator(Attributor &
A, BinaryOperator *BinOp) {
10106 auto AssumedBefore = getAssumed();
10110 bool LHSContainsUndef =
false, RHSContainsUndef =
false;
10111 SetTy LHSAAPVS, RHSAAPVS;
10113 LHSContainsUndef,
false) ||
10115 RHSContainsUndef,
false))
10116 return indicatePessimisticFixpoint();
10121 if (LHSContainsUndef && RHSContainsUndef) {
10122 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
10123 return indicatePessimisticFixpoint();
10124 }
else if (LHSContainsUndef) {
10125 for (
const APInt &R : RHSAAPVS) {
10126 if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
10127 return indicatePessimisticFixpoint();
10129 }
else if (RHSContainsUndef) {
10130 for (
const APInt &L : LHSAAPVS) {
10131 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
10132 return indicatePessimisticFixpoint();
10135 for (
const APInt &L : LHSAAPVS) {
10136 for (
const APInt &R : RHSAAPVS) {
10137 if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
10138 return indicatePessimisticFixpoint();
10142 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10143 : ChangeStatus::CHANGED;
10146 ChangeStatus updateWithInstruction(Attributor &
A, Instruction *Inst) {
10147 auto AssumedBefore = getAssumed();
10149 bool ContainsUndef;
10151 ContainsUndef,
true))
10152 return indicatePessimisticFixpoint();
10153 if (ContainsUndef) {
10154 unionAssumedWithUndef();
10156 for (
const auto &It : Incoming)
10159 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10160 : ChangeStatus::CHANGED;
10165 Value &
V = getAssociatedValue();
10169 return updateWithICmpInst(
A, ICI);
10172 return updateWithSelectInst(
A, SI);
10175 return updateWithCastInst(
A, CI);
10178 return updateWithBinaryOperator(
A, BinOp);
10181 return updateWithInstruction(
A,
I);
10183 return indicatePessimisticFixpoint();
10187 void trackStatistics()
const override {
10192struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
10193 AAPotentialConstantValuesFunction(
const IRPosition &IRP, Attributor &
A)
10194 : AAPotentialConstantValuesImpl(IRP,
A) {}
10199 "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
10204 void trackStatistics()
const override {
10209struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
10210 AAPotentialConstantValuesCallSite(
const IRPosition &IRP, Attributor &
A)
10211 : AAPotentialConstantValuesFunction(IRP,
A) {}
10214 void trackStatistics()
const override {
10219struct AAPotentialConstantValuesCallSiteReturned
10220 : AACalleeToCallSite<AAPotentialConstantValues,
10221 AAPotentialConstantValuesImpl> {
10222 AAPotentialConstantValuesCallSiteReturned(
const IRPosition &IRP,
10224 : AACalleeToCallSite<AAPotentialConstantValues,
10225 AAPotentialConstantValuesImpl>(IRP,
A) {}
10228 void trackStatistics()
const override {
10233struct AAPotentialConstantValuesCallSiteArgument
10234 : AAPotentialConstantValuesFloating {
10235 AAPotentialConstantValuesCallSiteArgument(
const IRPosition &IRP,
10237 : AAPotentialConstantValuesFloating(IRP,
A) {}
10241 AAPotentialConstantValuesImpl::initialize(
A);
10242 if (isAtFixpoint())
10245 Value &
V = getAssociatedValue();
10248 unionAssumed(
C->getValue());
10249 indicateOptimisticFixpoint();
10254 unionAssumedWithUndef();
10255 indicateOptimisticFixpoint();
10262 Value &
V = getAssociatedValue();
10263 auto AssumedBefore = getAssumed();
10264 auto *AA =
A.getAAFor<AAPotentialConstantValues>(
10267 return indicatePessimisticFixpoint();
10268 const auto &S = AA->getAssumed();
10270 return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
10271 : ChangeStatus::CHANGED;
10275 void trackStatistics()
const override {
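/// ------------------ NoUndef Attribute ----------------------------------------
/// The AANoUndef deduction below infers that a value is never undef or poison.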
10284 bool IgnoreSubsumingPositions) {
10285 assert(ImpliedAttributeKind == Attribute::NoUndef &&
10286 "Unexpected attribute kind");
10287 if (
A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
10288 Attribute::NoUndef))
10308 Value &V = getAssociatedValue();
10310 indicatePessimisticFixpoint();
10311 assert(!isImpliedByIR(
A, getIRPosition(), Attribute::NoUndef));
10315 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10316 AANoUndef::StateType &State) {
10317 const Value *UseV =
U->get();
10318 const DominatorTree *DT =
nullptr;
10319 AssumptionCache *AC =
nullptr;
10320 InformationCache &InfoCache =
A.getInfoCache();
10321 if (Function *
F = getAnchorScope()) {
10326 bool TrackUse =
false;
10335 const std::string getAsStr(Attributor *
A)
const override {
10336 return getAssumed() ?
"noundef" :
"may-undef-or-poison";
10343 bool UsedAssumedInformation =
false;
10344 if (
A.isAssumedDead(getIRPosition(),
nullptr,
nullptr,
10345 UsedAssumedInformation))
10346 return ChangeStatus::UNCHANGED;
10350 if (!
A.getAssumedSimplified(getIRPosition(), *
this, UsedAssumedInformation,
10353 return ChangeStatus::UNCHANGED;
10354 return AANoUndef::manifest(
A);
10358struct AANoUndefFloating :
public AANoUndefImpl {
10359 AANoUndefFloating(
const IRPosition &IRP, Attributor &
A)
10360 : AANoUndefImpl(IRP,
A) {}
10364 AANoUndefImpl::initialize(
A);
10365 if (!getState().isAtFixpoint() && getAnchorScope() &&
10366 !getAnchorScope()->isDeclaration())
10367 if (Instruction *CtxI = getCtxI())
10368 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10373 auto VisitValueCB = [&](
const IRPosition &IRP) ->
bool {
10374 bool IsKnownNoUndef;
10376 A,
this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
10380 bool UsedAssumedInformation =
false;
10381 Value *AssociatedValue = &getAssociatedValue();
10383 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10388 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
10396 if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
10397 return indicatePessimisticFixpoint();
10398 return ChangeStatus::UNCHANGED;
10401 for (
const auto &VAC : Values)
10403 return indicatePessimisticFixpoint();
10405 return ChangeStatus::UNCHANGED;
struct AANoUndefReturned final
    : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
  AANoUndefReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl>(IRP, A) {}

  /* ... */
};

struct AANoUndefArgument final
    : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
  AANoUndefArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl>(IRP, A) {}

  /* ... */
};

struct AANoUndefCallSiteArgument final : AANoUndefFloating {
  AANoUndefCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoUndefFloating(IRP, A) {}

  /* ... */
};

struct AANoUndefCallSiteReturned final
    : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
  AANoUndefCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUndef, AANoUndefImpl>(IRP, A) {}

  /* ... */
};
struct AANoFPClassImpl : AANoFPClass {
  AANoFPClassImpl(const IRPosition &IRP, Attributor &A)
      : AANoFPClass(IRP, A) {}
10453 const IRPosition &IRP = getIRPosition();
10457 indicateOptimisticFixpoint();
10462 A.getAttrs(getIRPosition(), {Attribute::NoFPClass},
Attrs,
false);
10463 for (
const auto &Attr : Attrs) {
10467 const DataLayout &
DL =
A.getDataLayout();
10473 if (Instruction *CtxI = getCtxI())
10474 followUsesInMBEC(*
this,
A, getState(), *CtxI);
10478 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
10479 AANoFPClass::StateType &State) {
10490 if (
auto *NoFPAA =
A.getAAFor<AANoFPClass>(*
this, IRP, DepClassTy::NONE))
10491 State.addKnownBits(NoFPAA->getState().getKnown());
  const std::string getAsStr(Attributor *A) const override {
    std::string Result = "nofpclass";
    raw_string_ostream OS(Result);
    OS << getKnownNoFPClass() << '/' << getAssumedNoFPClass();
    return Result;
  }

  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    Attrs.emplace_back(
        Attribute::getWithNoFPClass(Ctx, getAssumedNoFPClass()));
  }
};
10508struct AANoFPClassFloating :
public AANoFPClassImpl {
10509 AANoFPClassFloating(
const IRPosition &IRP, Attributor &
A)
10510 : AANoFPClassImpl(IRP,
A) {}
10515 bool UsedAssumedInformation =
false;
10516 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
10518 Values.
push_back({getAssociatedValue(), getCtxI()});
10524 DepClassTy::REQUIRED);
10525 if (!AA ||
this == AA) {
10526 T.indicatePessimisticFixpoint();
10528 const AANoFPClass::StateType &S =
10529 static_cast<const AANoFPClass::StateType &
>(AA->
getState());
10532 return T.isValidState();
10535 for (
const auto &VAC : Values)
10536 if (!VisitValueCB(*
VAC.getValue(),
VAC.getCtxI()))
10537 return indicatePessimisticFixpoint();
10543 void trackStatistics()
const override {
struct AANoFPClassReturned final
    : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
                                   AANoFPClassImpl::StateType, false,
                                   Attribute::None, false> {
  AANoFPClassReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
                                     AANoFPClassImpl::StateType, false,
                                     Attribute::None, false>(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};

struct AANoFPClassArgument final
    : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
  AANoFPClassArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl>(IRP, A) {}

  /* ... */
};

struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
  AANoFPClassCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFPClassFloating(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};

struct AANoFPClassCallSiteReturned final
    : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
  AANoFPClassCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl>(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};
struct AACallEdgesImpl : public AACallEdges {
  AACallEdgesImpl(const IRPosition &IRP, Attributor &A)
      : AACallEdges(IRP, A) {}

  const SetVector<Function *> &getOptimisticEdges() const override {
    return CalledFunctions;
  }

  bool hasUnknownCallee() const override { return HasUnknownCallee; }

  bool hasNonAsmUnknownCallee() const override {
    return HasUnknownCalleeNonAsm;
  }

  const std::string getAsStr(Attributor *A) const override {
    return "CallEdges[" + std::to_string(HasUnknownCallee) + "," +
           std::to_string(CalledFunctions.size()) + "]";
  }

  void trackStatistics() const override {}

  void addCalledFunction(Function *Fn, ChangeStatus &Change) {
    if (CalledFunctions.insert(Fn)) {
      Change = ChangeStatus::CHANGED;
      /* ... */
    }
  }

  void setHasUnknownCallee(bool NonAsm, ChangeStatus &Change) {
    if (!HasUnknownCallee)
      Change = ChangeStatus::CHANGED;
    if (NonAsm && !HasUnknownCalleeNonAsm)
      Change = ChangeStatus::CHANGED;
    HasUnknownCalleeNonAsm |= NonAsm;
    HasUnknownCallee = true;
  }

  /// Optimistic set of functions that might be called by this position.
  SetVector<Function *> CalledFunctions;

  /// Is there a call with an unknown callee?
  bool HasUnknownCallee = false;

  /// Is there a call with an unknown callee, excluding inline asm?
  bool HasUnknownCalleeNonAsm = false;
};
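// A standalone sketch (assumed names, not the LLVM API) of the call-edge
// state kept by AACallEdgesImpl above: an insertion-ordered set of known
// callees plus two "unknown callee" flags.  Each mutator reports whether the
// state changed, which is what feeds ChangeStatus in the real update.
#include <algorithm>
#include <vector>

namespace call_edges_sketch {

struct Function; // opaque stand-in for llvm::Function

struct CallEdgesState {
  std::vector<Function *> CalledFunctions; // insertion-ordered, unique
  bool HasUnknownCallee = false;
  bool HasUnknownCalleeNonAsm = false;

  // Returns true if the state changed (a new callee was discovered).
  bool addCalledFunction(Function *Fn) {
    if (std::find(CalledFunctions.begin(), CalledFunctions.end(), Fn) !=
        CalledFunctions.end())
      return false;
    CalledFunctions.push_back(Fn);
    return true;
  }

  // Returns true if the state changed (an unknown callee was recorded).
  bool setHasUnknownCallee(bool NonAsm) {
    bool Changed = !HasUnknownCallee || (NonAsm && !HasUnknownCalleeNonAsm);
    HasUnknownCallee = true;
    HasUnknownCalleeNonAsm |= NonAsm;
    return Changed;
  }
};

} // namespace call_edges_sketch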
struct AACallEdgesCallSite : public AACallEdgesImpl {
  AACallEdgesCallSite(const IRPosition &IRP, Attributor &A)
      : AACallEdgesImpl(IRP, A) {}
10651 addCalledFunction(Fn, Change);
10653 LLVM_DEBUG(
dbgs() <<
"[AACallEdges] Unrecognized value: " << V <<
"\n");
10654 setHasUnknownCallee(
true, Change);
10665 VisitValue(*V, CtxI);
10669 bool UsedAssumedInformation =
false;
10675 for (
auto &VAC : Values)
10676 VisitValue(*
VAC.getValue(),
VAC.getCtxI());
10682 if (
IA->hasSideEffects() &&
10685 setHasUnknownCallee(
false, Change);
10691 if (
auto *IndirectCallAA =
A.getAAFor<AAIndirectCallInfo>(
10692 *
this, getIRPosition(), DepClassTy::OPTIONAL))
10693 if (IndirectCallAA->foreachCallee(
10694 [&](Function *Fn) { return VisitValue(*Fn, CB); }))
10703 for (
const Use *U : CallbackUses)
10704 ProcessCalledOperand(
U->get(), CB);
10710struct AACallEdgesFunction :
public AACallEdgesImpl {
10711 AACallEdgesFunction(
const IRPosition &IRP, Attributor &
A)
10712 : AACallEdgesImpl(IRP,
A) {}
10721 auto *CBEdges =
A.getAAFor<AACallEdges>(
10725 if (CBEdges->hasNonAsmUnknownCallee())
10726 setHasUnknownCallee(
true, Change);
10727 if (CBEdges->hasUnknownCallee())
10728 setHasUnknownCallee(
false, Change);
10730 for (Function *
F : CBEdges->getOptimisticEdges())
10731 addCalledFunction(
F, Change);
10737 bool UsedAssumedInformation =
false;
10738 if (!
A.checkForAllCallLikeInstructions(ProcessCallInst, *
this,
10739 UsedAssumedInformation,
10743 setHasUnknownCallee(
true, Change);
10752struct AAInterFnReachabilityFunction
10753 :
public CachedReachabilityAA<AAInterFnReachability, Function> {
10754 using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
10755 AAInterFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
10758 bool instructionCanReach(
10759 Attributor &
A,
const Instruction &From,
const Function &To,
10762 auto *NonConstThis =
const_cast<AAInterFnReachabilityFunction *
>(
this);
10764 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
10765 RQITy::Reachable
Result;
10766 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
10767 return NonConstThis->isReachableImpl(
A, StackRQI,
10769 return Result == RQITy::Reachable::Yes;
10773 bool IsTemporaryRQI)
override {
10775 &RQI.From->getFunction()->getEntryBlock().front();
10776 if (EntryI != RQI.From &&
10777 !instructionCanReach(
A, *EntryI, *RQI.To,
nullptr))
10778 return rememberResult(
A, RQITy::Reachable::No, RQI,
false,
10781 auto CheckReachableCallBase = [&](CallBase *CB) {
10782 auto *CBEdges =
A.getAAFor<AACallEdges>(
10784 if (!CBEdges || !CBEdges->getState().isValidState())
10787 if (CBEdges->hasUnknownCallee())
10790 for (Function *Fn : CBEdges->getOptimisticEdges()) {
10801 if (Fn == getAnchorScope()) {
10802 if (EntryI == RQI.From)
10807 const AAInterFnReachability *InterFnReachability =
10809 DepClassTy::OPTIONAL);
10812 if (!InterFnReachability ||
10820 const auto *IntraFnReachability =
A.getAAFor<AAIntraFnReachability>(
10822 DepClassTy::OPTIONAL);
10830 return IntraFnReachability && !IntraFnReachability->isAssumedReachable(
10831 A, *RQI.From, CBInst, RQI.ExclusionSet);
10834 bool UsedExclusionSet =
true;
10835 bool UsedAssumedInformation =
false;
10836 if (!
A.checkForAllCallLikeInstructions(CheckCallBase, *
this,
10837 UsedAssumedInformation,
10839 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
10842 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
10846 void trackStatistics()
const override {}
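// Standalone sketch of the query pattern AAInterFnReachabilityFunction uses:
// answer "can From reach To?" over the optimistic call edges while memoizing
// results so repeated queries stay cheap.  This models the idea only; the
// graph type, names, and caching policy are assumptions (the exclusion-set
// handling of the real implementation is omitted).
#include <map>
#include <set>
#include <utility>
#include <vector>

namespace reachability_sketch {

using Node = int;
using Graph = std::map<Node, std::vector<Node>>; // caller -> callees

class ReachabilityOracle {
  const Graph &G;
  std::map<std::pair<Node, Node>, bool> Cache; // memoized query results

public:
  explicit ReachabilityOracle(const Graph &G) : G(G) {}

  bool canReach(Node From, Node To) {
    auto Key = std::make_pair(From, To);
    auto It = Cache.find(Key);
    if (It != Cache.end())
      return It->second;
    // DFS over the (optimistic) call edges.
    std::set<Node> Visited;
    std::vector<Node> Worklist{From};
    bool Reached = false;
    while (!Worklist.empty()) {
      Node N = Worklist.back();
      Worklist.pop_back();
      if (!Visited.insert(N).second)
        continue;
      if (N == To) {
        Reached = true;
        break;
      }
      auto EIt = G.find(N);
      if (EIt != G.end())
        Worklist.insert(Worklist.end(), EIt->second.begin(),
                        EIt->second.end());
    }
    Cache[Key] = Reached;
    return Reached;
  }
};

} // namespace reachability_sketch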
10850template <
typename AAType>
10851static std::optional<Constant *>
10854 if (!Ty.isIntegerTy())
10862 std::optional<Constant *> COpt =
AA->getAssumedConstant(
A);
10864 if (!COpt.has_value()) {
10866 return std::nullopt;
10868 if (
auto *
C = *COpt) {
10879 std::optional<Value *> V;
10880 for (
auto &It : Values) {
10882 if (V.has_value() && !*V)
10885 if (!V.has_value())
10899 if (
A.hasSimplificationCallback(getIRPosition())) {
10900 indicatePessimisticFixpoint();
10903 Value *Stripped = getAssociatedValue().stripPointerCasts();
10905 addValue(
A, getState(), *Stripped, getCtxI(),
AA::AnyScope,
10907 indicateOptimisticFixpoint();
10910 AAPotentialValues::initialize(
A);
10914 const std::string getAsStr(Attributor *
A)
const override {
10916 llvm::raw_string_ostream OS(Str);
10921 template <
typename AAType>
10922 static std::optional<Value *> askOtherAA(Attributor &
A,
10923 const AbstractAttribute &AA,
10924 const IRPosition &IRP,
Type &Ty) {
10929 return std::nullopt;
10936 virtual void addValue(Attributor &
A, StateType &State,
Value &V,
10938 Function *AnchorScope)
const {
10942 for (
const auto &U : CB->
args()) {
10952 Type &Ty = *getAssociatedType();
10953 std::optional<Value *> SimpleV =
10954 askOtherAA<AAValueConstantRange>(
A, *
this, ValIRP, Ty);
10955 if (SimpleV.has_value() && !*SimpleV) {
10956 auto *PotentialConstantsAA =
A.getAAFor<AAPotentialConstantValues>(
10957 *
this, ValIRP, DepClassTy::OPTIONAL);
10958 if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
10959 for (
const auto &It : PotentialConstantsAA->getAssumedSet())
10960 State.unionAssumed({{*ConstantInt::get(&Ty, It),
nullptr}, S});
10961 if (PotentialConstantsAA->undefIsContained())
10966 if (!SimpleV.has_value())
10978 State.unionAssumed({{*VPtr, CtxI}, S});
10984 AA::ValueAndContext
I;
10988 return II.I ==
I &&
II.S == S;
10991 return std::tie(
I, S) < std::tie(
II.I,
II.S);
10995 bool recurseForValue(Attributor &
A,
const IRPosition &IRP,
AA::ValueScope S) {
10996 SmallMapVector<AA::ValueAndContext, int, 8> ValueScopeMap;
11001 bool UsedAssumedInformation =
false;
11003 if (!
A.getAssumedSimplifiedValues(IRP,
this, Values, CS,
11004 UsedAssumedInformation))
11007 for (
auto &It : Values)
11008 ValueScopeMap[It] += CS;
11010 for (
auto &It : ValueScopeMap)
11011 addValue(
A, getState(), *It.first.getValue(), It.first.getCtxI(),
11017 void giveUpOnIntraprocedural(Attributor &
A) {
11018 auto NewS = StateType::getBestState(getState());
11019 for (
const auto &It : getAssumedSet()) {
11022 addValue(
A, NewS, *It.first.getValue(), It.first.getCtxI(),
11025 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11033 getState() = StateType::getBestState(getState());
11034 getState().unionAssumed({{getAssociatedValue(), getCtxI()},
AA::AnyScope});
11035 AAPotentialValues::indicateOptimisticFixpoint();
11036 return ChangeStatus::CHANGED;
11041 return indicatePessimisticFixpoint();
11049 if (!getAssumedSimplifiedValues(
A, Values, S))
11051 Value &OldV = getAssociatedValue();
11054 Value *NewV = getSingleValue(
A, *
this, getIRPosition(), Values);
11055 if (!NewV || NewV == &OldV)
11060 if (
A.changeAfterManifest(getIRPosition(), *NewV))
11061 return ChangeStatus::CHANGED;
11063 return ChangeStatus::UNCHANGED;
11066 bool getAssumedSimplifiedValues(
11067 Attributor &
A, SmallVectorImpl<AA::ValueAndContext> &Values,
11068 AA::ValueScope S,
bool RecurseForSelectAndPHI =
false)
const override {
11069 if (!isValidState())
11071 bool UsedAssumedInformation =
false;
11072 for (
const auto &It : getAssumedSet())
11073 if (It.second & S) {
11074 if (RecurseForSelectAndPHI && (
isa<PHINode>(It.first.getValue()) ||
11076 if (
A.getAssumedSimplifiedValues(
11078 this, Values, S, UsedAssumedInformation))
11083 assert(!undefIsContained() &&
"Undef should be an explicit value!");
struct AAPotentialValuesFloating : AAPotentialValuesImpl {
  AAPotentialValuesFloating(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  ChangeStatus updateImpl(Attributor &A) override {
    auto AssumedBefore = getAssumed();
    genericValueTraversal(A, &getAssociatedValue());
    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  }

  /// Liveness information for a function, cached during value traversal.
  struct LivenessInfo {
    const AAIsDead *LivenessAA = nullptr;
    bool AnyDead = false;
  };
11115 SmallVectorImpl<ItemInfo> &Worklist) {
11118 bool UsedAssumedInformation =
false;
11120 auto GetSimplifiedValues = [&](
Value &
V,
11122 if (!
A.getAssumedSimplifiedValues(
11126 Values.
push_back(AA::ValueAndContext{
V,
II.I.getCtxI()});
11128 return Values.
empty();
11130 if (GetSimplifiedValues(*
LHS, LHSValues))
11132 if (GetSimplifiedValues(*
RHS, RHSValues))
11137 InformationCache &InfoCache =
A.getInfoCache();
11144 F ?
A.getInfoCache().getTargetLibraryInfoForFunction(*
F) :
nullptr;
11149 const DataLayout &
DL =
A.getDataLayout();
11150 SimplifyQuery Q(
DL, TLI, DT, AC, CmpI);
11152 auto CheckPair = [&](
Value &LHSV,
Value &RHSV) {
11155 nullptr,
II.S, getAnchorScope());
11161 if (&LHSV == &RHSV &&
11163 Constant *NewV = ConstantInt::get(Type::getInt1Ty(Ctx),
11165 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11172 if (TypedLHS && TypedRHS) {
11174 if (NewV && NewV != &Cmp) {
11175 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11187 if (!LHSIsNull && !RHSIsNull)
11193 assert((LHSIsNull || RHSIsNull) &&
11194 "Expected nullptr versus non-nullptr comparison at this point");
11197 unsigned PtrIdx = LHSIsNull;
11198 bool IsKnownNonNull;
11201 DepClassTy::REQUIRED, IsKnownNonNull);
11202 if (!IsAssumedNonNull)
11208 addValue(
A, getState(), *NewV,
nullptr,
II.S,
11213 for (
auto &LHSValue : LHSValues)
11214 for (
auto &RHSValue : RHSValues)
11215 if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
11220 bool handleSelectInst(Attributor &
A, SelectInst &SI, ItemInfo
II,
11221 SmallVectorImpl<ItemInfo> &Worklist) {
11223 bool UsedAssumedInformation =
false;
11225 std::optional<Constant *>
C =
11226 A.getAssumedConstant(*
SI.getCondition(), *
this, UsedAssumedInformation);
11227 bool NoValueYet = !
C.has_value();
11235 }
else if (&SI == &getAssociatedValue()) {
11240 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11242 if (!SimpleV.has_value())
11245 addValue(
A, getState(), **SimpleV, CtxI,
II.S, getAnchorScope());
11253 bool handleLoadInst(Attributor &
A, LoadInst &LI, ItemInfo
II,
11254 SmallVectorImpl<ItemInfo> &Worklist) {
11255 SmallSetVector<Value *, 4> PotentialCopies;
11256 SmallSetVector<Instruction *, 4> PotentialValueOrigins;
11257 bool UsedAssumedInformation =
false;
11259 PotentialValueOrigins, *
this,
11260 UsedAssumedInformation,
11262 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Failed to get potentially "
11263 "loaded values for load instruction "
11271 InformationCache &InfoCache =
A.getInfoCache();
11273 if (!
llvm::all_of(PotentialValueOrigins, [&](Instruction *
I) {
11277 return A.isAssumedDead(
SI->getOperandUse(0),
this,
11279 UsedAssumedInformation,
11281 return A.isAssumedDead(*
I,
this,
nullptr,
11282 UsedAssumedInformation,
11285 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Load is onl used by assumes "
11286 "and we cannot delete all the stores: "
11297 bool AllLocal = ScopeIsLocal;
11302 if (!DynamicallyUnique) {
11303 LLVM_DEBUG(
dbgs() <<
"[AAPotentialValues] Not all potentially loaded "
11304 "values are dynamically unique: "
11309 for (
auto *PotentialCopy : PotentialCopies) {
11311 Worklist.
push_back({{*PotentialCopy, CtxI},
II.S});
11316 if (!AllLocal && ScopeIsLocal)
11321 bool handlePHINode(
11322 Attributor &
A, PHINode &
PHI, ItemInfo
II,
11323 SmallVectorImpl<ItemInfo> &Worklist,
11324 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11325 auto GetLivenessInfo = [&](
const Function &
F) -> LivenessInfo & {
11326 LivenessInfo &LI = LivenessAAs[&
F];
11327 if (!LI.LivenessAA)
11333 if (&
PHI == &getAssociatedValue()) {
11334 LivenessInfo &LI = GetLivenessInfo(*
PHI.getFunction());
11336 A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
11337 *
PHI.getFunction());
11341 for (
unsigned u = 0, e =
PHI.getNumIncomingValues(); u < e; u++) {
11343 if (LI.LivenessAA &&
11344 LI.LivenessAA->isEdgeDead(IncomingBB,
PHI.getParent())) {
11363 bool UsedAssumedInformation =
false;
11364 std::optional<Value *> SimpleV =
A.getAssumedSimplified(
11366 if (!SimpleV.has_value())
11370 addValue(
A, getState(), **SimpleV, &
PHI,
II.S, getAnchorScope());
11377 bool handleGenericInst(Attributor &
A, Instruction &
I, ItemInfo
II,
11378 SmallVectorImpl<ItemInfo> &Worklist) {
11379 bool SomeSimplified =
false;
11380 bool UsedAssumedInformation =
false;
11382 SmallVector<Value *, 8> NewOps(
I.getNumOperands());
11385 const auto &SimplifiedOp =
A.getAssumedSimplified(
11390 if (!SimplifiedOp.has_value())
11394 NewOps[Idx] = *SimplifiedOp;
11398 SomeSimplified |= (NewOps[Idx] !=
Op);
11404 if (!SomeSimplified)
11407 InformationCache &InfoCache =
A.getInfoCache();
11411 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
11414 const DataLayout &
DL =
I.getDataLayout();
11415 SimplifyQuery Q(
DL, TLI, DT, AC, &
I);
11417 if (!NewV || NewV == &
I)
11420 LLVM_DEBUG(
dbgs() <<
"Generic inst " <<
I <<
" assumed simplified to "
11427 Attributor &
A, Instruction &
I, ItemInfo
II,
11428 SmallVectorImpl<ItemInfo> &Worklist,
11429 SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
11432 CI->getPredicate(),
II, Worklist);
11434 switch (
I.getOpcode()) {
11435 case Instruction::Select:
11437 case Instruction::PHI:
11439 case Instruction::Load:
11442 return handleGenericInst(
A,
I,
II, Worklist);
11447 void genericValueTraversal(Attributor &
A,
Value *InitialV) {
11448 SmallMapVector<const Function *, LivenessInfo, 4> LivenessAAs;
11450 SmallSet<ItemInfo, 16> Visited;
11469 LLVM_DEBUG(
dbgs() <<
"Generic value traversal reached iteration limit: "
11470 << Iteration <<
"!\n");
11471 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11477 Value *NewV =
nullptr;
11478 if (
V->getType()->isPointerTy()) {
11484 for (Argument &Arg :
Callee->args())
11491 if (NewV && NewV != V) {
11492 Worklist.
push_back({{*NewV, CtxI}, S});
11506 if (V == InitialV && CtxI == getCtxI()) {
11507 indicatePessimisticFixpoint();
11511 addValue(
A, getState(), *V, CtxI, S, getAnchorScope());
11512 }
while (!Worklist.
empty());
11516 for (
auto &It : LivenessAAs)
11517 if (It.second.AnyDead)
11518 A.recordDependence(*It.second.LivenessAA, *
this, DepClassTy::OPTIONAL);
11522 void trackStatistics()
const override {
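// Standalone sketch of the genericValueTraversal() pattern above: a worklist
// walk over values with a visited set and an iteration limit; items that
// cannot be decomposed further become results, and hitting the limit falls
// back to a conservative answer.  All names here are assumptions, not the
// Attributor API.
#include <functional>
#include <set>
#include <vector>

namespace value_traversal_sketch {

using ValueId = int;

// Decompose(V) returns the values V forwards to (e.g. select/phi operands),
// or an empty vector if V is a leaf.
std::vector<ValueId>
traverse(ValueId Root,
         const std::function<std::vector<ValueId>(ValueId)> &Decompose,
         unsigned MaxIterations = 64) {
  std::vector<ValueId> Result;
  std::set<ValueId> Visited;
  std::vector<ValueId> Worklist{Root};
  unsigned Iteration = 0;
  while (!Worklist.empty()) {
    if (++Iteration > MaxIterations) {
      // Limit reached: keep the remaining items as-is (conservative).
      for (ValueId V : Worklist)
        Result.push_back(V);
      break;
    }
    ValueId V = Worklist.back();
    Worklist.pop_back();
    if (!Visited.insert(V).second)
      continue;
    std::vector<ValueId> Parts = Decompose(V);
    if (Parts.empty())
      Result.push_back(V); // leaf value
    else
      Worklist.insert(Worklist.end(), Parts.begin(), Parts.end());
  }
  return Result;
}

} // namespace value_traversal_sketch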
11527struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
11528 using Base = AAPotentialValuesImpl;
11529 AAPotentialValuesArgument(
const IRPosition &IRP, Attributor &
A)
11536 indicatePessimisticFixpoint();
11541 auto AssumedBefore = getAssumed();
11543 unsigned ArgNo = getCalleeArgNo();
11545 bool UsedAssumedInformation =
false;
11547 auto CallSitePred = [&](AbstractCallSite ACS) {
11549 if (CSArgIRP.getPositionKind() == IRP_INVALID)
11552 if (!
A.getAssumedSimplifiedValues(CSArgIRP,
this, Values,
11554 UsedAssumedInformation))
11557 return isValidState();
11560 if (!
A.checkForAllCallSites(CallSitePred, *
this,
11562 UsedAssumedInformation))
11563 return indicatePessimisticFixpoint();
11565 Function *Fn = getAssociatedFunction();
11566 bool AnyNonLocal =
false;
11567 for (
auto &It : Values) {
11569 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11574 return indicatePessimisticFixpoint();
11578 addValue(
A, getState(), *It.getValue(), It.getCtxI(),
AA::AnyScope,
11584 AnyNonLocal =
true;
11586 assert(!undefIsContained() &&
"Undef should be an explicit value!");
11588 giveUpOnIntraprocedural(
A);
11590 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11591 : ChangeStatus::CHANGED;
11595 void trackStatistics()
const override {
11600struct AAPotentialValuesReturned :
public AAPotentialValuesFloating {
11601 using Base = AAPotentialValuesFloating;
11602 AAPotentialValuesReturned(
const IRPosition &IRP, Attributor &
A)
11608 if (!
F ||
F->isDeclaration() ||
F->getReturnType()->isVoidTy()) {
11609 indicatePessimisticFixpoint();
11613 for (Argument &Arg :
F->args())
11616 ReturnedArg = &Arg;
11619 if (!
A.isFunctionIPOAmendable(*
F) ||
11620 A.hasSimplificationCallback(getIRPosition())) {
11622 indicatePessimisticFixpoint();
11624 indicateOptimisticFixpoint();
11630 auto AssumedBefore = getAssumed();
11631 bool UsedAssumedInformation =
false;
11634 Function *AnchorScope = getAnchorScope();
11640 UsedAssumedInformation,
11646 bool AllInterAreIntra =
false;
11649 llvm::all_of(Values, [&](
const AA::ValueAndContext &VAC) {
11653 for (
const AA::ValueAndContext &VAC : Values) {
11654 addValue(
A, getState(), *
VAC.getValue(),
11655 VAC.getCtxI() ?
VAC.getCtxI() : CtxI,
11658 if (AllInterAreIntra)
11665 HandleReturnedValue(*ReturnedArg,
nullptr,
true);
11668 bool AddValues =
true;
11671 addValue(
A, getState(), *RetI.getOperand(0), &RetI,
AA::AnyScope,
11675 return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
11678 if (!
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11679 UsedAssumedInformation,
11681 return indicatePessimisticFixpoint();
11684 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11685 : ChangeStatus::CHANGED;
11690 return ChangeStatus::UNCHANGED;
11692 if (!getAssumedSimplifiedValues(
A, Values, AA::ValueScope::Intraprocedural,
11694 return ChangeStatus::UNCHANGED;
11695 Value *NewVal = getSingleValue(
A, *
this, getIRPosition(), Values);
11697 return ChangeStatus::UNCHANGED;
11702 "Number of function with unique return");
11705 {Attribute::get(Arg->
getContext(), Attribute::Returned)});
11710 Value *RetOp = RetI.getOperand(0);
11714 if (
A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
11715 Changed = ChangeStatus::CHANGED;
11718 bool UsedAssumedInformation =
false;
11719 (void)
A.checkForAllInstructions(RetInstPred, *
this, {Instruction::Ret},
11720 UsedAssumedInformation,
11726 return AAPotentialValues::indicatePessimisticFixpoint();
11730 void trackStatistics()
const override{
struct AAPotentialValuesFunction : AAPotentialValuesImpl {
  AAPotentialValuesFunction(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  /* ... */

  void trackStatistics() const override { /* ... */ }
};

struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
  AAPotentialValuesCallSite(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesFunction(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};
11763struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
11764 AAPotentialValuesCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
11765 : AAPotentialValuesImpl(IRP,
A) {}
11769 auto AssumedBefore = getAssumed();
11773 return indicatePessimisticFixpoint();
11775 bool UsedAssumedInformation =
false;
11779 UsedAssumedInformation))
11780 return indicatePessimisticFixpoint();
11787 Values, S, UsedAssumedInformation))
11790 for (
auto &It : Values) {
11791 Value *
V = It.getValue();
11792 std::optional<Value *> CallerV =
A.translateArgumentToCallSiteContent(
11793 V, *CB, *
this, UsedAssumedInformation);
11794 if (!CallerV.has_value()) {
11798 V = *CallerV ? *CallerV :
V;
11804 giveUpOnIntraprocedural(
A);
11807 addValue(
A, getState(), *V, CB, S, getAnchorScope());
11812 return indicatePessimisticFixpoint();
11814 return indicatePessimisticFixpoint();
11815 return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
11816 : ChangeStatus::CHANGED;
11820 return AAPotentialValues::indicatePessimisticFixpoint();
11824 void trackStatistics()
const override {
struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
  AAPotentialValuesCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesFloating(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};
11842struct AAAssumptionInfoImpl :
public AAAssumptionInfo {
11843 AAAssumptionInfoImpl(
const IRPosition &IRP, Attributor &
A,
11844 const DenseSet<StringRef> &Known)
11845 : AAAssumptionInfo(IRP,
A, Known) {}
11850 if (getKnown().isUniversal())
11851 return ChangeStatus::UNCHANGED;
11853 const IRPosition &IRP = getIRPosition();
11855 getAssumed().getSet().
end());
11857 return A.manifestAttrs(IRP,
11864 bool hasAssumption(
const StringRef Assumption)
const override {
11865 return isValidState() && setContains(Assumption);
11869 const std::string getAsStr(Attributor *
A)
const override {
11870 const SetContents &Known = getKnown();
11871 const SetContents &Assumed = getAssumed();
11875 const std::string KnownStr =
llvm::join(Set,
",");
11877 std::string AssumedStr =
"Universal";
11878 if (!Assumed.isUniversal()) {
11879 Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
11882 return "Known [" + KnownStr +
"]," +
" Assumed [" + AssumedStr +
"]";
11897struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
11898 AAAssumptionInfoFunction(
const IRPosition &IRP, Attributor &
A)
11899 : AAAssumptionInfoImpl(IRP,
A,
11906 auto CallSitePred = [&](AbstractCallSite ACS) {
11907 const auto *AssumptionAA =
A.getAAFor<AAAssumptionInfo>(
11909 DepClassTy::REQUIRED);
11913 Changed |= getIntersection(AssumptionAA->getAssumed());
11914 return !getAssumed().empty() || !getKnown().empty();
11917 bool UsedAssumedInformation =
false;
11922 if (!
A.checkForAllCallSites(CallSitePred, *
this,
true,
11923 UsedAssumedInformation))
11924 return indicatePessimisticFixpoint();
11926 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11929 void trackStatistics()
const override {}
11933struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
11935 AAAssumptionInfoCallSite(
const IRPosition &IRP, Attributor &
A)
11936 : AAAssumptionInfoImpl(IRP,
A, getInitialAssumptions(IRP)) {}
11941 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11947 auto *AssumptionAA =
11948 A.getAAFor<AAAssumptionInfo>(*
this, FnPos, DepClassTy::REQUIRED);
11950 return indicatePessimisticFixpoint();
11951 bool Changed = getIntersection(AssumptionAA->getAssumed());
11952 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
11956 void trackStatistics()
const override {}
11961 DenseSet<StringRef> getInitialAssumptions(
const IRPosition &IRP) {
11968 return Assumptions;
11983struct AAUnderlyingObjectsImpl
11989 const std::string getAsStr(
Attributor *
A)
const override {
11990 if (!isValidState())
11991 return "<invalid>";
11994 OS <<
"underlying objects: inter " << InterAssumedUnderlyingObjects.size()
11995 <<
" objects, intra " << IntraAssumedUnderlyingObjects.size()
11997 if (!InterAssumedUnderlyingObjects.empty()) {
11998 OS <<
"inter objects:\n";
11999 for (
auto *Obj : InterAssumedUnderlyingObjects)
12000 OS << *Obj <<
'\n';
12002 if (!IntraAssumedUnderlyingObjects.empty()) {
12003 OS <<
"intra objects:\n";
12004 for (
auto *Obj : IntraAssumedUnderlyingObjects)
12005 OS << *Obj <<
'\n';
12011 void trackStatistics()
const override {}
12015 auto &
Ptr = getAssociatedValue();
12017 bool UsedAssumedInformation =
false;
12018 auto DoUpdate = [&](SmallSetVector<Value *, 8> &UnderlyingObjects,
12020 SmallPtrSet<Value *, 8> SeenObjects;
12024 Scope, UsedAssumedInformation))
12029 for (
unsigned I = 0;
I < Values.
size(); ++
I) {
12030 auto &
VAC = Values[
I];
12031 auto *Obj =
VAC.getValue();
12033 if (!SeenObjects.
insert(UO ? UO : Obj).second)
12035 if (UO && UO != Obj) {
12041 const auto *OtherAA =
A.getAAFor<AAUnderlyingObjects>(
12043 auto Pred = [&](
Value &
V) {
12051 if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
12053 "The forall call should not return false at this position");
12059 Changed |= handleIndirect(
A, *Obj, UnderlyingObjects, Scope,
12060 UsedAssumedInformation);
12066 for (
unsigned u = 0, e =
PHI->getNumIncomingValues(); u < e; u++) {
12068 handleIndirect(
A, *
PHI->getIncomingValue(u), UnderlyingObjects,
12069 Scope, UsedAssumedInformation);
12083 if (!UsedAssumedInformation)
12084 indicateOptimisticFixpoint();
12085 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
12088 bool forallUnderlyingObjects(
12089 function_ref<
bool(
Value &)> Pred,
12091 if (!isValidState())
12092 return Pred(getAssociatedValue());
12095 ? IntraAssumedUnderlyingObjects
12096 : InterAssumedUnderlyingObjects;
12097 for (
Value *Obj : AssumedUnderlyingObjects)
12107 bool handleIndirect(Attributor &
A,
Value &V,
12108 SmallSetVector<Value *, 8> &UnderlyingObjects,
12111 const auto *AA =
A.getAAFor<AAUnderlyingObjects>(
12113 auto Pred = [&](
Value &
V) {
12117 if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
12119 "The forall call should not return false at this position");
12125 SmallSetVector<Value *, 8> IntraAssumedUnderlyingObjects;
12127 SmallSetVector<Value *, 8> InterAssumedUnderlyingObjects;
struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsFloating(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsArgument(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSite(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsReturned(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsFunction(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};
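// Standalone sketch of the AAUnderlyingObjects update above: recursively
// resolve phi/select-like indirection to a set of underlying objects and
// expose a forall-style query over that set.  The graph representation and
// names are assumptions made for illustration.
#include <functional>
#include <map>
#include <set>
#include <string>
#include <vector>

namespace underlying_objects_sketch {

// Maps an "indirect" value to the values it may forward (phi/select operands).
using ForwardMap = std::map<std::string, std::vector<std::string>>;

void collectUnderlyingObjects(const std::string &V, const ForwardMap &Forward,
                              std::set<std::string> &Visited,
                              std::set<std::string> &Objects) {
  if (!Visited.insert(V).second)
    return; // already handled (phis may form cycles)
  auto It = Forward.find(V);
  if (It == Forward.end()) {
    Objects.insert(V); // a leaf: treat it as an underlying object
    return;
  }
  for (const std::string &Incoming : It->second)
    collectUnderlyingObjects(Incoming, Forward, Visited, Objects);
}

// Mirrors forallUnderlyingObjects(): apply Pred to each object, stop on false.
bool forallUnderlyingObjects(
    const std::set<std::string> &Objects,
    const std::function<bool(const std::string &)> &Pred) {
  for (const std::string &Obj : Objects)
    if (!Pred(Obj))
      return false;
  return true;
}

} // namespace underlying_objects_sketch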
12168struct AAGlobalValueInfoFloating :
public AAGlobalValueInfo {
12169 AAGlobalValueInfoFloating(
const IRPosition &IRP, Attributor &
A)
12170 : AAGlobalValueInfo(IRP,
A) {}
12175 bool checkUse(Attributor &
A,
const Use &U,
bool &Follow,
12176 SmallVectorImpl<const Value *> &Worklist) {
12183 LLVM_DEBUG(
dbgs() <<
"[AAGlobalValueInfo] Check use: " << *
U.get() <<
" in "
12184 << *UInst <<
"\n");
12187 int Idx = &
Cmp->getOperandUse(0) == &
U;
12190 return U == &getAnchorValue();
12195 auto CallSitePred = [&](AbstractCallSite ACS) {
12196 Worklist.
push_back(ACS.getInstruction());
12199 bool UsedAssumedInformation =
false;
12201 if (!
A.checkForAllCallSites(CallSitePred, *UInst->
getFunction(),
12203 UsedAssumedInformation))
12221 if (!Fn || !
A.isFunctionIPOAmendable(*Fn))
12230 unsigned NumUsesBefore =
Uses.size();
12232 SmallPtrSet<const Value *, 8> Visited;
12236 auto UsePred = [&](
const Use &
U,
bool &Follow) ->
bool {
12244 return checkUse(
A, U, Follow, Worklist);
12246 auto EquivalentUseCB = [&](
const Use &OldU,
const Use &NewU) {
12247 Uses.insert(&OldU);
12251 while (!Worklist.
empty()) {
12253 if (!Visited.
insert(V).second)
12255 if (!
A.checkForAllUses(UsePred, *
this, *V,
12257 DepClassTy::OPTIONAL,
12258 true, EquivalentUseCB)) {
12259 return indicatePessimisticFixpoint();
12263 return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
12264 : ChangeStatus::CHANGED;
12267 bool isPotentialUse(
const Use &U)
const override {
12268 return !isValidState() ||
Uses.contains(&U);
12273 return ChangeStatus::UNCHANGED;
12277 const std::string getAsStr(Attributor *
A)
const override {
12278 return "[" + std::to_string(
Uses.size()) +
" uses]";
12281 void trackStatistics()
const override {
12287 SmallPtrSet<const Use *, 8>
Uses;
12293struct AAIndirectCallInfoCallSite :
public AAIndirectCallInfo {
12294 AAIndirectCallInfoCallSite(
const IRPosition &IRP, Attributor &
A)
12295 : AAIndirectCallInfo(IRP,
A) {}
12299 auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
12300 if (!MD && !
A.isClosedWorldModule())
12304 for (
const auto &
Op : MD->operands())
12306 PotentialCallees.insert(Callee);
12307 }
else if (
A.isClosedWorldModule()) {
12309 A.getInfoCache().getIndirectlyCallableFunctions(
A);
12310 PotentialCallees.insert_range(IndirectlyCallableFunctions);
12313 if (PotentialCallees.empty())
12314 indicateOptimisticFixpoint();
12322 SmallSetVector<Function *, 4> AssumedCalleesNow;
12323 bool AllCalleesKnownNow = AllCalleesKnown;
12325 auto CheckPotentialCalleeUse = [&](
Function &PotentialCallee,
12326 bool &UsedAssumedInformation) {
12327 const auto *GIAA =
A.getAAFor<AAGlobalValueInfo>(
12329 if (!GIAA || GIAA->isPotentialUse(CalleeUse))
12331 UsedAssumedInformation = !GIAA->isAtFixpoint();
12335 auto AddPotentialCallees = [&]() {
12336 for (
auto *PotentialCallee : PotentialCallees) {
12337 bool UsedAssumedInformation =
false;
12338 if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
12339 AssumedCalleesNow.
insert(PotentialCallee);
12345 bool UsedAssumedInformation =
false;
12348 AA::ValueScope::AnyScope,
12349 UsedAssumedInformation)) {
12350 if (PotentialCallees.empty())
12351 return indicatePessimisticFixpoint();
12352 AddPotentialCallees();
12357 auto CheckPotentialCallee = [&](
Function &Fn) {
12358 if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
12361 auto &CachedResult = FilterResults[&Fn];
12362 if (CachedResult.has_value())
12363 return CachedResult.value();
12365 bool UsedAssumedInformation =
false;
12366 if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
12367 if (!UsedAssumedInformation)
12368 CachedResult =
false;
12377 for (
int I = NumCBArgs;
I < NumFnArgs; ++
I) {
12378 bool IsKnown =
false;
12381 DepClassTy::OPTIONAL, IsKnown)) {
12383 CachedResult =
false;
12388 CachedResult =
true;
12394 for (
auto &VAC : Values) {
12398 VAC.getValue()->getType()->getPointerAddressSpace() == 0)
12402 if (CheckPotentialCallee(*VACFn))
12403 AssumedCalleesNow.
insert(VACFn);
12406 if (!PotentialCallees.empty()) {
12407 AddPotentialCallees();
12410 AllCalleesKnownNow =
false;
12413 if (AssumedCalleesNow == AssumedCallees &&
12414 AllCalleesKnown == AllCalleesKnownNow)
12415 return ChangeStatus::UNCHANGED;
12417 std::swap(AssumedCallees, AssumedCalleesNow);
12418 AllCalleesKnown = AllCalleesKnownNow;
12419 return ChangeStatus::CHANGED;
12425 if (!AllCalleesKnown && AssumedCallees.empty())
12426 return ChangeStatus::UNCHANGED;
12429 bool UsedAssumedInformation =
false;
12430 if (
A.isAssumedDead(*CB,
this,
nullptr,
12431 UsedAssumedInformation))
12432 return ChangeStatus::UNCHANGED;
12436 if (
FP->getType()->getPointerAddressSpace())
12437 FP =
new AddrSpaceCastInst(
FP, PointerType::get(
FP->getContext(), 0),
12447 if (AssumedCallees.empty()) {
12448 assert(AllCalleesKnown &&
12449 "Expected all callees to be known if there are none.");
12450 A.changeToUnreachableAfterManifest(CB);
12451 return ChangeStatus::CHANGED;
12455 if (AllCalleesKnown && AssumedCallees.size() == 1) {
12456 auto *NewCallee = AssumedCallees.front();
12459 NumIndirectCallsPromoted++;
12460 return ChangeStatus::CHANGED;
12467 A.deleteAfterManifest(*CB);
12468 return ChangeStatus::CHANGED;
12478 bool SpecializedForAnyCallees =
false;
12479 bool SpecializedForAllCallees = AllCalleesKnown;
12480 ICmpInst *LastCmp =
nullptr;
12483 for (Function *NewCallee : AssumedCallees) {
12484 if (!
A.shouldSpecializeCallSiteForCallee(*
this, *CB, *NewCallee,
12485 AssumedCallees.size())) {
12486 SkippedAssumedCallees.
push_back(NewCallee);
12487 SpecializedForAllCallees =
false;
12490 SpecializedForAnyCallees =
true;
12496 A.registerManifestAddedBasicBlock(*ThenTI->
getParent());
12497 A.registerManifestAddedBasicBlock(*IP->getParent());
12503 A.registerManifestAddedBasicBlock(*ElseBB);
12505 SplitTI->replaceUsesOfWith(CBBB, ElseBB);
12510 CastInst *RetBC =
nullptr;
12511 CallInst *NewCall =
nullptr;
12516 NumIndirectCallsPromoted++;
12524 auto AttachCalleeMetadata = [&](CallBase &IndirectCB) {
12525 if (!AllCalleesKnown)
12526 return ChangeStatus::UNCHANGED;
12527 MDBuilder MDB(IndirectCB.getContext());
12528 MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
12529 IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
12530 return ChangeStatus::CHANGED;
12533 if (!SpecializedForAnyCallees)
12534 return AttachCalleeMetadata(*CB);
12537 if (SpecializedForAllCallees) {
12540 new UnreachableInst(IP->getContext(), IP);
12541 IP->eraseFromParent();
12544 CBClone->setName(CB->
getName());
12545 CBClone->insertBefore(*IP->getParent(), IP);
12546 NewCalls.
push_back({CBClone,
nullptr});
12547 AttachCalleeMetadata(*CBClone);
12554 CB->
getParent()->getFirstInsertionPt());
12555 for (
auto &It : NewCalls) {
12556 CallBase *NewCall = It.first;
12557 Instruction *CallRet = It.second ? It.second : It.first;
12569 A.deleteAfterManifest(*CB);
12570 Changed = ChangeStatus::CHANGED;
12576 const std::string getAsStr(Attributor *
A)
const override {
12577 return std::string(AllCalleesKnown ?
"eliminate" :
"specialize") +
12578 " indirect call site with " + std::to_string(AssumedCallees.size()) +
12582 void trackStatistics()
const override {
12583 if (AllCalleesKnown) {
12585 Eliminated, CallSites,
12586 "Number of indirect call sites eliminated via specialization")
12589 "Number of indirect call sites specialized")
12593 bool foreachCallee(function_ref<
bool(Function *)> CB)
const override {
12594 return isValidState() && AllCalleesKnown &&
all_of(AssumedCallees, CB);
12599 DenseMap<Function *, std::optional<bool>> FilterResults;
12603 SmallSetVector<Function *, 4> PotentialCallees;
12607 SmallSetVector<Function *, 4> AssumedCallees;
12611 bool AllCalleesKnown =
true;
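// Standalone sketch of the transformation AAIndirectCallInfoCallSite performs
// conceptually when it manifests: an indirect call with a small set of
// assumed callees is rewritten into compare-and-branch guarded direct calls,
// keeping the original indirect call as a fallback unless all callees are
// known.  Everything below is illustrative C++, not the IR rewriting itself.
namespace indirect_call_sketch {

using Callback = int (*)(int);

int known_callee_a(int X) { return X + 1; }
int known_callee_b(int X) { return X * 2; }

// Before: return FP(Arg);  After (specialized for two assumed callees):
int call_specialized(Callback FP, int Arg) {
  if (FP == &known_callee_a)
    return known_callee_a(Arg); // direct call, now inlinable/analyzable
  if (FP == &known_callee_b)
    return known_callee_b(Arg);
  return FP(Arg); // fallback indirect call for unknown callees
}

} // namespace indirect_call_sketch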
12618struct AAInvariantLoadPointerImpl
12619 :
public StateWrapper<BitIntegerState<uint8_t, 15>,
12620 AAInvariantLoadPointer> {
12624 IS_NOALIAS = 1 << 0,
12627 IS_NOEFFECT = 1 << 1,
12629 IS_LOCALLY_INVARIANT = 1 << 2,
12631 IS_LOCALLY_CONSTRAINED = 1 << 3,
12633 IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
12634 IS_LOCALLY_CONSTRAINED,
12636 static_assert(getBestState() == IS_BEST_STATE,
"Unexpected best state");
12639 StateWrapper<BitIntegerState<uint8_t, 15>, AAInvariantLoadPointer>;
12643 AAInvariantLoadPointerImpl(
const IRPosition &IRP, Attributor &
A)
12646 bool isKnownInvariant()
const final {
12647 return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
12650 bool isKnownLocallyInvariant()
const final {
12651 if (isKnown(IS_LOCALLY_INVARIANT))
12653 return isKnown(IS_NOALIAS | IS_NOEFFECT);
12656 bool isAssumedInvariant()
const final {
12657 return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);
12660 bool isAssumedLocallyInvariant()
const final {
12661 if (isAssumed(IS_LOCALLY_INVARIANT))
12663 return isAssumed(IS_NOALIAS | IS_NOEFFECT);
12670 if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
12671 return indicatePessimisticFixpoint();
12675 Changed |= updateLocalInvariance(
A);
12681 if (!isKnownInvariant())
12682 return ChangeStatus::UNCHANGED;
12685 const Value *
Ptr = &getAssociatedValue();
12686 const auto TagInvariantLoads = [&](
const Use &
U,
bool &) {
12687 if (
U.get() !=
Ptr)
12695 if (!
A.isRunOn(
I->getFunction()))
12698 if (
I->hasMetadata(LLVMContext::MD_invariant_load))
12702 LI->setMetadata(LLVMContext::MD_invariant_load,
12704 Changed = ChangeStatus::CHANGED;
12709 (void)
A.checkForAllUses(TagInvariantLoads, *
this, *
Ptr);
12714 const std::string getAsStr(Attributor *)
const override {
12715 if (isKnownInvariant())
12716 return "load-invariant pointer";
12717 return "non-invariant pointer";
12721 void trackStatistics()
const override {}
12725 bool requiresNoAlias()
const {
12726 switch (getPositionKind()) {
12732 case IRP_CALL_SITE:
12734 case IRP_CALL_SITE_RETURNED: {
12739 case IRP_ARGUMENT: {
12740 const Function *
F = getAssociatedFunction();
12741 assert(
F &&
"no associated function for argument");
12747 bool isExternal()
const {
12748 const Function *
F = getAssociatedFunction();
12752 getPositionKind() != IRP_CALL_SITE_RETURNED;
12756 if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
12757 return ChangeStatus::UNCHANGED;
12760 if (
const auto *ANoAlias =
A.getOrCreateAAFor<AANoAlias>(
12761 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12762 if (ANoAlias->isKnownNoAlias()) {
12763 addKnownBits(IS_NOALIAS);
12764 return ChangeStatus::CHANGED;
12767 if (!ANoAlias->isAssumedNoAlias()) {
12768 removeAssumedBits(IS_NOALIAS);
12769 return ChangeStatus::CHANGED;
12772 return ChangeStatus::UNCHANGED;
12777 if (
const Argument *Arg = getAssociatedArgument()) {
12779 addKnownBits(IS_NOALIAS);
12780 return ChangeStatus::UNCHANGED;
12785 removeAssumedBits(IS_NOALIAS);
12786 return ChangeStatus::CHANGED;
12789 return ChangeStatus::UNCHANGED;
12793 if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
12794 return ChangeStatus::UNCHANGED;
12796 if (!getAssociatedFunction())
12797 return indicatePessimisticFixpoint();
12800 return indicatePessimisticFixpoint();
12802 const auto HasNoEffectLoads = [&](
const Use &
U,
bool &) {
12804 return !LI || !LI->mayHaveSideEffects();
12806 if (!
A.checkForAllUses(HasNoEffectLoads, *
this, getAssociatedValue()))
12807 return indicatePessimisticFixpoint();
12809 if (
const auto *AMemoryBehavior =
A.getOrCreateAAFor<AAMemoryBehavior>(
12810 getIRPosition(),
this, DepClassTy::REQUIRED)) {
12813 if (!AMemoryBehavior->isAssumedReadOnly())
12814 return indicatePessimisticFixpoint();
12816 if (AMemoryBehavior->isKnownReadOnly()) {
12817 addKnownBits(IS_NOEFFECT);
12818 return ChangeStatus::UNCHANGED;
12821 return ChangeStatus::UNCHANGED;
12824 if (
const Argument *Arg = getAssociatedArgument()) {
12826 addKnownBits(IS_NOEFFECT);
12827 return ChangeStatus::UNCHANGED;
12832 return indicatePessimisticFixpoint();
12835 return ChangeStatus::UNCHANGED;
12839 if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
12840 return ChangeStatus::UNCHANGED;
12843 const auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
12844 getIRPosition(),
this, DepClassTy::REQUIRED);
12846 return ChangeStatus::UNCHANGED;
12848 bool UsedAssumedInformation =
false;
12849 const auto IsLocallyInvariantLoadIfPointer = [&](
const Value &
V) {
12850 if (!
V.getType()->isPointerTy())
12852 const auto *IsInvariantLoadPointer =
12854 DepClassTy::REQUIRED);
12856 if (!IsInvariantLoadPointer)
12859 if (IsInvariantLoadPointer->isKnownLocallyInvariant())
12861 if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
12864 UsedAssumedInformation =
true;
12867 if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
12868 return indicatePessimisticFixpoint();
12874 if (!IsLocallyInvariantLoadIfPointer(*Arg))
12875 return indicatePessimisticFixpoint();
12880 if (!UsedAssumedInformation) {
12882 addKnownBits(IS_LOCALLY_INVARIANT);
12883 return ChangeStatus::CHANGED;
12886 return ChangeStatus::UNCHANGED;
12890struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
12891 AAInvariantLoadPointerFloating(
const IRPosition &IRP, Attributor &
A)
12892 : AAInvariantLoadPointerImpl(IRP,
A) {}
12895struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
12896 AAInvariantLoadPointerReturned(
const IRPosition &IRP, Attributor &
A)
12897 : AAInvariantLoadPointerImpl(IRP,
A) {}
12900 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12904struct AAInvariantLoadPointerCallSiteReturned final
12905 : AAInvariantLoadPointerImpl {
12906 AAInvariantLoadPointerCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
12907 : AAInvariantLoadPointerImpl(IRP,
A) {}
12910 const Function *
F = getAssociatedFunction();
12911 assert(
F &&
"no associated function for return from call");
12913 if (!
F->isDeclaration() && !
F->isIntrinsic())
12914 return AAInvariantLoadPointerImpl::initialize(
A);
12919 return AAInvariantLoadPointerImpl::initialize(
A);
12921 if (
F->onlyReadsMemory() &&
F->hasNoSync())
12922 return AAInvariantLoadPointerImpl::initialize(
A);
12926 indicatePessimisticFixpoint();
12930struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
12931 AAInvariantLoadPointerArgument(
const IRPosition &IRP, Attributor &
A)
12932 : AAInvariantLoadPointerImpl(IRP,
A) {}
12935 const Function *
F = getAssociatedFunction();
12936 assert(
F &&
"no associated function for argument");
12939 addKnownBits(IS_LOCALLY_CONSTRAINED);
12943 if (!
F->hasLocalLinkage())
12944 removeAssumedBits(IS_LOCALLY_CONSTRAINED);
12948struct AAInvariantLoadPointerCallSiteArgument final
12949 : AAInvariantLoadPointerImpl {
12950 AAInvariantLoadPointerCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
12951 : AAInvariantLoadPointerImpl(IRP,
A) {}
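// Standalone sketch of the BitIntegerState pattern used by
// AAInvariantLoadPointerImpl above: "assumed" bits start optimistic (all set)
// and can only shrink, "known" bits start empty and can only grow, and known
// implies assumed.  The bit meanings mirror the enum above, but this class is
// an illustrative assumption, not llvm::BitIntegerState.
#include <cstdint>

namespace bit_state_sketch {

struct BitIntegerState {
  static constexpr uint8_t BestState = 0b1111;
  uint8_t Known = 0;           // facts proven so far
  uint8_t Assumed = BestState; // optimistic assumption, shrinks monotonically

  bool isKnown(uint8_t Bits) const { return (Known & Bits) == Bits; }
  bool isAssumed(uint8_t Bits) const { return (Assumed & Bits) == Bits; }

  void addKnownBits(uint8_t Bits) {
    Known |= Bits;
    Assumed |= Bits; // known implies assumed
  }

  void removeAssumedBits(uint8_t Bits) { Assumed &= ~Bits; }

  // Pessimistic fixpoint: nothing beyond what is already known holds.
  void indicatePessimisticFixpoint() { Assumed = Known; }
  // Optimistic fixpoint: everything assumed is now known.
  void indicateOptimisticFixpoint() { Known = Assumed; }
};

} // namespace bit_state_sketch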
template <typename InstType>
static bool makeChange(Attributor &A, InstType *MemInst, const Use &U,
                       Value *OriginalValue, PointerType *NewPtrTy,
                       bool UseOriginalValue) {
  if (U.getOperandNo() != InstType::getPointerOperandIndex())
    return false;

  if (MemInst->isVolatile()) {
    auto *TTI = A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(
        *MemInst->getFunction());
    unsigned NewAS = NewPtrTy->getPointerAddressSpace();
    /* ... bail out if the target has no volatile variant in NewAS ... */
  }

  if (UseOriginalValue) {
    A.changeUseAfterManifest(const_cast<Use &>(U), *OriginalValue);
    return true;
  }

  Instruction *CastInst = new AddrSpaceCastInst(OriginalValue, NewPtrTy);
  /* ... insert the cast before the memory instruction ... */
  A.changeUseAfterManifest(const_cast<Use &>(U), *CastInst);
  return true;
}
12984struct AAAddressSpaceImpl :
public AAAddressSpace {
12985 AAAddressSpaceImpl(
const IRPosition &IRP, Attributor &
A)
12986 : AAAddressSpace(IRP,
A) {}
12989 assert(isValidState() &&
"the AA is invalid");
12990 return AssumedAddressSpace;
12995 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
12996 "Associated value is not a pointer");
12998 if (!
A.getInfoCache().getFlatAddressSpace().has_value()) {
12999 indicatePessimisticFixpoint();
13003 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13004 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13005 if (AS != FlatAS) {
13006 [[maybe_unused]]
bool R = takeAddressSpace(AS);
13007 assert(R &&
"The take should happen");
13008 indicateOptimisticFixpoint();
13013 uint32_t OldAddressSpace = AssumedAddressSpace;
13014 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13016 auto CheckAddressSpace = [&](
Value &Obj) {
13022 unsigned ObjAS = Obj.getType()->getPointerAddressSpace();
13023 if (ObjAS != FlatAS)
13024 return takeAddressSpace(ObjAS);
13038 A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(*F);
13040 if (AssumedAS != ~0U)
13041 return takeAddressSpace(AssumedAS);
13045 return takeAddressSpace(FlatAS);
13048 auto *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(getIRPosition(),
this,
13049 DepClassTy::REQUIRED);
13050 if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
13051 return indicatePessimisticFixpoint();
13053 return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
13054 : ChangeStatus::CHANGED;
13061 if (NewAS == InvalidAddressSpace ||
13063 return ChangeStatus::UNCHANGED;
13065 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13067 Value *AssociatedValue = &getAssociatedValue();
13068 Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);
13071 PointerType::get(getAssociatedType()->
getContext(), NewAS);
13072 bool UseOriginalValue =
13077 auto Pred = [&](
const Use &
U,
bool &) {
13078 if (
U.get() != AssociatedValue)
13089 makeChange(
A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13092 makeChange(
A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
13095 makeChange(
A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
13098 makeChange(
A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
13105 (void)
A.checkForAllUses(Pred, *
this, getAssociatedValue(),
13108 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "addrspace(<invalid>)";
    return "addrspace(" +
           (AssumedAddressSpace == InvalidAddressSpace
                ? "none"
                : std::to_string(AssumedAddressSpace)) +
           ")";
  }

  uint32_t AssumedAddressSpace = InvalidAddressSpace;

  bool takeAddressSpace(uint32_t AS) {
    if (AssumedAddressSpace == InvalidAddressSpace) {
      AssumedAddressSpace = AS;
      return true;
    }
    return AssumedAddressSpace == AS;
  }

  static Value *peelAddrspacecast(Value *V, unsigned FlatAS) {
    if (auto *I = dyn_cast<AddrSpaceCastInst>(V)) {
      assert(I->getSrcAddressSpace() != FlatAS &&
             "there should not be flat AS -> non-flat AS");
      return I->getPointerOperand();
    }
    if (auto *C = dyn_cast<ConstantExpr>(V))
      if (C->getOpcode() == Instruction::AddrSpaceCast) {
        assert(C->getOperand(0)->getType()->getPointerAddressSpace() !=
                   FlatAS &&
               "there should not be flat AS -> non-flat AS X");
        return C->getOperand(0);
      }
    return V;
  }
};
struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
  AAAddressSpaceFloating(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};

struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
  AAAddressSpaceReturned(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    /* ... rewriting returned pointers is not supported yet ... */
    (void)indicatePessimisticFixpoint();
  }

  void trackStatistics() const override { /* ... */ }
};

struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
  AAAddressSpaceCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};

struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
  AAAddressSpaceArgument(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  /* ... */
};

struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
  AAAddressSpaceCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    /* ... rewriting call site arguments is not supported yet ... */
    (void)indicatePessimisticFixpoint();
  }

  void trackStatistics() const override { /* ... */ }
};
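// Standalone sketch of takeAddressSpace() above: the assumed address space
// lattice has an "unknown" element; the first concrete address space seen is
// adopted, and any later disagreement invalidates the deduction.  Constant
// and function names here are assumptions for illustration.
#include <cstdint>
#include <optional>

namespace address_space_sketch {

constexpr uint32_t InvalidAddressSpace = ~0u;

struct AddressSpaceState {
  uint32_t Assumed = InvalidAddressSpace;

  // Returns false when AS conflicts with an earlier deduction.
  bool takeAddressSpace(uint32_t AS) {
    if (Assumed == InvalidAddressSpace) {
      Assumed = AS;
      return true;
    }
    return Assumed == AS;
  }

  // The deduced address space, if all underlying objects agreed on one.
  std::optional<uint32_t> result() const {
    if (Assumed == InvalidAddressSpace)
      return std::nullopt;
    return Assumed;
  }
};

} // namespace address_space_sketch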
13217struct AANoAliasAddrSpaceImpl :
public AANoAliasAddrSpace {
13218 AANoAliasAddrSpaceImpl(
const IRPosition &IRP, Attributor &
A)
13219 : AANoAliasAddrSpace(IRP,
A) {}
13222 assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
13223 "Associated value is not a pointer");
13227 std::optional<unsigned> FlatAS =
A.getInfoCache().getFlatAddressSpace();
13228 if (!FlatAS.has_value()) {
13229 indicatePessimisticFixpoint();
13235 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13236 if (AS != *FlatAS) {
13238 indicateOptimisticFixpoint();
13243 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13244 uint32_t OldAssumed = getAssumed();
13246 auto CheckAddressSpace = [&](
Value &Obj) {
13250 unsigned AS = Obj.getType()->getPointerAddressSpace();
13254 removeAS(Obj.getType()->getPointerAddressSpace());
13258 const AAUnderlyingObjects *AUO =
A.getOrCreateAAFor<AAUnderlyingObjects>(
13259 getIRPosition(),
this, DepClassTy::REQUIRED);
13261 return indicatePessimisticFixpoint();
13263 return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
13264 : ChangeStatus::CHANGED;
13269 unsigned FlatAS =
A.getInfoCache().getFlatAddressSpace().value();
13271 unsigned AS = getAssociatedType()->getPointerAddressSpace();
13272 if (AS != FlatAS ||
Map.empty())
13273 return ChangeStatus::UNCHANGED;
13275 LLVMContext &Ctx = getAssociatedValue().getContext();
13276 MDNode *NoAliasASNode =
nullptr;
13277 MDBuilder MDB(Ctx);
13279 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13282 unsigned Upper =
I.stop();
13283 unsigned Lower =
I.start();
13284 if (!NoAliasASNode) {
13285 NoAliasASNode = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13288 MDNode *ASRange = MDB.createRange(APInt(32,
Lower), APInt(32,
Upper + 1));
13292 Value *AssociatedValue = &getAssociatedValue();
13295 auto AddNoAliasAttr = [&](
const Use &
U,
bool &) {
13296 if (
U.get() != AssociatedValue)
13299 if (!Inst || Inst->
hasMetadata(LLVMContext::MD_noalias_addrspace))
13306 Inst->
setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
13310 (void)
A.checkForAllUses(AddNoAliasAttr, *
this, *AssociatedValue,
13312 return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
13316 const std::string getAsStr(Attributor *
A)
const override {
13317 if (!isValidState())
13318 return "<invalid>";
13320 raw_string_ostream OS(Str);
13321 OS <<
"CanNotBeAddrSpace(";
13322 for (RangeMap::const_iterator
I =
Map.begin();
I !=
Map.end();
I++) {
13323 unsigned Upper =
I.stop();
13324 unsigned Lower =
I.start();
13325 OS <<
' ' <<
'[' <<
Upper <<
',' <<
Lower + 1 <<
')';
13332 void removeAS(
unsigned AS) {
13333 RangeMap::iterator
I =
Map.find(AS);
13335 if (
I !=
Map.end()) {
13336 unsigned Upper =
I.stop();
13337 unsigned Lower =
I.start();
13341 if (AS != ~((
unsigned)0) && AS + 1 <=
Upper)
13343 if (AS != 0 &&
Lower <= AS - 1)
13348 void resetASRanges(Attributor &
A) {
13350 Map.insert(0,
A.getInfoCache().getMaxAddrSpace(),
true);
struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};

struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};

struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};

struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};

struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override { /* ... */ }
};
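// Standalone sketch of removeAS() above: the state is a set of address-space
// ranges the pointer can *not* be in; proving the pointer may live in AS
// splits any covering range so that AS is excluded from the "cannot be" set.
// The interval representation here (a pair list) is an assumption; the real
// implementation uses an IntervalMap.
#include <utility>
#include <vector>

namespace noalias_addrspace_sketch {

using Range = std::pair<unsigned, unsigned>; // [Lower, Upper], inclusive

// Remove the single address space AS from a list of disjoint ranges.
std::vector<Range> removeAS(const std::vector<Range> &Ranges, unsigned AS) {
  std::vector<Range> Out;
  for (const Range &R : Ranges) {
    if (AS < R.first || AS > R.second) {
      Out.push_back(R); // AS not covered, keep the range untouched
      continue;
    }
    if (AS != 0 && R.first <= AS - 1)
      Out.push_back({R.first, AS - 1}); // keep the part below AS
    if (AS != ~0u && AS + 1 <= R.second)
      Out.push_back({AS + 1, R.second}); // keep the part above AS
  }
  return Out;
}

} // namespace noalias_addrspace_sketch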
13401struct AAAllocationInfoImpl :
public AAAllocationInfo {
13402 AAAllocationInfoImpl(
const IRPosition &IRP, Attributor &
A)
13403 : AAAllocationInfo(IRP,
A) {}
13405 std::optional<TypeSize> getAllocatedSize()
const override {
13406 assert(isValidState() &&
"the AA is invalid");
13407 return AssumedAllocatedSize;
13410 std::optional<TypeSize> findInitialAllocationSize(Instruction *
I,
13411 const DataLayout &
DL) {
13414 switch (
I->getOpcode()) {
13415 case Instruction::Alloca: {
13420 return std::nullopt;
13426    const IRPosition &IRP = getIRPosition();
13431      return indicatePessimisticFixpoint();
13433    bool IsKnownNoCapture;
13435            A, this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
13436      return indicatePessimisticFixpoint();
13438    const AAPointerInfo *PI =
13439        A.getOrCreateAAFor<AAPointerInfo>(IRP, *this, DepClassTy::REQUIRED);
13442      return indicatePessimisticFixpoint();
13445      return indicatePessimisticFixpoint();
13447    const DataLayout &DL = A.getDataLayout();
13448    const auto AllocationSize = findInitialAllocationSize(I, DL);
13451    if (!AllocationSize)
13452      return indicatePessimisticFixpoint();
13456    if (*AllocationSize == 0)
13457      return indicatePessimisticFixpoint();
13463      return indicatePessimisticFixpoint();
13465    if (BinSize == 0) {
13466      auto NewAllocationSize = std::make_optional<TypeSize>(0, false);
13467      if (!changeAllocationSize(NewAllocationSize))
13468        return ChangeStatus::UNCHANGED;
13469      return ChangeStatus::CHANGED;
13473    const auto &It = PI->begin();
13476    if (It->first.Offset != 0)
13477      return indicatePessimisticFixpoint();
13479    uint64_t SizeOfBin = It->first.Offset + It->first.Size;
13481    if (SizeOfBin >= *AllocationSize)
13482      return indicatePessimisticFixpoint();
13484    auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8, false);
13486    if (!changeAllocationSize(NewAllocationSize))
13487      return ChangeStatus::UNCHANGED;
13489    return ChangeStatus::CHANGED;
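// Manifest: round the assumed bit size up to whole bytes and, for an alloca,
// build the replacement allocation of NumBytesToAllocate i8 elements right
// after the original instruction.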
13495    assert(isValidState() &&
13496           "Manifest should only be called if the state is valid.");
13500    auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
13502    unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
13504    switch (I->getOpcode()) {
13506    case Instruction::Alloca: {
13510      Type *CharType = Type::getInt8Ty(I->getContext());
13512      auto *NumBytesToValue =
13513          ConstantInt::get(I->getContext(), APInt(32, NumBytesToAllocate));
13516      insertPt = std::next(insertPt);
13517      AllocaInst *NewAllocaInst =
13522      return ChangeStatus::CHANGED;
13530    return ChangeStatus::UNCHANGED;
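// Debug printing plus the bookkeeping helper that records a newly assumed
// allocation size and reports whether anything changed.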
13534  const std::string getAsStr(Attributor *A) const override {
13535    if (!isValidState())
13536      return "allocationinfo(<invalid>)";
13537    return "allocationinfo(" +
13538           (AssumedAllocatedSize == HasNoAllocationSize
13540                : std::to_string(AssumedAllocatedSize->getFixedValue())) +
13545  std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
13549  bool changeAllocationSize(std::optional<TypeSize> Size) {
13550    if (AssumedAllocatedSize == HasNoAllocationSize ||
13551        AssumedAllocatedSize != Size) {
13552      AssumedAllocatedSize = Size;
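// Position-specific wrappers for AAAllocationInfo; the Returned and
// CallSiteArgument variants fall back to a pessimistic fixpoint.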
13559struct AAAllocationInfoFloating : AAAllocationInfoImpl {
13560  AAAllocationInfoFloating(const IRPosition &IRP, Attributor &A)
13561      : AAAllocationInfoImpl(IRP, A) {}
13563  void trackStatistics() const override {
13568struct AAAllocationInfoReturned : AAAllocationInfoImpl {
13569  AAAllocationInfoReturned(const IRPosition &IRP, Attributor &A)
13570      : AAAllocationInfoImpl(IRP, A) {}
13576    (void)indicatePessimisticFixpoint();
13579  void trackStatistics() const override {
13584struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
13585  AAAllocationInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
13586      : AAAllocationInfoImpl(IRP, A) {}
13588  void trackStatistics() const override {
13593struct AAAllocationInfoArgument : AAAllocationInfoImpl {
13594  AAAllocationInfoArgument(const IRPosition &IRP, Attributor &A)
13595      : AAAllocationInfoImpl(IRP, A) {}
13597  void trackStatistics() const override {
13602struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
13603  AAAllocationInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
13604      : AAAllocationInfoImpl(IRP, A) {}
13609    (void)indicatePessimisticFixpoint();
13612  void trackStatistics() const override {
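// The macros below stamp out CLASS::createForPosition(const IRPosition &, Attributor &)
// for each abstract attribute: SWITCH_PK_INV emits an llvm_unreachable case for
// unsupported positions, and SWITCH_PK_CREATE allocates the position-specific
// subclass. As a rough sketch (assuming, purely for illustration, an attribute
// AANoUnwind that uses the function-only variant), the IRP_FUNCTION case would
// expand to roughly: AA = new (A.Allocator) AANoUnwindFunction(IRP, A);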
13661#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13662 case IRPosition::PK: \
13663 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13665#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13666 case IRPosition::PK: \
13667 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13671#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13672 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13673 CLASS *AA = nullptr; \
13674 switch (IRP.getPositionKind()) { \
13675 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13676 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13677 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13678 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13679 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13680 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13681 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13682 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13687#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13688 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13689 CLASS *AA = nullptr; \
13690 switch (IRP.getPositionKind()) { \
13691 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13692 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13693 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13694 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13695 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13696 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13697 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13698 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13703#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13704 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13705 CLASS *AA = nullptr; \
13706 switch (IRP.getPositionKind()) { \
13707 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13709 llvm_unreachable("Cannot create " #CLASS " for position otherthan " #POS \
13715#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13716 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13717 CLASS *AA = nullptr; \
13718 switch (IRP.getPositionKind()) { \
13719 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13720 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13721 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13722 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13723 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13724 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13725 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13726 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13731#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13732 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13733 CLASS *AA = nullptr; \
13734 switch (IRP.getPositionKind()) { \
13735 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13736 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13737 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13738 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13739 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13740 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13741 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13742 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13747#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13748 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13749 CLASS *AA = nullptr; \
13750 switch (IRP.getPositionKind()) { \
13751 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13752 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13753 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13754 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13755 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13756 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13757 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13758 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13810#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13811#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13812#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13813#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13814#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13815#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13816#undef SWITCH_PK_CREATE
13817#undef SWITCH_PK_INV
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
AMDGPU Register Bank Select
This file implements a class to represent arbitrary precision integral constant values and operations...
ReachingDefInfo InstSet & ToRemove
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis false
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
This file defines DenseMapInfo traits for DenseMap.
Machine Check Debug Module
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
static cl::opt< RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode > Mode("regalloc-enable-advisor", cl::Hidden, cl::init(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default), cl::desc("Enable regalloc advisor mode"), cl::values(clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Default, "default", "Default"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Release, "release", "precompiled"), clEnumValN(RegAllocEvictionAdvisorAnalysisLegacy::AdvisorMode::Development, "development", "for training")))
dot regions Print regions of function to dot true view regions View regions of function(with no function bodies)"
Remove Loads Into Fake Uses
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
BaseType
A given derived pointer can have multiple base pointers through phi/selects.
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, ArrayRef< StringLiteral > StandardNames)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
Functions, function parameters, and return types can have attributes to indicate how they should be t...
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Instruction & front() const
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
This is an important class for using LLVM in a threaded context.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
static MemoryEffectsBase readOnly()
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase none()
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
const Use & getOperandUse(unsigned i) const
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
LLVM_ABI LLVMContext & getContext() const
All values hold a context through their type.
static constexpr unsigned MaxAlignmentExponent
The maximum alignment for instructions.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
SetVector< Function * >::iterator I
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Abstract Attribute helper functions.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is a valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
SmallPtrSet< Instruction *, 4 > InstExclusionSetTy
LLVM_ABI bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool hasAssumedIRAttr(Attributor &A, const AbstractAttribute *QueryingAA, const IRPosition &IRP, DepClassTy DepClass, bool &IsKnown, bool IgnoreSubsumingPositions=false, const AAType **AAPtr=nullptr)
Helper to avoid creating an AA for IR Attributes that might already be set.
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
@ Valid
The data is already valid.
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract_or_null(Y &&MD)
Extract a Value from Metadata, if any, allowing null.
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > extract(Y &&MD)
Extract a Value from Metadata.
@ User
could "use" a pointer
NodeAddr< UseNode * > Use
Context & getContext() const
friend class Instruction
Iterator for Instructions in a `BasicBlock.
LLVM_ABI iterator begin() const
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
FunctionAddr VTableAddr Value
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
FunctionAddr VTableAddr uintptr_t uintptr_t Int32Ty
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool isa_and_nonnull(const Y &Val)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
auto dyn_cast_or_null(const Y &Val)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
@ Success
The lock was released successfully.
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
PotentialValuesState< APInt > PotentialConstantIntValuesState
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
constexpr unsigned BitWidth
ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy
auto pred_begin(const MachineBasicBlock *BB)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
BumpPtrAllocatorImpl<> BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
GenericCycleInfo< SSAContext > CycleInfo
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
----------------—AAIntraFnReachability Attribute-----------------------—
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
const Instruction * From
Start here,.
Reachable Result
and remember if it worked:
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
const ToTy * To
reach this place,
const AA::InstExclusionSetTy * ExclusionSet
without going through any of these instructions,
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves it's defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this possition can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
@ NO_CAPTURE
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
OffsetBinsTy::const_iterator const_bin_iterator
virtual const_bin_iterator begin() const =0
DenseMap< AA::RangeTy, SmallSet< unsigned, 4 > > OffsetBinsTy
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everythign but the instruction.
static unsigned getHashValue(const Access &A)
AAPointerInfo::Access Access
static Access getTombstoneKey()
DenseMapInfo< Instruction * > Base
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
constexpr uint64_t value() const
This is a hole in the type system and should not be abused.
std::function< void( const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
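A hedged sketch of what lambdas matching these two repair callback types could look like; the bodies are placeholders for illustration, not the framework's actual repair logic:

  // Callee-side repair: receives the rewritten function and an iterator to the
  // first replacement argument for the argument being replaced.
  Attributor::ArgumentReplacementInfo::CalleeRepairCBTy CalleeRepairCB =
      [](const Attributor::ArgumentReplacementInfo &ARI, Function &NewFn,
         Function::arg_iterator NewArgIt) {
        // e.g., rewrite uses of ARI.getReplacedArg() in terms of *NewArgIt.
      };
  // Call-site repair: fills in the operands that replace the old argument at
  // one abstract call site.
  Attributor::ArgumentReplacementInfo::ACSRepairCBTy ACSRepairCB =
      [](const Attributor::ArgumentReplacementInfo &ARI, AbstractCallSite ACS,
         SmallVectorImpl<Value *> &NewArgOperands) {
        // e.g., push the value(s) the call site should now pass instead.
      };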
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >( const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
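A hedged sketch of registering such a callback; the names A, IRP, and ReplacementV are assumed to exist in the surrounding code, and the unconditional simplification is purely illustrative:

  // Hypothetical registration: always claim the position simplifies to
  // ReplacementV without relying on assumed information.
  A.registerSimplificationCallback(
      IRP, [ReplacementV](const IRPosition &, const AbstractAttribute *,
                          bool &UsedAssumedInformation) -> std::optional<Value *> {
        return ReplacementV;
      });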
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
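A minimal, hedged example of the bit-wise encoded state; the bit names and the choice of best state are invented for illustration:

  // Hypothetical property bits tracked in one bit-wise integer state.
  enum : uint32_t { NO_READS = 1u << 0, NO_WRITES = 1u << 1 };
  BitIntegerState<uint32_t, NO_READS | NO_WRITES> S;
  S.addKnownBits(NO_READS);          // known bits are also assumed bits
  S.removeAssumedBits(NO_WRITES);    // give up an optimistic assumption
  bool StillNoReads = S.isAssumed(NO_READS);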
Simple wrapper for a single bit (boolean) state.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
AAPointerInfo::Access Access
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
DenseMapInfo< std::pair< const Instruction *, const ToTy * > > PairDMI
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
DenseMapInfo< const AA::InstExclusionSetTy * > InstSetDMI
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given valu...
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing the dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument,...
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argume...
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
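To tie the factory helpers above together, here is a hedged sketch that names a few positions for a function F, an argument Arg of F, and a call site CB, all assumed to be in scope:

  const IRPosition FnPos    = IRPosition::function(F);            // IRP_FUNCTION
  const IRPosition RetPos   = IRPosition::returned(F);            // IRP_RETURNED
  const IRPosition ArgPos   = IRPosition::argument(Arg);          // IRP_ARGUMENT
  const IRPosition CSPos    = IRPosition::callsite_function(CB);  // IRP_CALL_SITE
  const IRPosition CSRetPos = IRPosition::callsite_returned(CB);  // IRP_CALL_SITE_RETURNED
  const IRPosition CSArgPos = IRPosition::callsite_argument(CB, /*ArgNo=*/0);
  assert(CSArgPos.getPositionKind() == IRPosition::IRP_CALL_SITE_ARGUMENT);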
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Helper that allows inserting a new assumption string in the known assumption set by creating a (stati...
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return a universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
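A hedged sketch of querying the explorer around a program point PP; the names Explorer, PP, and I are assumed to exist, and the interpretation in the comments is an assumption for illustration:

  // Hypothetical queries against a MustBeExecutedContextExplorer instance.
  bool NoCallsInContext =
      Explorer.checkForAllContext(PP, [](const Instruction *CtxI) {
        return !isa<CallBase>(CtxI);   // bail out once a call shows up
      });
  // True if I is known to execute whenever PP executes, within the explored
  // context.
  bool IInContext = Explorer.findInContextOf(I, PP);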
bool isValidState() const override
See AbstractState::isValidState(...).
static unsigned MaxPotentialValues
Helper to tie an abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
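A hedged sketch of how a deduction class might tie a state to the attribute interface via StateWrapper; the class name and the elided member functions are illustrative only:

  // Hypothetical attribute skeleton built on StateWrapper<BooleanState, ...>.
  struct AAExampleImpl : public StateWrapper<BooleanState, AbstractAttribute> {
    using Base = StateWrapper<BooleanState, AbstractAttribute>;
    AAExampleImpl(const IRPosition &IRP, Attributor &A) : Base(IRP) {}
    // getState() is provided by StateWrapper; the usual initialize(),
    // updateImpl(), and manifest() overrides would follow here.
  };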
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.
std::optional< Value * > SimplifiedAssociatedValue
An assumed simplified value.
Type * Ty
The type of the original value.