54#include "llvm/IR/IntrinsicsAMDGPU.h"
55#include "llvm/IR/IntrinsicsNVPTX.h"
82#define DEBUG_TYPE "attributor"
86 cl::desc(
"Manifest Attributor internal string attributes."),
99 cl::desc(
"Maximum number of potential values to be "
100 "tracked for each position."),
105 "attributor-max-potential-values-iterations",
cl::Hidden,
107 "Maximum number of iterations we keep dismantling potential values."),
110STATISTIC(NumAAs,
"Number of abstract attributes created");
111STATISTIC(NumIndirectCallsPromoted,
"Number of indirect calls promoted");
#define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
  ("Number of " #TYPE " marked '" #NAME "'")
#define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
#define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
#define STATS_DECL(NAME, TYPE, MSG) \
  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
#define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
#define STATS_DECLTRACK(NAME, TYPE, MSG) \
  {STATS_DECL(NAME, TYPE, MSG) STATS_TRACK(NAME, TYPE)}
#define STATS_DECLTRACK_ARG_ATTR(NAME) \
  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
#define STATS_DECLTRACK_CSARG_ATTR(NAME) \
  STATS_DECLTRACK(NAME, CSArguments, \
                  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
#define STATS_DECLTRACK_FN_ATTR(NAME) \
  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
#define STATS_DECLTRACK_CS_ATTR(NAME) \
  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
#define STATS_DECLTRACK_FNRET_ATTR(NAME) \
  STATS_DECLTRACK(NAME, FunctionReturn, \
                  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
#define STATS_DECLTRACK_CSRET_ATTR(NAME) \
  STATS_DECLTRACK(NAME, CSReturn, \
                  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
#define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
  STATS_DECLTRACK(NAME, Floating, \
                  ("Number of floating values known to be '" #NAME "'"))
#define PIPE_OPERATOR(CLASS) \
  raw_ostream &operator<<(raw_ostream &OS, const CLASS &AA) { \
    return OS << static_cast<const AbstractAttribute &>(AA); \
  }
                         bool HeaderOnly, Cycle **CPtr = nullptr) {
  auto *BB = I->getParent();
  return !HeaderOnly || BB == C->getHeader();

  if (DL.getTypeSizeInBits(Ty) != DL.getTypeAllocSizeInBits(Ty))
    StartPos += DL.getTypeAllocSizeInBits(ElTy);

                                 bool AllowVolatile) {
  if (!AllowVolatile && I->isVolatile())
    return LI->getPointerOperand();
    return SI->getPointerOperand();
    return CXI->getPointerOperand();
    return RMWI->getPointerOperand();
                                       bool GetMinOffset, bool AllowNonInbounds,
                                       bool UseAssumed = false) {
  auto AttributorAnalysis = [&](Value &V, APInt &ROffset) -> bool {
    if (!ValueConstantRangeAA)
    if (Range.isFullSet())
      ROffset = Range.getSignedMin();
      ROffset = Range.getSignedMax();

                            const Value *Ptr, int64_t &BytesOffset,
                            true, AllowNonInbounds);
template <typename AAType, typename StateType = typename AAType::StateType,
          bool RecurseForSelectAndPHI = true>
                                     Attributor &A, const AAType &QueryingAA, StateType &S,
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
                    << QueryingAA << " into " << S << "\n");

  assert((QueryingAA.getIRPosition().getPositionKind() ==
          QueryingAA.getIRPosition().getPositionKind() ==
         "Can only clamp returned value states for a function returned or call "
         "site returned position!");

  std::optional<StateType> T;

  auto CheckReturnValue = [&](Value &RV) -> bool {
                      << " AA: " << AA->getAsStr(&A) << " @ " << RVPos << "\n");
    const StateType &AAS = AA->getState();
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
    return T->isValidState();

  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA,
                                   RecurseForSelectAndPHI))
    S.indicatePessimisticFixpoint();
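// Minimal usage sketch (assumption, not verbatim from this file): a derived AA
// whose function-return state should be no better than any individual returned
// value would call the helper like this from its updateImpl:
//
//   StateType S(StateType::getBestState(this->getState()));
//   clampReturnedValueStates<AAType, StateType>(A, *this, S);
//   // S now holds the meet over all returned values; if any returned value
//   // could not be analyzed, S was pessimistically fixed above.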
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool PropagateCallBaseContext = false,
          bool RecurseForSelectAndPHI = true>
struct AAReturnedFromReturnedValues : public BaseType {
  AAReturnedFromReturnedValues(const IRPosition &IRP, Attributor &A)

    StateType S(StateType::getBestState(this->getState()));
                             RecurseForSelectAndPHI>(
        PropagateCallBaseContext ? this->getCallBaseContext() : nullptr);
template <typename AAType, typename StateType = typename AAType::StateType,
static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA,
  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
                    << QueryingAA << " into " << S << "\n");

  assert(QueryingAA.getIRPosition().getPositionKind() ==
         "Can only clamp call site argument states for an argument position!");

  std::optional<StateType> T;

  unsigned ArgNo = QueryingAA.getIRPosition().getCallSiteArgNo();

    LLVM_DEBUG(dbgs() << "[Attributor] ACS: " << *ACS.getInstruction()
                      << " AA: " << AA->getAsStr(&A) << " @" << ACSArgPos
    const StateType &AAS = AA->getState();
      T = StateType::getBestState(AAS);
    LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
    return T->isValidState();

  bool UsedAssumedInformation = false;
  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true,
                              UsedAssumedInformation))
    S.indicatePessimisticFixpoint();
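// Sketch of the intended use (assumption): an argument AA clamps its state
// against the corresponding call site arguments of all callers, e.g.
//
//   StateType S = StateType::getBestState(this->getState());
//   clampCallSiteArgumentStates<AAType, StateType>(A, *this, S);
//   // S is the meet over every known call site argument; unknown callers
//   // force a pessimistic fixpoint via checkForAllCallSites above.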
template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
bool getArgumentStateFromCallBaseContext(Attributor &A,
         "Expected an 'argument' position!");
  assert(ArgNo >= 0 && "Invalid Arg No!");
  const StateType &CBArgumentState =
      static_cast<const StateType &>(AA->getState());
  LLVM_DEBUG(dbgs() << "[Attributor] Bridging call site context to argument"
                    << "Position:" << Pos << "CB Arg state:" << CBArgumentState
  State ^= CBArgumentState;

template <typename AAType, typename BaseType,
          typename StateType = typename AAType::StateType,
          bool BridgeCallBaseContext = false,
struct AAArgumentFromCallSiteArguments : public BaseType {
  AAArgumentFromCallSiteArguments(const IRPosition &IRP, Attributor &A)

    StateType S = StateType::getBestState(this->getState());

    if (BridgeCallBaseContext) {
      getArgumentStateFromCallBaseContext<AAType, BaseType, StateType,
          A, *this, this->getIRPosition(), S);
    clampCallSiteArgumentStates<AAType, StateType, IRAttributeKind>(A, *this,
template <typename AAType, typename BaseType,
          typename StateType = typename BaseType::StateType,
          bool IntroduceCallBaseContext = false,
struct AACalleeToCallSite : public BaseType {
  AACalleeToCallSite(const IRPosition &IRP, Attributor &A) : BaseType(IRP, A) {}

    auto IRPKind = this->getIRPosition().getPositionKind();
           "Can only wrap function returned positions for call site "
           "returned positions!");
    auto &S = this->getState();

    if (IntroduceCallBaseContext)
      LLVM_DEBUG(dbgs() << "[Attributor] Introducing call base context:" << CB

    for (const Function *Callee : Callees) {
                                  IntroduceCallBaseContext ? &CB : nullptr)
                *Callee, IntroduceCallBaseContext ? &CB : nullptr);
      if (Attribute::isEnumAttrKind(IRAttributeKind)) {
                A, this, FnPos, DepClassTy::REQUIRED, IsKnown))
            A.getAAFor<AAType>(*this, FnPos, DepClassTy::REQUIRED);
      if (S.isAtFixpoint())
        return S.isValidState();

    if (!A.checkForAllCallees(CalleePred, *this, CB))
      return S.indicatePessimisticFixpoint();
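// Usage sketch (assumption): wrapping a function-level AA so its result is
// reused at call sites is a one-liner with this adaptor, mirroring the call
// site classes defined later in this file, e.g.
//
//   struct AANoUnwindCallSite final
//       : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
//     AANoUnwindCallSite(const IRPosition &IRP, Attributor &A)
//         : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP, A) {}
//   };
//
// The adaptor queries the callee position(s) and clamps the call site state.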
template <class AAType, typename StateType = typename AAType::StateType>
  auto EIt = Explorer.begin(CtxI), EEnd = Explorer.end(CtxI);
  for (unsigned u = 0; u < Uses.size(); ++u) {
      if (Found && AA.followUseInMBEC(A, U, UserI, State))

template <class AAType, typename StateType = typename AAType::StateType>
static void followUsesInMBEC(AAType &AA, Attributor &A, StateType &S,
  const Value &Val = AA.getIRPosition().getAssociatedValue();
      A.getInfoCache().getMustBeExecutedContextExplorer();

  for (const Use &U : Val.uses())

  followUsesInContext<AAType>(AA, A, *Explorer, &CtxI, Uses, S);

  if (S.isAtFixpoint())

  if (Br->isConditional())

  StateType ParentState;
  ParentState.indicateOptimisticFixpoint();

  for (const BasicBlock *BB : Br->successors()) {
    StateType ChildState;

    size_t BeforeSize = Uses.size();
    followUsesInContext(AA, A, *Explorer, &BB->front(), Uses, ChildState);

    for (auto It = Uses.begin() + BeforeSize; It != Uses.end();)

    ParentState &= ChildState;
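// Informal sketch of the logic above (assumption): for a conditional branch in
// the must-be-executed context, each successor is explored separately and the
// parent state only keeps facts that hold on *every* path:
//
//   StateType ParentState;                 // optimistic start
//   ParentState.indicateOptimisticFixpoint();
//   for (const BasicBlock *Succ : Br->successors()) {
//     StateType ChildState;                // facts derived on this path
//     ParentState &= ChildState;           // intersect ("and") the states
//   }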
    R.indicatePessimisticFixpoint();
    BS.indicateOptimisticFixpoint();
    BS.indicatePessimisticFixpoint();

  template <typename F>
      if (!Range.mayOverlap(ItRange))
      bool IsExact = Range == ItRange && !Range.offsetOrSizeAreUnknown();
      for (auto Index : It.getSecond()) {

  template <typename F>
    for (unsigned Index : LocalList->getSecond()) {
      if (Range.offsetAndSizeAreUnknown())

    RemoteI = RemoteI ? RemoteI : &I;

    bool AccExists = false;
    for (auto Index : LocalList) {
      if (A.getLocalInst() == &I) {
               << "[AAPointerInfo] Inserting access in new offset bins\n";);
      for (auto Key : ToAdd) {
      AccessList.emplace_back(&I, RemoteI, Ranges, Content, Kind, Ty);
             "New Access should have been at AccIndex");
      LocalList.push_back(AccIndex);

    auto Before = Current;
    if (Current == Before)
    auto &ExistingRanges = Before.getRanges();
    auto &NewRanges = Current.getRanges();
               << "[AAPointerInfo] Removing access from old offset bins\n";);
             "Expected bin to actually contain the Access.");
      Bin.erase(AccIndex);
struct AAPointerInfoImpl
    : public StateWrapper<AA::PointerInfo::State, AAPointerInfo> {

  const std::string getAsStr(Attributor *A) const override {
    return std::string("PointerInfo ") +
           (isValidState() ? (std::string("#") +
                              std::to_string(OffsetBins.size()) + " bins")
                            [](int64_t O) { return std::to_string(O); }),

    return AAPointerInfo::manifest(A);

  const_bin_iterator begin() const override { return State::begin(); }
  const_bin_iterator end() const override { return State::end(); }
  int64_t numOffsetBins() const override { return State::numOffsetBins(); }
  bool reachesReturn() const override {
    return !ReturnedOffsets.isUnassigned();

  void addReturnedOffsetsTo(OffsetInfo &OI) const override {
    if (ReturnedOffsets.isUnknown()) {
    OffsetInfo MergedOI;
    for (auto Offset : ReturnedOffsets) {
      OffsetInfo TmpOI = OI;
      MergedOI.merge(TmpOI);
    OI = std::move(MergedOI);

  ChangeStatus setReachesReturn(const OffsetInfo &ReachedReturnedOffsets) {
    if (ReturnedOffsets.isUnknown())
      return ChangeStatus::UNCHANGED;
    if (ReachedReturnedOffsets.isUnknown()) {
      ReturnedOffsets.setUnknown();
      return ChangeStatus::CHANGED;
    if (ReturnedOffsets.merge(ReachedReturnedOffsets))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
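  // Behavior sketch for setReachesReturn, derived from the code above
  // (informal notation): the returned-offset set only ever grows, e.g.
  //
  //   setReachesReturn(unknown)  -> ReturnedOffsets becomes unknown, CHANGED
  //   setReachesReturn({0, 8})   -> merged into the set, CHANGED if it grew
  //   // once ReturnedOffsets is unknown, further updates return UNCHANGED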
  bool forallInterferingAccesses(
      function_ref<bool(const AAPointerInfo::Access &, bool)> CB)
    return State::forallInterferingAccesses(Range, CB);

  bool forallInterferingAccesses(
      Attributor &A, const AbstractAttribute &QueryingAA, Instruction &I,
      bool FindInterferingWrites, bool FindInterferingReads,
      function_ref<bool(const Access &, bool)> UserCB, bool &HasBeenWrittenTo,
      function_ref<bool(const Access &)> SkipCB) const override {
    HasBeenWrittenTo = false;

    SmallPtrSet<const Access *, 8> DominatingWrites;

    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
    bool AllInSameNoSyncFn = IsAssumedNoSync;
    bool InstIsExecutedByInitialThreadOnly =
        ExecDomainAA && ExecDomainAA->isExecutedByInitialThreadOnly(I);

    bool InstIsExecutedInAlignedRegion =
        FindInterferingReads && ExecDomainAA &&
        ExecDomainAA->isExecutedInAlignedRegion(A, I);

    if (InstIsExecutedInAlignedRegion || InstIsExecutedByInitialThreadOnly)
      A.recordDependence(*ExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    InformationCache &InfoCache = A.getInfoCache();
    bool IsThreadLocalObj =

    auto CanIgnoreThreadingForInst = [&](const Instruction &I) -> bool {
      if (IsThreadLocalObj || AllInSameNoSyncFn)
      const auto *FnExecDomainAA =
          I.getFunction() == &Scope
              : A.lookupAAFor<AAExecutionDomain>(
      if (!FnExecDomainAA)
      if (InstIsExecutedInAlignedRegion ||
          (FindInterferingWrites &&
           FnExecDomainAA->isExecutedInAlignedRegion(A, I))) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);
      if (InstIsExecutedByInitialThreadOnly &&
          FnExecDomainAA->isExecutedByInitialThreadOnly(I)) {
        A.recordDependence(*FnExecDomainAA, QueryingAA, DepClassTy::OPTIONAL);

    auto CanIgnoreThreading = [&](const Access &Acc) -> bool {
      return CanIgnoreThreadingForInst(*Acc.getRemoteInst()) ||
             (Acc.getRemoteInst() != Acc.getLocalInst() &&
              CanIgnoreThreadingForInst(*Acc.getLocalInst()));
    bool IsKnownNoRecurse;
    bool InstInKernel = A.getInfoCache().isKernel(Scope);
    bool ObjHasKernelLifetime = false;
    const bool UseDominanceReasoning =
        FindInterferingWrites && IsKnownNoRecurse;
    const DominatorTree *DT =

      case AA::GPUAddressSpace::Shared:
      case AA::GPUAddressSpace::Constant:
      case AA::GPUAddressSpace::Local:

    std::function<bool(const Function &)> IsLiveInCalleeCB;
      const Function *AIFn = AI->getFunction();
      ObjHasKernelLifetime = A.getInfoCache().isKernel(*AIFn);
      bool IsKnownNoRecurse;
              IsKnownNoRecurse)) {
        IsLiveInCalleeCB = [AIFn](const Function &Fn) { return AIFn != &Fn; };
      ObjHasKernelLifetime = HasKernelLifetime(GV, *GV->getParent());
      if (ObjHasKernelLifetime)
        IsLiveInCalleeCB = [&A](const Function &Fn) {
          return !A.getInfoCache().isKernel(Fn);

    auto AccessCB = [&](const Access &Acc, bool Exact) {
      Function *AccScope = Acc.getRemoteInst()->getFunction();
      bool AccInSameScope = AccScope == &Scope;

      if (InstInKernel && ObjHasKernelLifetime && !AccInSameScope &&
          A.getInfoCache().isKernel(*AccScope))

      if (Exact && Acc.isMustAccess() && Acc.getRemoteInst() != &I) {
        if (Acc.isWrite() || (isa<LoadInst>(I) && Acc.isWriteOrAssumption()))
          ExclusionSet.insert(Acc.getRemoteInst());

      if ((!FindInterferingWrites || !Acc.isWriteOrAssumption()) &&
          (!FindInterferingReads || !Acc.isRead()))

      bool Dominates = FindInterferingWrites && DT && Exact &&
                       Acc.isMustAccess() && AccInSameScope &&
        DominatingWrites.insert(&Acc);

      AllInSameNoSyncFn &= Acc.getRemoteInst()->getFunction() == &Scope;

      InterferingAccesses.push_back({&Acc, Exact});

    if (!State::forallInterferingAccesses(I, AccessCB, Range))

    HasBeenWrittenTo = !DominatingWrites.empty();

    for (const Access *Acc : DominatingWrites) {
      if (!LeastDominatingWriteInst) {
        LeastDominatingWriteInst = Acc->getRemoteInst();
      } else if (DT->dominates(LeastDominatingWriteInst,
                               Acc->getRemoteInst())) {
        LeastDominatingWriteInst = Acc->getRemoteInst();

    auto CanSkipAccess = [&](const Access &Acc, bool Exact) {
      if (SkipCB && SkipCB(Acc))
      if (!CanIgnoreThreading(Acc))

      bool ReadChecked = !FindInterferingReads;
      bool WriteChecked = !FindInterferingWrites;
                                     &ExclusionSet, IsLiveInCalleeCB))
      if (!WriteChecked) {
                                       &ExclusionSet, IsLiveInCalleeCB))
          WriteChecked = true;

      if (!WriteChecked && HasBeenWrittenTo &&
          Acc.getRemoteInst()->getFunction() != &Scope) {
        const auto *FnReachabilityAA = A.getAAFor<AAInterFnReachability>(
        if (FnReachabilityAA) {
          if (!FnReachabilityAA->instructionCanReach(
                  A, *LeastDominatingWriteInst,
                  *Acc.getRemoteInst()->getFunction(), &ExclusionSet))
            WriteChecked = true;

      if (ReadChecked && WriteChecked)

      if (!DT || !UseDominanceReasoning)
      if (!DominatingWrites.count(&Acc))
      return LeastDominatingWriteInst != Acc.getRemoteInst();

    for (auto &It : InterferingAccesses) {
      if ((!AllInSameNoSyncFn && !IsThreadLocalObj && !ExecDomainAA) ||
          !CanSkipAccess(*It.first, It.second)) {
        if (!UserCB(*It.first, It.second))
                                             const AAPointerInfo &OtherAA,
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);
    bool IsByval = OtherAAImpl.getAssociatedArgument()->hasByValAttr();
    Changed |= setReachesReturn(OtherAAImpl.ReturnedOffsets);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (IsByval && !RAcc.isRead())
        bool UsedAssumedInformation = false;
        auto Content = A.translateArgumentToCallSiteContent(
            RAcc.getContent(), CB, *this, UsedAssumedInformation);
        AK = AccessKind(AK & (IsByval ? AccessKind::AK_R : AccessKind::AK_RW));
        AK = AccessKind(AK | (RAcc.isMayAccess() ? AK_MAY : AK_MUST));
        Changed |= addAccess(A, RAcc.getRanges(), CB, Content, AK,
                             RAcc.getType(), RAcc.getRemoteInst());

  ChangeStatus translateAndAddState(Attributor &A, const AAPointerInfo &OtherAA,
                                    const OffsetInfo &Offsets, CallBase &CB,
    using namespace AA::PointerInfo;
      return indicatePessimisticFixpoint();

    const auto &OtherAAImpl = static_cast<const AAPointerInfoImpl &>(OtherAA);

    const auto &State = OtherAAImpl.getState();
    for (const auto &It : State) {
      for (auto Index : It.getSecond()) {
        const auto &RAcc = State.getAccess(Index);
        if (!IsMustAcc && RAcc.isAssumption())
        for (auto Offset : Offsets) {
          if (!NewRanges.isUnknown()) {
            NewRanges.addToAllOffsets(Offset);
        Changed |= addAccess(A, NewRanges, CB, RAcc.getContent(), AK,
                             RAcc.getType(), RAcc.getRemoteInst());
  void trackPointerInfoStatistics(const IRPosition &IRP) const {}

  void dumpState(raw_ostream &O) {
    for (auto &It : OffsetBins) {
      O << "[" << It.first.Offset << "-" << It.first.Offset + It.first.Size
        << "] : " << It.getSecond().size() << "\n";
      for (auto AccIndex : It.getSecond()) {
        auto &Acc = AccessList[AccIndex];
        O << " - " << Acc.getKind() << " - " << *Acc.getLocalInst() << "\n";
        if (Acc.getLocalInst() != Acc.getRemoteInst())
          O << " --> " << *Acc.getRemoteInst()
        if (!Acc.isWrittenValueYetUndetermined()) {
            O << " - c: func " << Acc.getWrittenValue()->getName()
          else if (Acc.getWrittenValue())
            O << " - c: " << *Acc.getWrittenValue() << "\n";
            O << " - c: <unknown>\n";
struct AAPointerInfoFloating : public AAPointerInfoImpl {
  AAPointerInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

  bool handleAccess(Attributor &A, Instruction &I,
                    std::optional<Value *> Content, AccessKind Kind,
    using namespace AA::PointerInfo;
    const DataLayout &DL = A.getDataLayout();
    TypeSize AccessSize = DL.getTypeStoreSize(&Ty);
    if (!VT || VT->getElementCount().isScalable() ||
        (*Content)->getType() != VT ||
        DL.getTypeStoreSize(VT->getElementType()).isScalable()) {
      int64_t ElementSize = DL.getTypeStoreSize(ElementType).getFixedValue();
      for (int i = 0, e = VT->getElementCount().getFixedValue(); i != e; ++i) {
            ConstContent, ConstantInt::get(Int32Ty, i));
        for (auto &ElementOffset : ElementOffsets)
          ElementOffset += ElementSize;

  bool collectConstantsForGEP(Attributor &A, const DataLayout &DL,
                              OffsetInfo &UsrOI, const OffsetInfo &PtrOI,
                              const GEPOperator *GEP);

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

bool AAPointerInfoFloating::collectConstantsForGEP(Attributor &A,
                                                   const DataLayout &DL,
                                                   const OffsetInfo &PtrOI,
                                                   const GEPOperator *GEP) {
  unsigned BitWidth = DL.getIndexTypeSizeInBits(GEP->getType());
  SmallMapVector<Value *, APInt, 4> VariableOffsets;

  assert(!UsrOI.isUnknown() && !PtrOI.isUnknown() &&
         "Don't look for constant values if the offset has already been "
         "determined to be unknown.");

  if (!GEP->collectOffset(DL, BitWidth, VariableOffsets, ConstantOffset)) {
                    << (VariableOffsets.empty() ? "" : "not") << " constant "

  Union.addToAll(ConstantOffset.getSExtValue());

  for (const auto &VI : VariableOffsets) {
    auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
    if (!PotentialConstantsAA || !PotentialConstantsAA->isValidState()) {
    if (PotentialConstantsAA->undefIsContained())

    auto &AssumedSet = PotentialConstantsAA->getAssumedSet();
    if (AssumedSet.empty())

    for (const auto &ConstOffset : AssumedSet) {
      auto CopyPerOffset = Union;
      CopyPerOffset.addToAll(ConstOffset.getSExtValue() *
                             VI.second.getZExtValue());
      Product.merge(CopyPerOffset);

  UsrOI = std::move(Union);
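// Worked example for collectConstantsForGEP (illustrative, values assumed):
// for a GEP with a constant part of 4 and one variable index with assumed
// values {0, 2} and a type scale of 8 bytes, the resulting user offsets are
// the cross product
//
//   {PtrOffset + 4 + 0*8, PtrOffset + 4 + 2*8} == {PtrOffset+4, PtrOffset+20}
//
// i.e., every incoming offset is extended by every constant the index may take.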
ChangeStatus AAPointerInfoFloating::updateImpl(Attributor &A) {
  using namespace AA::PointerInfo;
  const DataLayout &DL = A.getDataLayout();
  Value &AssociatedValue = getAssociatedValue();

  DenseMap<Value *, OffsetInfo> OffsetInfoMap;
  OffsetInfoMap[&AssociatedValue].insert(0);

  auto HandlePassthroughUser = [&](Value *Usr, Value *CurPtr, bool &Follow) {
           "CurPtr does not exist in the map!");

    auto &UsrOI = OffsetInfoMap[Usr];
    auto &PtrOI = OffsetInfoMap[CurPtr];
    assert(!PtrOI.isUnassigned() &&
           "Cannot pass through if the input Ptr was not visited!");

  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    User *Usr = U.getUser();
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Analyze " << *CurPtr << " in " << *Usr
           "The current pointer offset should have been seeded!");
    assert(!OffsetInfoMap[CurPtr].isUnassigned() &&
           "Current pointer should be assigned");

      return HandlePassthroughUser(Usr, CurPtr, Follow);
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled constant user " << *CE

      auto &UsrOI = OffsetInfoMap[Usr];
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (UsrOI.isUnknown())
      if (PtrOI.isUnknown()) {

      Follow = collectConstantsForGEP(A, DL, UsrOI, PtrOI, GEP);
      return HandlePassthroughUser(Usr, CurPtr, Follow);

      if (RI->getFunction() == getAssociatedFunction()) {
        auto &PtrOI = OffsetInfoMap[CurPtr];
        Changed |= setReachesReturn(PtrOI);

      auto &UsrOI = PhiIt->second;
      auto &PtrOI = OffsetInfoMap[CurPtr];

      if (PtrOI.isUnknown()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand offset unknown "
                          << *CurPtr << " in " << *PHI << "\n");
        Follow = !UsrOI.isUnknown();

      if (UsrOI == PtrOI) {
        assert(!PtrOI.isUnassigned() &&
               "Cannot assign if the current Ptr was not visited!");
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant (so far)");

      auto It = OffsetInfoMap.find(CurPtrBase);
      if (It == OffsetInfoMap.end()) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI operand is too complex "
                          << *CurPtr << " in " << *PHI
                          << " (base: " << *CurPtrBase << ")\n");

          A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
              *PHI->getFunction());

      auto BaseOI = It->getSecond();
      BaseOI.addToAll(Offset.getZExtValue());
      if (IsFirstPHIUser || BaseOI == UsrOI) {
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] PHI is invariant " << *CurPtr
                          << " in " << *Usr << "\n");
        return HandlePassthroughUser(Usr, CurPtr, Follow);

          dbgs() << "[AAPointerInfo] PHI operand pointer offset mismatch "
                 << *CurPtr << " in " << *PHI << "\n");
      if (!handleAccess(A, *LoadI, nullptr, AK,
                        OffsetInfoMap[CurPtr].Offsets, Changed,

        return II->isAssumeLikeIntrinsic();
      } while (FromI && FromI != ToI);

      auto IsValidAssume = [&](IntrinsicInst &IntrI) {
        if (IntrI.getIntrinsicID() != Intrinsic::assume)
        if (IntrI.getParent() == BB) {
          if (IsImpactedInRange(LoadI->getNextNode(), &IntrI))
          if ((*PredIt) != BB)
          if (SuccBB == IntrBB)
          if (IsImpactedInRange(LoadI->getNextNode(), BB->getTerminator()))
          if (IsImpactedInRange(&IntrBB->front(), &IntrI))

      std::pair<Value *, IntrinsicInst *> Assumption;
      for (const Use &LoadU : LoadI->uses()) {
          if (!CmpI->isEquality() || !CmpI->isTrueWhenEqual())
          for (const Use &CmpU : CmpI->uses()) {
              if (!IsValidAssume(*IntrI))
              int Idx = CmpI->getOperandUse(0) == LoadU;
              Assumption = {CmpI->getOperand(Idx), IntrI};
        if (Assumption.first)

      if (!Assumption.first || !Assumption.second)
                        << *Assumption.second << ": " << *LoadI
                        << " == " << *Assumption.first << "\n");
      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
      if (Assumption.first)
            A.getAssumedSimplified(*Assumption.first, *this,
      return handleAccess(
          A, *Assumption.second, Content, AccessKind::AK_ASSUMPTION,
          OffsetInfoMap[CurPtr].Offsets, Changed, *LoadI->getType());

      for (auto *OtherOp : OtherOps) {
        if (OtherOp == CurPtr) {
              << "[AAPointerInfo] Escaping use in store like instruction " << I

      bool UsedAssumedInformation = false;
      std::optional<Value *> Content = nullptr;
        Content = A.getAssumedSimplified(
      return handleAccess(A, I, Content, AK, OffsetInfoMap[CurPtr].Offsets,

      return HandleStoreLike(*StoreI, StoreI->getValueOperand(),
                             *StoreI->getValueOperand()->getType(),
                             {StoreI->getValueOperand()}, AccessKind::AK_W);
      return HandleStoreLike(*RMWI, nullptr, *RMWI->getValOperand()->getType(),
                             {RMWI->getValOperand()}, AccessKind::AK_RW);
      return HandleStoreLike(
          *CXI, nullptr, *CXI->getNewValOperand()->getType(),
          {CXI->getCompareOperand(), CXI->getNewValOperand()},

          A.getInfoCache().getTargetLibraryInfoForFunction(*CB->getFunction());
      const auto *CSArgPI = A.getAAFor<AAPointerInfo>(
      Changed = translateAndAddState(A, *CSArgPI, OffsetInfoMap[CurPtr], *CB,
      if (!CSArgPI->reachesReturn())
        return isValidState();

      if (!Callee || Callee->arg_size() <= ArgNo)
      bool UsedAssumedInformation = false;
      auto ReturnedValue = A.getAssumedSimplified(
      auto *Arg = Callee->getArg(ArgNo);
      if (ReturnedArg && Arg != ReturnedArg)
      bool IsRetMustAcc = IsArgMustAcc && (ReturnedArg == Arg);
      const auto *CSRetPI = A.getAAFor<AAPointerInfo>(
      OffsetInfo OI = OffsetInfoMap[CurPtr];
      CSArgPI->addReturnedOffsetsTo(OI);
                translateAndAddState(A, *CSRetPI, OI, *CB, IsRetMustAcc) |
                Changed;
      return isValidState();
      LLVM_DEBUG(dbgs() << "[AAPointerInfo] Call user not handled " << *CB
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] User not handled " << *Usr << "\n");

  auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
    assert(OffsetInfoMap.count(OldU) && "Old use should be known already!");
    assert(!OffsetInfoMap[OldU].isUnassigned() && "Old use should be assigned");
    if (OffsetInfoMap.count(NewU)) {
      if (!(OffsetInfoMap[NewU] == OffsetInfoMap[OldU])) {
        dbgs() << "[AAPointerInfo] Equivalent use callback failed: "
               << OffsetInfoMap[NewU] << " vs " << OffsetInfoMap[OldU]
      return OffsetInfoMap[NewU] == OffsetInfoMap[OldU];
    return HandlePassthroughUser(NewU.get(), OldU.get(), Unused);

  if (!A.checkForAllUses(UsePred, *this, AssociatedValue,
                         true, EquivalentUseCB)) {
    LLVM_DEBUG(dbgs() << "[AAPointerInfo] Check for all uses failed, abort!\n");
    return indicatePessimisticFixpoint();

    dbgs() << "Accesses by bin after update:\n";
struct AAPointerInfoReturned final : AAPointerInfoImpl {
  AAPointerInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoImpl(IRP, A) {}

    return indicatePessimisticFixpoint();

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoArgument final : AAPointerInfoFloating {
  AAPointerInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoCallSiteArgument final : AAPointerInfoFloating {
  AAPointerInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

    using namespace AA::PointerInfo;
      if (auto Length = MI->getLengthInBytes())
        LengthVal = Length->getSExtValue();
      unsigned ArgNo = getIRPosition().getCallSiteArgNo();
        LLVM_DEBUG(dbgs() << "[AAPointerInfo] Unhandled memory intrinsic "
        return indicatePessimisticFixpoint();
          ArgNo == 0 ? AccessKind::AK_MUST_WRITE : AccessKind::AK_MUST_READ;
          Changed | addAccess(A, {0, LengthVal}, *MI, nullptr, Kind, nullptr);

        dbgs() << "Accesses by bin after update:\n";

    Argument *Arg = getAssociatedArgument();
          A.getAAFor<AAPointerInfo>(*this, ArgPos, DepClassTy::REQUIRED);
      if (ArgAA && ArgAA->getState().isValidState())
        return translateAndAddStateFromCallee(A, *ArgAA,
      return indicatePessimisticFixpoint();

    bool IsKnownNoCapture;
            A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoCapture))
      return indicatePessimisticFixpoint();

    bool IsKnown = false;
      return ChangeStatus::UNCHANGED;
        ReadOnly ? AccessKind::AK_MAY_READ : AccessKind::AK_MAY_READ_WRITE;

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());

struct AAPointerInfoCallSiteReturned final : AAPointerInfoFloating {
  AAPointerInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPointerInfoFloating(IRP, A) {}

  void trackStatistics() const override {
    AAPointerInfoImpl::trackPointerInfoStatistics(getIRPosition());
struct AANoUnwindImpl : AANoUnwind {
  AANoUnwindImpl(const IRPosition &IRP, Attributor &A) : AANoUnwind(IRP, A) {}

        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));

  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "nounwind" : "may-unwind";

        (unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr,
        (unsigned)Instruction::Call, (unsigned)Instruction::CleanupRet,
        (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};

      if (!I.mayThrow(true))

      bool IsKnownNoUnwind;

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes,
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;

struct AANoUnwindFunction final : public AANoUnwindImpl {
  AANoUnwindFunction(const IRPosition &IRP, Attributor &A)
      : AANoUnwindImpl(IRP, A) {}

struct AANoUnwindCallSite final
    : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl> {
  AANoUnwindCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUnwind, AANoUnwindImpl>(IRP, A) {}
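// Sketch of the deduction above (assumption): a function is `nounwind` if no
// instruction that may throw survives the scan, which is driven by
//
//   A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes,
//                             UsedAssumedInformation);
//
// where Opcodes restricts the scan to Invoke/CallBr/Call/CleanupRet/
// CatchSwitch/Resume, and CheckForNoUnwind accepts instructions that either
// cannot throw (`!I.mayThrow(...)`) or call a known/assumed nounwind callee.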
2150 case Intrinsic::nvvm_barrier_cta_sync_aligned_all:
2151 case Intrinsic::nvvm_barrier_cta_sync_aligned_count:
2152 case Intrinsic::nvvm_barrier_cta_red_and_aligned_all:
2153 case Intrinsic::nvvm_barrier_cta_red_and_aligned_count:
2154 case Intrinsic::nvvm_barrier_cta_red_or_aligned_all:
2155 case Intrinsic::nvvm_barrier_cta_red_or_aligned_count:
2156 case Intrinsic::nvvm_barrier_cta_red_popc_aligned_all:
2157 case Intrinsic::nvvm_barrier_cta_red_popc_aligned_count:
2159 case Intrinsic::amdgcn_s_barrier:
2160 if (ExecutedAligned)
2183 switch (
I->getOpcode()) {
2184 case Instruction::AtomicRMW:
2187 case Instruction::Store:
2190 case Instruction::Load:
2195 "New atomic operations need to be known in the attributor.");
2207 return !
MI->isVolatile();
2223 const std::string getAsStr(Attributor *
A)
const override {
2224 return getAssumed() ?
"nosync" :
"may-sync";
2240 if (
I.mayReadOrWriteMemory())
2254 bool UsedAssumedInformation =
false;
2255 if (!
A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *
this,
2256 UsedAssumedInformation) ||
2257 !
A.checkForAllCallLikeInstructions(CheckForNoSync, *
this,
2258 UsedAssumedInformation))
2259 return indicatePessimisticFixpoint();
2264struct AANoSyncFunction final :
public AANoSyncImpl {
2265 AANoSyncFunction(
const IRPosition &IRP, Attributor &
A)
2266 : AANoSyncImpl(IRP,
A) {}
2273struct AANoSyncCallSite final : AACalleeToCallSite<AANoSync, AANoSyncImpl> {
2274 AANoSyncCallSite(
const IRPosition &IRP, Attributor &
A)
2275 : AACalleeToCallSite<AANoSync, AANoSyncImpl>(IRP,
A) {}
2285struct AANoFreeImpl :
public AANoFree {
2286 AANoFreeImpl(
const IRPosition &IRP, Attributor &
A) : AANoFree(IRP,
A) {}
2292 DepClassTy::NONE, IsKnown));
2302 DepClassTy::REQUIRED, IsKnown);
2305 bool UsedAssumedInformation =
false;
2306 if (!
A.checkForAllCallLikeInstructions(CheckForNoFree, *
this,
2307 UsedAssumedInformation))
2308 return indicatePessimisticFixpoint();
2309 return ChangeStatus::UNCHANGED;
2313 const std::string getAsStr(Attributor *
A)
const override {
2314 return getAssumed() ?
"nofree" :
"may-free";
2318struct AANoFreeFunction final :
public AANoFreeImpl {
2319 AANoFreeFunction(
const IRPosition &IRP, Attributor &
A)
2320 : AANoFreeImpl(IRP,
A) {}
2327struct AANoFreeCallSite final : AACalleeToCallSite<AANoFree, AANoFreeImpl> {
2328 AANoFreeCallSite(
const IRPosition &IRP, Attributor &
A)
2329 : AACalleeToCallSite<AANoFree, AANoFreeImpl>(IRP,
A) {}
2336struct AANoFreeFloating : AANoFreeImpl {
2337 AANoFreeFloating(
const IRPosition &IRP, Attributor &
A)
2338 : AANoFreeImpl(IRP,
A) {}
2345 const IRPosition &IRP = getIRPosition();
2350 DepClassTy::OPTIONAL, IsKnown))
2351 return ChangeStatus::UNCHANGED;
2353 Value &AssociatedValue = getIRPosition().getAssociatedValue();
2354 auto Pred = [&](
const Use &
U,
bool &Follow) ->
bool {
2366 DepClassTy::REQUIRED, IsKnown);
2383 if (!
A.checkForAllUses(Pred, *
this, AssociatedValue))
2384 return indicatePessimisticFixpoint();
2386 return ChangeStatus::UNCHANGED;
2391struct AANoFreeArgument final : AANoFreeFloating {
2392 AANoFreeArgument(
const IRPosition &IRP, Attributor &
A)
2393 : AANoFreeFloating(IRP,
A) {}
2400struct AANoFreeCallSiteArgument final : AANoFreeFloating {
2401 AANoFreeCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2402 : AANoFreeFloating(IRP,
A) {}
2410 Argument *Arg = getAssociatedArgument();
2412 return indicatePessimisticFixpoint();
2416 DepClassTy::REQUIRED, IsKnown))
2417 return ChangeStatus::UNCHANGED;
2418 return indicatePessimisticFixpoint();
2426struct AANoFreeReturned final : AANoFreeFloating {
2427 AANoFreeReturned(
const IRPosition &IRP, Attributor &
A)
2428 : AANoFreeFloating(IRP,
A) {
2443 void trackStatistics()
const override {}
2447struct AANoFreeCallSiteReturned final : AANoFreeFloating {
2448 AANoFreeCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2449 : AANoFreeFloating(IRP,
A) {}
2452 return ChangeStatus::UNCHANGED;
2463 bool IgnoreSubsumingPositions) {
2465 AttrKinds.
push_back(Attribute::NonNull);
2468 AttrKinds.
push_back(Attribute::Dereferenceable);
2469 if (
A.hasAttr(IRP, AttrKinds, IgnoreSubsumingPositions, Attribute::NonNull))
2476 if (!Fn->isDeclaration()) {
2486 bool UsedAssumedInformation =
false;
2487 if (!
A.checkForAllInstructions(
2489 Worklist.push_back({*cast<ReturnInst>(I).getReturnValue(), &I});
2493 UsedAssumedInformation,
false,
true))
2505 Attribute::NonNull)});
2510static int64_t getKnownNonNullAndDerefBytesForUse(
2511 Attributor &
A,
const AbstractAttribute &QueryingAA,
Value &AssociatedValue,
2512 const Use *U,
const Instruction *
I,
bool &IsNonNull,
bool &TrackUse) {
2515 const Value *UseV =
U->get();
2536 const DataLayout &
DL =
A.getInfoCache().getDL();
2540 U, {Attribute::NonNull, Attribute::Dereferenceable})) {
2557 bool IsKnownNonNull;
2560 IsNonNull |= IsKnownNonNull;
2563 return DerefAA ? DerefAA->getKnownDereferenceableBytes() : 0;
2567 if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() ||
2568 Loc->Size.isScalable() ||
I->isVolatile())
2574 if (
Base &&
Base == &AssociatedValue) {
2575 int64_t DerefBytes = Loc->Size.getValue() +
Offset;
2577 return std::max(int64_t(0), DerefBytes);
2584 int64_t DerefBytes = Loc->Size.getValue();
2586 return std::max(int64_t(0), DerefBytes);
2592struct AANonNullImpl : AANonNull {
2593 AANonNullImpl(
const IRPosition &IRP, Attributor &
A) : AANonNull(IRP,
A) {}
2597 Value &
V = *getAssociatedValue().stripPointerCasts();
2599 indicatePessimisticFixpoint();
2603 if (Instruction *CtxI = getCtxI())
2604 followUsesInMBEC(*
this,
A, getState(), *CtxI);
2608 bool followUseInMBEC(Attributor &
A,
const Use *U,
const Instruction *
I,
2609 AANonNull::StateType &State) {
2610 bool IsNonNull =
false;
2611 bool TrackUse =
false;
2612 getKnownNonNullAndDerefBytesForUse(
A, *
this, getAssociatedValue(), U,
I,
2613 IsNonNull, TrackUse);
2614 State.setKnown(IsNonNull);
2619 const std::string getAsStr(Attributor *
A)
const override {
2620 return getAssumed() ?
"nonnull" :
"may-null";
2625struct AANonNullFloating :
public AANonNullImpl {
2626 AANonNullFloating(
const IRPosition &IRP, Attributor &
A)
2627 : AANonNullImpl(IRP,
A) {}
2631 auto CheckIRP = [&](
const IRPosition &IRP) {
2632 bool IsKnownNonNull;
2634 A, *
this, IRP, DepClassTy::OPTIONAL, IsKnownNonNull);
2638 bool UsedAssumedInformation =
false;
2639 Value *AssociatedValue = &getAssociatedValue();
2641 if (!
A.getAssumedSimplifiedValues(getIRPosition(), *
this, Values,
2646 Values.
size() != 1 || Values.
front().getValue() != AssociatedValue;
2652 return AA::hasAssumedIRAttr<Attribute::NonNull>(
2653 A, this, IRPosition::value(*Op), DepClassTy::OPTIONAL,
2656 return ChangeStatus::UNCHANGED;
2660 DepClassTy::OPTIONAL, IsKnown) &&
2663 DepClassTy::OPTIONAL, IsKnown))
2664 return ChangeStatus::UNCHANGED;
2671 if (AVIRP == getIRPosition() || !CheckIRP(AVIRP))
2672 return indicatePessimisticFixpoint();
2673 return ChangeStatus::UNCHANGED;
2676 for (
const auto &VAC : Values)
2678 return indicatePessimisticFixpoint();
2680 return ChangeStatus::UNCHANGED;
2688struct AANonNullReturned final
2689 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2690 false, AANonNull::IRAttributeKind, false> {
2691 AANonNullReturned(
const IRPosition &IRP, Attributor &
A)
2692 : AAReturnedFromReturnedValues<AANonNull, AANonNull, AANonNull::StateType,
2697 const std::string getAsStr(Attributor *
A)
const override {
2698 return getAssumed() ?
"nonnull" :
"may-null";
2706struct AANonNullArgument final
2707 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
2708 AANonNullArgument(
const IRPosition &IRP, Attributor &
A)
2709 : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl>(IRP,
A) {}
2715struct AANonNullCallSiteArgument final : AANonNullFloating {
2716 AANonNullCallSiteArgument(
const IRPosition &IRP, Attributor &
A)
2717 : AANonNullFloating(IRP,
A) {}
2724struct AANonNullCallSiteReturned final
2725 : AACalleeToCallSite<AANonNull, AANonNullImpl> {
2726 AANonNullCallSiteReturned(
const IRPosition &IRP, Attributor &
A)
2727 : AACalleeToCallSite<AANonNull, AANonNullImpl>(IRP,
A) {}
2736struct AAMustProgressImpl :
public AAMustProgress {
2737 AAMustProgressImpl(
const IRPosition &IRP, Attributor &
A)
2738 : AAMustProgress(IRP,
A) {}
2744 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2749 const std::string getAsStr(Attributor *
A)
const override {
2750 return getAssumed() ?
"mustprogress" :
"may-not-progress";
2754struct AAMustProgressFunction final : AAMustProgressImpl {
2755 AAMustProgressFunction(
const IRPosition &IRP, Attributor &
A)
2756 : AAMustProgressImpl(IRP,
A) {}
2762 A,
this, getIRPosition(), DepClassTy::OPTIONAL, IsKnown)) {
2764 return indicateOptimisticFixpoint();
2765 return ChangeStatus::UNCHANGED;
2768 auto CheckForMustProgress = [&](AbstractCallSite ACS) {
2770 bool IsKnownMustProgress;
2772 A,
this, IPos, DepClassTy::REQUIRED, IsKnownMustProgress,
2776 bool AllCallSitesKnown =
true;
2777 if (!
A.checkForAllCallSites(CheckForMustProgress, *
this,
2780 return indicatePessimisticFixpoint();
2782 return ChangeStatus::UNCHANGED;
2786 void trackStatistics()
const override {
2792struct AAMustProgressCallSite final : AAMustProgressImpl {
2793 AAMustProgressCallSite(
const IRPosition &IRP, Attributor &
A)
2794 : AAMustProgressImpl(IRP,
A) {}
2803 bool IsKnownMustProgress;
2805 A,
this, FnPos, DepClassTy::REQUIRED, IsKnownMustProgress))
2806 return indicatePessimisticFixpoint();
2807 return ChangeStatus::UNCHANGED;
2811 void trackStatistics()
const override {
2820struct AANoRecurseImpl :
public AANoRecurse {
2821 AANoRecurseImpl(
const IRPosition &IRP, Attributor &
A) : AANoRecurse(IRP,
A) {}
2827 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
2832 const std::string getAsStr(Attributor *
A)
const override {
2833 return getAssumed() ?
"norecurse" :
"may-recurse";
2837struct AANoRecurseFunction final : AANoRecurseImpl {
2838 AANoRecurseFunction(
const IRPosition &IRP, Attributor &
A)
2839 : AANoRecurseImpl(IRP,
A) {}
2845 auto CallSitePred = [&](AbstractCallSite ACS) {
2846 bool IsKnownNoRecurse;
2850 DepClassTy::NONE, IsKnownNoRecurse))
2852 return IsKnownNoRecurse;
2854 bool UsedAssumedInformation =
false;
2855 if (
A.checkForAllCallSites(CallSitePred, *
this,
true,
2856 UsedAssumedInformation)) {
2862 if (!UsedAssumedInformation)
2863 indicateOptimisticFixpoint();
2864 return ChangeStatus::UNCHANGED;
2867 const AAInterFnReachability *EdgeReachability =
2868 A.getAAFor<AAInterFnReachability>(*
this, getIRPosition(),
2869 DepClassTy::REQUIRED);
2870 if (EdgeReachability && EdgeReachability->
canReach(
A, *getAnchorScope()))
2871 return indicatePessimisticFixpoint();
2872 return ChangeStatus::UNCHANGED;
2879struct AANoRecurseCallSite final
2880 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl> {
2881 AANoRecurseCallSite(
const IRPosition &IRP, Attributor &
A)
2882 : AACalleeToCallSite<AANoRecurse, AANoRecurseImpl>(IRP,
A) {}
2892struct AANonConvergentImpl :
public AANonConvergent {
2893 AANonConvergentImpl(
const IRPosition &IRP, Attributor &
A)
2894 : AANonConvergent(IRP,
A) {}
2897 const std::string getAsStr(Attributor *
A)
const override {
2898 return getAssumed() ?
"non-convergent" :
"may-be-convergent";
2902struct AANonConvergentFunction final : AANonConvergentImpl {
2903 AANonConvergentFunction(
const IRPosition &IRP, Attributor &
A)
2904 : AANonConvergentImpl(IRP,
A) {}
2910 auto CalleeIsNotConvergent = [&](
Instruction &Inst) {
2913 if (!Callee ||
Callee->isIntrinsic()) {
2916 if (
Callee->isDeclaration()) {
2917 return !
Callee->hasFnAttribute(Attribute::Convergent);
2919 const auto *ConvergentAA =
A.getAAFor<AANonConvergent>(
2921 return ConvergentAA && ConvergentAA->isAssumedNotConvergent();
2924 bool UsedAssumedInformation =
false;
2925 if (!
A.checkForAllCallLikeInstructions(CalleeIsNotConvergent, *
this,
2926 UsedAssumedInformation)) {
2927 return indicatePessimisticFixpoint();
2929 return ChangeStatus::UNCHANGED;
2933 if (isKnownNotConvergent() &&
2934 A.hasAttr(getIRPosition(), Attribute::Convergent)) {
2935 A.removeAttrs(getIRPosition(), {Attribute::Convergent});
2936 return ChangeStatus::CHANGED;
2938 return ChangeStatus::UNCHANGED;
2948struct AAUndefinedBehaviorImpl :
public AAUndefinedBehavior {
2949 AAUndefinedBehaviorImpl(
const IRPosition &IRP, Attributor &
A)
2950 : AAUndefinedBehavior(IRP,
A) {}
2955 const size_t UBPrevSize = KnownUBInsts.size();
2956 const size_t NoUBPrevSize = AssumedNoUBInsts.size();
2960 if (
I.isVolatile() &&
I.mayWriteToMemory())
2964 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
2973 "Expected pointer operand of memory accessing instruction");
2977 std::optional<Value *> SimplifiedPtrOp =
2978 stopOnUndefOrAssumed(
A, PtrOp, &
I);
2979 if (!SimplifiedPtrOp || !*SimplifiedPtrOp)
2981 const Value *PtrOpVal = *SimplifiedPtrOp;
2987 AssumedNoUBInsts.insert(&
I);
2999 AssumedNoUBInsts.insert(&
I);
3001 KnownUBInsts.insert(&
I);
3010 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3017 if (BrInst->isUnconditional())
3022 std::optional<Value *> SimplifiedCond =
3023 stopOnUndefOrAssumed(
A, BrInst->getCondition(), BrInst);
3024 if (!SimplifiedCond || !*SimplifiedCond)
3026 AssumedNoUBInsts.insert(&
I);
3034 if (AssumedNoUBInsts.count(&
I) || KnownUBInsts.count(&
I))
3043 for (
unsigned idx = 0; idx < CB.
arg_size(); idx++) {
3049 if (idx >=
Callee->arg_size())
3061 bool IsKnownNoUndef;
3063 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNoUndef);
3064 if (!IsKnownNoUndef)
3066 bool UsedAssumedInformation =
false;
3067 std::optional<Value *> SimplifiedVal =
3070 if (UsedAssumedInformation)
3072 if (SimplifiedVal && !*SimplifiedVal)
3075 KnownUBInsts.insert(&
I);
3081 bool IsKnownNonNull;
3083 A,
this, CalleeArgumentIRP, DepClassTy::NONE, IsKnownNonNull);
3085 KnownUBInsts.insert(&
I);
3094 std::optional<Value *> SimplifiedRetValue =
3095 stopOnUndefOrAssumed(
A, RI.getReturnValue(), &
I);
3096 if (!SimplifiedRetValue || !*SimplifiedRetValue)
3114 bool IsKnownNonNull;
3119 KnownUBInsts.insert(&
I);
3125 bool UsedAssumedInformation =
false;
3126 A.checkForAllInstructions(InspectMemAccessInstForUB, *
this,
3127 {Instruction::Load, Instruction::Store,
3128 Instruction::AtomicCmpXchg,
3129 Instruction::AtomicRMW},
3130 UsedAssumedInformation,
3132 A.checkForAllInstructions(InspectBrInstForUB, *
this, {Instruction::Br},
3133 UsedAssumedInformation,
3135 A.checkForAllCallLikeInstructions(InspectCallSiteForUB, *
this,
3136 UsedAssumedInformation);
3140 if (!getAnchorScope()->getReturnType()->isVoidTy()) {
3142 if (!
A.isAssumedDead(ReturnIRP,
this,
nullptr, UsedAssumedInformation)) {
3143 bool IsKnownNoUndef;
3145 A,
this, ReturnIRP, DepClassTy::NONE, IsKnownNoUndef);
3147 A.checkForAllInstructions(InspectReturnInstForUB, *
this,
3148 {Instruction::Ret}, UsedAssumedInformation,
3153 if (NoUBPrevSize != AssumedNoUBInsts.size() ||
3154 UBPrevSize != KnownUBInsts.size())
3155 return ChangeStatus::CHANGED;
3156 return ChangeStatus::UNCHANGED;
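// Summary sketch of the update above (assumption): three instruction scans
// feed the UB sets, and a change is reported iff either set grew:
//
//   A.checkForAllInstructions(InspectMemAccessInstForUB, ...); // ld/st/atomics
//   A.checkForAllInstructions(InspectBrInstForUB, ..., {Instruction::Br}, ...);
//   A.checkForAllCallLikeInstructions(InspectCallSiteForUB, ...);
//   return (KnownUBInsts or AssumedNoUBInsts changed size)
//              ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;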
3159 bool isKnownToCauseUB(Instruction *
I)
const override {
3160 return KnownUBInsts.count(
I);
3163 bool isAssumedToCauseUB(Instruction *
I)
const override {
3170 switch (
I->getOpcode()) {
3171 case Instruction::Load:
3172 case Instruction::Store:
3173 case Instruction::AtomicCmpXchg:
3174 case Instruction::AtomicRMW:
3175 return !AssumedNoUBInsts.count(
I);
3176 case Instruction::Br: {
3178 if (BrInst->isUnconditional())
3180 return !AssumedNoUBInsts.count(
I);
3189 if (KnownUBInsts.empty())
3190 return ChangeStatus::UNCHANGED;
3191 for (Instruction *
I : KnownUBInsts)
3192 A.changeToUnreachableAfterManifest(
I);
3193 return ChangeStatus::CHANGED;
3197 const std::string getAsStr(Attributor *
A)
const override {
3198 return getAssumed() ?
"undefined-behavior" :
"no-ub";
3226 SmallPtrSet<Instruction *, 8> KnownUBInsts;
3230 SmallPtrSet<Instruction *, 8> AssumedNoUBInsts;
3241 std::optional<Value *> stopOnUndefOrAssumed(Attributor &
A,
Value *V,
3243 bool UsedAssumedInformation =
false;
3244 std::optional<Value *> SimplifiedV =
3247 if (!UsedAssumedInformation) {
3252 KnownUBInsts.insert(
I);
3253 return std::nullopt;
3260 KnownUBInsts.insert(
I);
3261 return std::nullopt;
3267struct AAUndefinedBehaviorFunction final : AAUndefinedBehaviorImpl {
3268 AAUndefinedBehaviorFunction(
const IRPosition &IRP, Attributor &
A)
3269 : AAUndefinedBehaviorImpl(IRP,
A) {}
3272 void trackStatistics()
const override {
3273 STATS_DECL(UndefinedBehaviorInstruction, Instruction,
3274 "Number of instructions known to have UB");
3276 KnownUBInsts.size();
3287static bool mayContainUnboundedCycle(Function &
F, Attributor &
A) {
3288 ScalarEvolution *SE =
3289 A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
F);
3290 LoopInfo *LI =
A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
F);
3296 for (scc_iterator<Function *> SCCI =
scc_begin(&
F); !SCCI.isAtEnd(); ++SCCI)
3297 if (SCCI.hasCycle())
3307 for (
auto *L : LI->getLoopsInPreorder()) {
3314struct AAWillReturnImpl :
public AAWillReturn {
3315 AAWillReturnImpl(
const IRPosition &IRP, Attributor &
A)
3316 : AAWillReturn(IRP,
A) {}
3322 A,
nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
3327 bool isImpliedByMustprogressAndReadonly(Attributor &
A,
bool KnownOnly) {
3328 if (!
A.hasAttr(getIRPosition(), {Attribute::MustProgress}))
3333 return IsKnown || !KnownOnly;
3339 if (isImpliedByMustprogressAndReadonly(
A,
false))
3340 return ChangeStatus::UNCHANGED;
3346 A,
this, IPos, DepClassTy::REQUIRED, IsKnown)) {
3352 bool IsKnownNoRecurse;
3354 A,
this, IPos, DepClassTy::REQUIRED, IsKnownNoRecurse);
3357 bool UsedAssumedInformation =
false;
3358 if (!
A.checkForAllCallLikeInstructions(CheckForWillReturn, *
this,
3359 UsedAssumedInformation))
3360 return indicatePessimisticFixpoint();
3362 return ChangeStatus::UNCHANGED;
3366 const std::string getAsStr(Attributor *
A)
const override {
3367 return getAssumed() ?
"willreturn" :
"may-noreturn";
3371struct AAWillReturnFunction final : AAWillReturnImpl {
3372 AAWillReturnFunction(
const IRPosition &IRP, Attributor &
A)
3373 : AAWillReturnImpl(IRP,
A) {}
3377 AAWillReturnImpl::initialize(
A);
3380 assert(
F &&
"Did expect an anchor function");
3381 if (
F->isDeclaration() || mayContainUnboundedCycle(*
F,
A))
3382 indicatePessimisticFixpoint();
3390struct AAWillReturnCallSite final
3391 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl> {
3392 AAWillReturnCallSite(
const IRPosition &IRP, Attributor &
A)
3393 : AACalleeToCallSite<AAWillReturn, AAWillReturnImpl>(IRP,
A) {}
3397 if (isImpliedByMustprogressAndReadonly(
A,
false))
3398 return ChangeStatus::UNCHANGED;
3400 return AACalleeToCallSite::updateImpl(
A);
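// Reasoning sketch (assumption): `isImpliedByMustprogressAndReadonly` encodes
// the rule that a function which is both `mustprogress` and effectively
// read-only cannot loop forever with observable effects, hence it is
// `willreturn`. In pseudo-form:
//
//   if (A.hasAttr(IRP, {Attribute::MustProgress}) && isAssumedReadOnly(...))
//     return willreturn;   // known if both facts are known, else assumed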
3422 const ToTy *
To =
nullptr;
3449 if (!ES || ES->
empty()) {
3450 ExclusionSet = nullptr;
3451 }
else if (MakeUnique) {
3452 ExclusionSet =
A.getInfoCache().getOrCreateUniqueBlockExecutionSet(ES);
3477 if (!PairDMI::isEqual({LHS->From, LHS->To}, {RHS->From, RHS->To}))
3479 return InstSetDMI::isEqual(LHS->ExclusionSet, RHS->ExclusionSet);
3483#define DefineKeys(ToTy) \
3485 ReachabilityQueryInfo<ToTy> \
3486 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::EmptyKey = \
3487 ReachabilityQueryInfo<ToTy>( \
3488 DenseMapInfo<const Instruction *>::getEmptyKey(), \
3489 DenseMapInfo<const ToTy *>::getEmptyKey()); \
3491 ReachabilityQueryInfo<ToTy> \
3492 DenseMapInfo<ReachabilityQueryInfo<ToTy> *>::TombstoneKey = \
3493 ReachabilityQueryInfo<ToTy>( \
3494 DenseMapInfo<const Instruction *>::getTombstoneKey(), \
3495 DenseMapInfo<const ToTy *>::getTombstoneKey());
3504template <
typename BaseTy,
typename ToTy>
3505struct CachedReachabilityAA :
public BaseTy {
3506 using RQITy = ReachabilityQueryInfo<ToTy>;
3508 CachedReachabilityAA(
const IRPosition &IRP, Attributor &
A) : BaseTy(IRP,
A) {}
3511 bool isQueryAA()
const override {
return true; }
3516 for (
unsigned u = 0,
e = QueryVector.size();
u <
e; ++
u) {
3517 RQITy *RQI = QueryVector[
u];
3518 if (RQI->Result == RQITy::Reachable::No &&
3520 Changed = ChangeStatus::CHANGED;
3526 bool IsTemporaryRQI) = 0;
3528 bool rememberResult(Attributor &
A,
typename RQITy::Reachable
Result,
3529 RQITy &RQI,
bool UsedExclusionSet,
bool IsTemporaryRQI) {
3534 QueryCache.erase(&RQI);
3540 if (
Result == RQITy::Reachable::Yes || !UsedExclusionSet) {
3541 RQITy PlainRQI(RQI.From, RQI.To);
3542 if (!QueryCache.count(&PlainRQI)) {
3543 RQITy *RQIPtr =
new (
A.Allocator) RQITy(RQI.From, RQI.To);
3545 QueryVector.push_back(RQIPtr);
3546 QueryCache.insert(RQIPtr);
3551 if (IsTemporaryRQI &&
Result != RQITy::Reachable::Yes && UsedExclusionSet) {
3552 assert((!RQI.ExclusionSet || !RQI.ExclusionSet->empty()) &&
3553 "Did not expect empty set!");
3554 RQITy *RQIPtr =
new (
A.Allocator)
3555 RQITy(
A, *RQI.From, *RQI.To, RQI.ExclusionSet,
true);
3556 assert(RQIPtr->Result == RQITy::Reachable::No &&
"Already reachable?");
3558 assert(!QueryCache.count(RQIPtr));
3559 QueryVector.push_back(RQIPtr);
3560 QueryCache.insert(RQIPtr);
3563 if (
Result == RQITy::Reachable::No && IsTemporaryRQI)
3564 A.registerForUpdate(*
this);
3565 return Result == RQITy::Reachable::Yes;
3568 const std::string getAsStr(Attributor *
A)
const override {
3570 return "#queries(" + std::to_string(QueryVector.size()) +
")";
3573 bool checkQueryCache(Attributor &
A, RQITy &StackRQI,
3574 typename RQITy::Reachable &
Result) {
3575 if (!this->getState().isValidState()) {
3576 Result = RQITy::Reachable::Yes;
3582 if (StackRQI.ExclusionSet) {
3583 RQITy PlainRQI(StackRQI.From, StackRQI.To);
3584 auto It = QueryCache.find(&PlainRQI);
3585 if (It != QueryCache.end() && (*It)->Result == RQITy::Reachable::No) {
3586 Result = RQITy::Reachable::No;
3591 auto It = QueryCache.find(&StackRQI);
3592 if (It != QueryCache.end()) {
3599 QueryCache.insert(&StackRQI);
3605 DenseSet<RQITy *> QueryCache;
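// Usage sketch for the query cache (mirrors isAssumedReachable below): a
// temporary query object consults the cache first and only falls back to the
// graph walk on a miss:
//
//   RQITy StackRQI(A, From, To, ExclusionSet, false);
//   typename RQITy::Reachable Result;
//   if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
//     return NonConstThis->isReachableImpl(A, StackRQI,
//                                          /*IsTemporaryRQI=*/true);
//   return Result == RQITy::Reachable::Yes;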
3608struct AAIntraFnReachabilityFunction final
3609 :
public CachedReachabilityAA<AAIntraFnReachability, Instruction> {
3610 using Base = CachedReachabilityAA<AAIntraFnReachability, Instruction>;
3611 AAIntraFnReachabilityFunction(
const IRPosition &IRP, Attributor &
A)
3613 DT =
A.getInfoCache().getAnalysisResultForFunction<DominatorTreeAnalysis>(
3617 bool isAssumedReachable(
3618 Attributor &
A,
const Instruction &From,
const Instruction &To,
3620 auto *NonConstThis =
const_cast<AAIntraFnReachabilityFunction *
>(
this);
3624 RQITy StackRQI(
A, From, To, ExclusionSet,
false);
3626 if (!NonConstThis->checkQueryCache(
A, StackRQI, Result))
3627 return NonConstThis->isReachableImpl(
A, StackRQI,
3629 return Result == RQITy::Reachable::Yes;
3636 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3639 [&](
const auto &DeadEdge) {
3640 return LivenessAA->isEdgeDead(DeadEdge.first,
3644 return LivenessAA->isAssumedDead(BB);
3646 return ChangeStatus::UNCHANGED;
3650 return Base::updateImpl(
A);
3654 bool IsTemporaryRQI)
override {
3656 bool UsedExclusionSet =
false;
3661 while (IP && IP != &To) {
3662 if (ExclusionSet && IP != Origin && ExclusionSet->
count(IP)) {
3663 UsedExclusionSet =
true;
3671 const BasicBlock *FromBB = RQI.From->getParent();
3672 const BasicBlock *ToBB = RQI.To->getParent();
3674 "Not an intra-procedural query!");
3678 if (FromBB == ToBB &&
3679 WillReachInBlock(*RQI.From, *RQI.To, RQI.ExclusionSet))
3680 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3685 if (!WillReachInBlock(ToBB->
front(), *RQI.To, RQI.ExclusionSet))
3686 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3690 SmallPtrSet<const BasicBlock *, 16> ExclusionBlocks;
3691 if (RQI.ExclusionSet)
3692 for (
auto *
I : *RQI.ExclusionSet)
3693 if (
I->getFunction() == Fn)
3694 ExclusionBlocks.
insert(
I->getParent());
3697 if (ExclusionBlocks.
count(FromBB) &&
3700 return rememberResult(
A, RQITy::Reachable::No, RQI,
true, IsTemporaryRQI);
3703 A.getAAFor<AAIsDead>(*
this, getIRPosition(), DepClassTy::OPTIONAL);
3704 if (LivenessAA && LivenessAA->isAssumedDead(ToBB)) {
3705 DeadBlocks.insert(ToBB);
3706 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3710 SmallPtrSet<const BasicBlock *, 16> Visited;
3714 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> LocalDeadEdges;
3715 while (!Worklist.
empty()) {
3717 if (!Visited.
insert(BB).second)
3719 for (
const BasicBlock *SuccBB :
successors(BB)) {
3720 if (LivenessAA && LivenessAA->isEdgeDead(BB, SuccBB)) {
3721 LocalDeadEdges.
insert({BB, SuccBB});
3726 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3729 return rememberResult(
A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
3732 if (ExclusionBlocks.
count(SuccBB)) {
3733 UsedExclusionSet =
true;
3740 DeadEdges.insert_range(LocalDeadEdges);
3741 return rememberResult(
A, RQITy::Reachable::No, RQI, UsedExclusionSet,
3746 void trackStatistics()
const override {}
3751 DenseSet<const BasicBlock *> DeadBlocks;
3755 DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> DeadEdges;
3758 const DominatorTree *DT =
nullptr;
  static bool isImpliedByIR(Attributor &A, const IRPosition &IRP,
                            Attribute::AttrKind ImpliedAttributeKind,
                            bool IgnoreSubsumingPositions) {
    assert(ImpliedAttributeKind == Attribute::NoAlias &&
           "Unexpected attribute kind");
    // ...
    IgnoreSubsumingPositions = true;
    // ...
    // A byval argument is also `noalias`.
    if (A.hasAttr(IRP, {Attribute::ByVal, Attribute::NoAlias},
                  IgnoreSubsumingPositions, Attribute::NoAlias))
      return true;
    // ...
  }

  // ... (initialize() asserts the associated value is a pointer:
  //      "Noalias is a pointer attribute")

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noalias" : "may-alias";
  }
};

/// NoAlias attribute for a floating value.
struct AANoAliasFloating final : AANoAliasImpl {
  AANoAliasFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // ...
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    // ...
  }
};

/// NoAlias attribute for an argument.
struct AANoAliasArgument final
    : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
  using Base = AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl>;
  AANoAliasArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If the function is no-sync, no-alias cannot break synchronization.
    bool IsKnownNoSync;
    if (AA::hasAssumedIRAttr<Attribute::NoSync>(
            A, this, IRPosition::function_scope(getIRPosition()),
            DepClassTy::OPTIONAL, IsKnownNoSync))
      return Base::updateImpl(A);

    // If the argument is read-only, no-alias cannot break synchronization.
    // ...
      return Base::updateImpl(A);

    // If the argument is never passed through callbacks, no-alias cannot
    // break synchronization.
    bool UsedAssumedInformation = false;
    if (A.checkForAllCallSites(
            [](AbstractCallSite ACS) { return !ACS.isCallbackCall(); }, *this,
            true, UsedAssumedInformation))
      return Base::updateImpl(A);

    // Give up for now; adding no-alias could otherwise break synchronization.
    return indicatePessimisticFixpoint();
  }
};
/// NoAlias attribute for call site arguments.
struct AANoAliasCallSiteArgument final : AANoAliasImpl {
  AANoAliasCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// Determine whether the underlying value may alias with the call site
  /// argument \p OtherArgNo of \p CB (in the same call site).
  bool mayAliasWithArgument(Attributor &A, AAResults *&AAR,
                            const AAMemoryBehavior &MemBehaviorAA,
                            const CallBase &CB, unsigned OtherArgNo) {
    // We do not need to worry about aliasing with the underlying IRP.
    if (this->getCalleeArgNo() == (int)OtherArgNo)
      return false;
    // ...

    // If the other argument is readnone, there is no dependence.
    auto *CBArgMemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
        *this, IRPosition::callsite_argument(CB, OtherArgNo),
        DepClassTy::NONE);
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // If both arguments are read-only, there is no dependence either.
    if (CBArgMemBehaviorAA && CBArgMemBehaviorAA->isAssumedReadOnly() &&
        MemBehaviorAA.isAssumedReadOnly()) {
      A.recordDependence(MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      A.recordDependence(*CBArgMemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return false;
    }

    // Fall back to alias analysis.
    AAR = A.getInfoCache().getAnalysisResultForFunction<AAManager>(
        *getAnchorScope());
    bool IsAliasing = !AAR || !AAR->isNoAlias(&getAssociatedValue(), ArgOp);
    LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] alias analysis queried between "
                         "callsite arguments: "
                      << getAssociatedValue() << " " << *ArgOp << " => "
                      << (IsAliasing ? "" : "no-") << "alias\n");
    return IsAliasing;
  }

  bool isKnownNoAliasDueToNoAliasPreservation(
      Attributor &A, AAResults *&AAR, const AAMemoryBehavior &MemBehaviorAA) {
    // Check all uses: the value must not be captured or escape into a scope
    // where it could alias with the call.
    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      // ...
      bool IsKnownNoCapture;
      if (AA::hasAssumedIRAttr<Attribute::Captures>(
              A, this, IRPosition::callsite_argument(*CB, ArgNo),
              DepClassTy::OPTIONAL, IsKnownNoCapture))
        return true;
      // ...
      if (!AA::isPotentiallyReachable(
              A, *UserI, *getCtxI(), *this, /* ExclusionSet */ nullptr,
              [ScopeFn](const Function &Fn) { return &Fn != ScopeFn; }))
        return true;
      // ...
      LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] Unknown user: " << *UserI
                        << "\n");
      return false;
    };

    bool IsKnownNoCapture;
    const AANoCapture *NoCaptureAA = nullptr;
    bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
        A, this, VIRP, DepClassTy::NONE, IsKnownNoCapture, false,
        &NoCaptureAA);
    if (!IsAssumedNoCapture &&
        (!NoCaptureAA || !NoCaptureAA->isAssumedNoCaptureMaybeReturned())) {
      if (!A.checkForAllUses(UsePred, *this, getAssociatedValue())) {
        LLVM_DEBUG(dbgs() << "[AANoAliasCSArg] " << getAssociatedValue()
                          << " cannot be noalias as it is potentially "
                             "captured\n");
        return false;
      }
    }
    A.recordDependence(*NoCaptureAA, *this, DepClassTy::OPTIONAL);

    // There must be no other pointer argument that could alias with the
    // value passed at this call site.
    for (unsigned OtherArgNo = 0; OtherArgNo < CB.arg_size(); OtherArgNo++)
      if (mayAliasWithArgument(A, AAR, MemBehaviorAA, CB, OtherArgNo))
        return false;

    return true;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // If the argument is readnone we are done.
    auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, getIRPosition(), DepClassTy::NONE);
    if (MemBehaviorAA && MemBehaviorAA->isAssumedReadNone()) {
      A.recordDependence(*MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;
    }

    bool IsKnownNoAlias;
    const IRPosition &VIRP = IRPosition::value(getAssociatedValue());
    if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
            A, this, VIRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AANoAlias] " << getAssociatedValue()
                        << " is not no-alias at the definition\n");
      return indicatePessimisticFixpoint();
    }

    AAResults *AAR = nullptr;
    if (MemBehaviorAA &&
        isKnownNoAliasDueToNoAliasPreservation(A, AAR, *MemBehaviorAA)) {
      LLVM_DEBUG(
          dbgs() << "[AANoAlias] No-Alias deduced via no-alias preservation\n");
      return ChangeStatus::UNCHANGED;
    }

    return indicatePessimisticFixpoint();
  }
  // ...
};
/// NoAlias attribute for function return value.
struct AANoAliasReturned final : AANoAliasImpl {
  AANoAliasReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckReturnValue = [&](Value &RV) -> bool {
      // ...
      bool IsKnownNoAlias;
      if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
              A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoAlias))
        return false;

      bool IsKnownNoCapture;
      const AANoCapture *NoCaptureAA = nullptr;
      bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
          A, this, RVPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
          &NoCaptureAA);
      return IsAssumedNoCapture ||
             (NoCaptureAA && NoCaptureAA->isAssumedNoCaptureMaybeReturned());
    };

    if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
  // ...
};

/// NoAlias attribute deduction for a call site return value.
struct AANoAliasCallSiteReturned final
    : AACalleeToCallSite<AANoAlias, AANoAliasImpl> {
  AANoAliasCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoAlias, AANoAliasImpl>(IRP, A) {}
  // ...
};
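
// A minimal query sketch (the names `A`, `QueryingAA`, and `V` are
// illustrative placeholders): clients normally do not instantiate these
// classes directly but ask for no-alias through the helper below, which also
// reports whether the fact is known or merely assumed.
//
//   bool IsKnownNoAlias;
//   bool IsAssumedNoAlias = AA::hasAssumedIRAttr<Attribute::NoAlias>(
//       A, &QueryingAA, IRPosition::value(V), DepClassTy::OPTIONAL,
//       IsKnownNoAlias);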
/// -------------------AAIsDead Function Attribute-----------------------

struct AAIsDeadValueImpl : public AAIsDead {
  AAIsDeadValueImpl(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override { return isAssumed(IS_DEAD); }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return isKnown(IS_DEAD); }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override { return false; }

  /// See AAIsDead::isAssumedDead(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    return I == getCtxI() && isAssumedDead();
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return isAssumedDead(I) && isKnownDead();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead() ? "assumed-dead" : "assumed-live";
  }

  /// Check if all uses are assumed dead.
  bool areAllUsesAssumedDead(Attributor &A, Value &V) {
    // Callers might not check the type; void has no uses.
    if (V.getType()->isVoidTy() || V.use_empty())
      return true;

    // If we replace a value with a constant there are no uses to check.
    if (auto *I = dyn_cast<Instruction>(&V))
      if (!A.isRunOn(*I->getFunction()))
        return false;
    bool UsedAssumedInformation = false;
    std::optional<Constant *> C =
        A.getAssumedConstant(V, *this, UsedAssumedInformation);
    if (!C || *C)
      return true;

    // Explicitly set the dependence class to required so a long chain of
    // dependent instructions is considered live as soon as one is, without
    // going through N update cycles.
    auto UsePred = [&](const Use &U, bool &Follow) { return false; };
    return A.checkForAllUses(UsePred, *this, V, /* CheckBBLivenessOnly */ false,
                             DepClassTy::REQUIRED,
                             /* IgnoreDroppableUses */ false);
  }

  /// Determine if \p I is assumed to be side-effect free.
  bool isAssumedSideEffectFree(Attributor &A, Instruction *I) {
    // ...
    bool IsKnownNoUnwind;
    if (!AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, CallIRP, DepClassTy::OPTIONAL, IsKnownNoUnwind))
      return false;
    // ...
  }
};
struct AAIsDeadFloating : public AAIsDeadValueImpl {
  AAIsDeadFloating(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    // ...
      indicatePessimisticFixpoint();
    // ...
    if (!isAssumedSideEffectFree(A, I)) {
      // ...
        indicatePessimisticFixpoint();
      // ...
        removeAssumedBits(HAS_NO_EFFECT);
    }
  }

  bool isDeadFence(Attributor &A, FenceInst &FI) {
    const auto *ExecDomainAA = A.lookupAAFor<AAExecutionDomain>(
        IRPosition::function(*FI.getFunction()), *this, DepClassTy::NONE);
    if (!ExecDomainAA || !ExecDomainAA->isNoOpFence(FI))
      return false;
    A.recordDependence(*ExecDomainAA, *this, DepClassTy::OPTIONAL);
    return true;
  }

  bool isDeadStore(Attributor &A, StoreInst &SI,
                   SmallSetVector<Instruction *, 8> *AssumeOnlyInst = nullptr) {
    // Volatile stores are never removed.
    if (SI.isVolatile())
      return false;

    bool UsedAssumedInformation = false;
    if (!AssumeOnlyInst) {
      PotentialCopies.clear();
      if (!AA::getPotentialCopiesOfStoredValue(A, SI, PotentialCopies, *this,
                                               UsedAssumedInformation)) {
        LLVM_DEBUG(
            dbgs()
            << "[AAIsDead] Could not determine potential copies of store!\n");
        return false;
      }
    }
    LLVM_DEBUG(dbgs() << "[AAIsDead] Store has " << PotentialCopies.size()
                      << " potential copies.\n");

    InformationCache &InfoCache = A.getInfoCache();
    return llvm::all_of(PotentialCopies, [&](Value *V) {
      if (A.isAssumedDead(IRPosition::value(*V), this, nullptr,
                          UsedAssumedInformation))
        return true;
      // A load used only by assumes can still be considered dead; remember
      // those assumes so manifest can delete them as well.
      if (auto *LI = dyn_cast<LoadInst>(V))
        if (llvm::all_of(LI->uses(), [&](const Use &U) {
              auto &UserI = cast<Instruction>(*U.getUser());
              if (InfoCache.isOnlyUsedByAssume(UserI)) {
                if (AssumeOnlyInst)
                  AssumeOnlyInst->insert(&UserI);
                return true;
              }
              return A.isAssumedDead(U, this, nullptr, UsedAssumedInformation);
            }))
          return true;
      LLVM_DEBUG(dbgs() << "[AAIsDead] Potential copy " << *V
                        << " is assumed live!\n");
      return false;
    });
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    // ...
      return "assumed-dead-store";
    // ...
      return "assumed-dead-fence";
    return AAIsDeadValueImpl::getAsStr(A);
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    if (auto *SI = dyn_cast_or_null<StoreInst>(I)) {
      if (!isDeadStore(A, *SI))
        return indicatePessimisticFixpoint();
    } else if (auto *FI = dyn_cast_or_null<FenceInst>(I)) {
      if (!isDeadFence(A, *FI))
        return indicatePessimisticFixpoint();
    } else {
      if (!isAssumedSideEffectFree(A, I))
        return indicatePessimisticFixpoint();
      if (!areAllUsesAssumedDead(A, getAssociatedValue()))
        return indicatePessimisticFixpoint();
    }
    return ChangeStatus::UNCHANGED;
  }

  bool isRemovableStore() const override {
    return isAssumed(IS_REMOVABLE) && isa<StoreInst>(&getAssociatedValue());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *I = dyn_cast<Instruction>(&V)) {
      if (auto *SI = dyn_cast<StoreInst>(I)) {
        SmallSetVector<Instruction *, 8> AssumeOnlyInst;
        bool IsDead = isDeadStore(A, *SI, &AssumeOnlyInst);
        (void)IsDead;
        // ...
        A.deleteAfterManifest(*I);
        for (size_t i = 0; i < AssumeOnlyInst.size(); ++i) {
          Instruction *AOI = AssumeOnlyInst[i];
          for (auto *Usr : AOI->users())
            AssumeOnlyInst.insert(cast<Instruction>(Usr));
          A.deleteAfterManifest(*AOI);
        }
        return ChangeStatus::CHANGED;
      }
      if (auto *FI = dyn_cast<FenceInst>(I)) {
        A.deleteAfterManifest(*FI);
        return ChangeStatus::CHANGED;
      }
      // ...
        A.deleteAfterManifest(*I);
    }
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    // ...
  }

private:
  // The potential copies of a dead store, used for deletion during manifest.
  SmallSetVector<Value *, 4> PotentialCopies;
};
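
// A minimal query sketch (the names `A`, `QueryingAA`, and `SI` are
// illustrative placeholders): whether a particular store was classified as
// removable by this AA can be asked as follows.
//
//   const auto *IsDeadAA = A.getAAFor<AAIsDead>(
//       QueryingAA, IRPosition::value(*SI), DepClassTy::OPTIONAL);
//   bool StoreRemovable = IsDeadAA && IsDeadAA->isRemovableStore();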
struct AAIsDeadArgument : public AAIsDeadFloating {
  AAIsDeadArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Argument &Arg = *getAssociatedArgument();
    if (A.isValidFunctionSignatureRewrite(Arg, /* ReplacementTypes */ {}))
      if (A.registerFunctionSignatureRewrite(
              Arg, /* ReplacementTypes */ {}, {}, {}))
        return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }
  // ...
};

struct AAIsDeadCallSiteArgument : public AAIsDeadValueImpl {
  AAIsDeadCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadValueImpl::initialize(A);
    // ...
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA = A.getAAFor<AAIsDead>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // ...
    assert(!isa<UndefValue>(U.get()) &&
           "Expected undef values to be filtered out!");
    UndefValue &UV = *UndefValue::get(U->getType());
    if (A.changeUseAfterManifest(U, UV))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }
  // ...
};

struct AAIsDeadCallSiteReturned : public AAIsDeadFloating {
  AAIsDeadCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFloating(IRP, A) {}

  /// See AAIsDead::isAssumedDead().
  bool isAssumedDead() const override {
    return AAIsDeadFloating::isAssumedDead() && IsAssumedSideEffectFree;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAIsDeadFloating::initialize(A);
    // ...
      indicatePessimisticFixpoint();
    // ...
    IsAssumedSideEffectFree = isAssumedSideEffectFree(A, getCtxI());
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (IsAssumedSideEffectFree && !isAssumedSideEffectFree(A, getCtxI())) {
      IsAssumedSideEffectFree = false;
      Changed = ChangeStatus::CHANGED;
    }
    if (!areAllUsesAssumedDead(A, getAssociatedValue()))
      return indicatePessimisticFixpoint();
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (IsAssumedSideEffectFree)
      // ...
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedDead()
               ? "assumed-dead"
               : (getAssumed() ? "assumed-dead-users" : "assumed-live");
  }

private:
  bool IsAssumedSideEffectFree = true;
};

struct AAIsDeadReturned : public AAIsDeadValueImpl {
  AAIsDeadReturned(const IRPosition &IRP, Attributor &A)
      : AAIsDeadValueImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions([](Instruction &) { return true; }, *this,
                              {Instruction::Ret}, UsedAssumedInformation);

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isCallbackCall() || !ACS.getInstruction())
        return false;
      return areAllUsesAssumedDead(A, *ACS.getInstruction());
    };

    if (!A.checkForAllCallSites(PredForCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    bool AnyChange = false;
    UndefValue &UV = *UndefValue::get(getAssociatedFunction()->getReturnType());
    // ...
    bool UsedAssumedInformation = false;
    A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                              UsedAssumedInformation);
    return AnyChange ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }
  // ...
};
struct AAIsDeadFunction : public AAIsDead {
  AAIsDeadFunction(const IRPosition &IRP, Attributor &A) : AAIsDead(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Function *F = getAnchorScope();
    assert(F && "Did expect an anchor function");
    if (!isAssumedDeadInternalFunction(A)) {
      ToBeExploredFrom.insert(&F->getEntryBlock().front());
      assumeLive(A, F->getEntryBlock());
    }
  }

  bool isAssumedDeadInternalFunction(Attributor &A) {
    if (!getAnchorScope()->hasLocalLinkage())
      return false;
    bool UsedAssumedInformation = false;
    return A.checkForAllCallSites([](AbstractCallSite) { return false; }, *this,
                                  true, UsedAssumedInformation);
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
           std::to_string(getAnchorScope()->size()) + "][#TBEP " +
           std::to_string(ToBeExploredFrom.size()) + "][#KDE " +
           std::to_string(KnownDeadEnds.size()) + "]";
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");
    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function &F = *getAnchorScope();

    if (AssumedLiveBlocks.empty()) {
      A.deleteAfterManifest(F);
      return ChangeStatus::CHANGED;
    }

    // Flag to determine if we can change an invoke to a call assuming the
    // callee is nounwind.
    bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);

    KnownDeadEnds.set_union(ToBeExploredFrom);
    for (const Instruction *DeadEndI : KnownDeadEnds) {
      // ...
      bool IsKnownNoReturn;
      // ...
        A.registerInvokeWithDeadSuccessor(const_cast<InvokeInst &>(*II));
      // ...
        A.changeToUnreachableAfterManifest(
            const_cast<Instruction *>(DeadEndI->getNextNode()));
      HasChanged = ChangeStatus::CHANGED;
    }

    STATS_DECL(AAIsDead, BasicBlock, "Number of dead basic blocks deleted.");
    for (BasicBlock &BB : F)
      if (!AssumedLiveBlocks.count(&BB)) {
        A.deleteAfterManifest(BB);
        ++BUILD_STAT_NAME(AAIsDead, BasicBlock);
        HasChanged = ChangeStatus::CHANGED;
      }

    return HasChanged;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  bool isEdgeDead(const BasicBlock *From, const BasicBlock *To) const override {
    assert(From->getParent() == getAnchorScope() &&
           To->getParent() == getAnchorScope() &&
           "Used AAIsDead of the wrong function");
    return isValidState() && !AssumedLiveEdges.count(std::make_pair(From, To));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

  /// Returns true if the function is assumed dead.
  bool isAssumedDead() const override { return false; }

  /// See AAIsDead::isKnownDead().
  bool isKnownDead() const override { return false; }

  /// See AAIsDead::isAssumedDead(BasicBlock *).
  bool isAssumedDead(const BasicBlock *BB) const override {
    assert(BB->getParent() == getAnchorScope() &&
           "BB must be in the same anchor scope function.");
    // ...
    return !AssumedLiveBlocks.count(BB);
  }

  /// See AAIsDead::isKnownDead(BasicBlock *).
  bool isKnownDead(const BasicBlock *BB) const override {
    return getKnown() && isAssumedDead(BB);
  }

  /// See AAIsDead::isAssumed(Instruction *I).
  bool isAssumedDead(const Instruction *I) const override {
    assert(I->getParent()->getParent() == getAnchorScope() &&
           "Instruction must be in the same anchor scope function.");
    // ...
    // If the block is not assumed live, the instruction is dead.
    if (!AssumedLiveBlocks.count(I->getParent()))
      return true;
    // Otherwise, it is only dead if it follows a known dead end or
    // exploration point in its block.
    // ...
      if (KnownDeadEnds.count(PrevI) || ToBeExploredFrom.count(PrevI))
        return true;
    // ...
  }

  /// See AAIsDead::isKnownDead(Instruction *I).
  bool isKnownDead(const Instruction *I) const override {
    return getKnown() && isAssumedDead(I);
  }

  /// Assume \p BB is (partially) live now and indicate to the Attributor
  /// that internal functions called from \p BB should now be looked at.
  bool assumeLive(Attributor &A, const BasicBlock &BB) {
    if (!AssumedLiveBlocks.insert(&BB).second)
      return false;

    // We assume that all of BB is (probably) live now and if there are calls
    // to internal functions we will assume that those are now live as well.
    for (const Instruction &I : BB)
      // ...
        if (F->hasLocalLinkage())
          A.markLiveInternalFunction(*F);
    return true;
  }

  /// Collection of instructions that need to be explored again, e.g. we
  /// did assume they do not transfer control to (one of their) successors.
  SmallSetVector<const Instruction *, 8> ToBeExploredFrom;

  /// Collection of instructions that are known to not transfer control.
  SmallSetVector<const Instruction *, 8> KnownDeadEnds;

  /// Collection of all assumed live edges.
  DenseSet<std::pair<const BasicBlock *, const BasicBlock *>> AssumedLiveEdges;

  /// Collection of all assumed live BasicBlocks.
  DenseSet<const BasicBlock *> AssumedLiveBlocks;
};
static bool
identifyAliveSuccessors(Attributor &A, const CallBase &CB,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  // ...
  bool IsKnownNoReturn;
  // ...
  return !IsKnownNoReturn;
}

static bool
identifyAliveSuccessors(Attributor &A, const InvokeInst &II,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation =
      identifyAliveSuccessors(A, cast<CallBase>(II), AA, AliveSuccessors);
  // ...
  if (AAIsDeadFunction::mayCatchAsynchronousExceptions(*II.getFunction())) {
    AliveSuccessors.push_back(&II.getUnwindDest()->front());
  } else {
    // ...
    bool IsKnownNoUnwind;
    // ...
    UsedAssumedInformation |= !IsKnownNoUnwind;
    // ...
      AliveSuccessors.push_back(&II.getUnwindDest()->front());
  }
  return UsedAssumedInformation;
}

static bool
identifyAliveSuccessors(Attributor &A, const BranchInst &BI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  // ...
  std::optional<Constant *> C =
      A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
  // A constant condition means only one successor stays alive; otherwise both
  // successors are added and no assumed information was used.
  // ...
    UsedAssumedInformation = false;
  // ...
  return UsedAssumedInformation;
}

static bool
identifyAliveSuccessors(Attributor &A, const SwitchInst &SI,
                        AbstractAttribute &AA,
                        SmallVectorImpl<const Instruction *> &AliveSuccessors) {
  bool UsedAssumedInformation = false;
  SmallVector<AA::ValueAndContext> Values;
  if (!A.getAssumedSimplifiedValues(IRPosition::value(*SI.getCondition()), &AA,
                                    Values, AA::AnyScope,
                                    UsedAssumedInformation)) {
    // Could not simplify the condition; all successors are alive.
    for (const BasicBlock *SuccBB : successors(SI.getParent()))
      AliveSuccessors.push_back(&SuccBB->front());
  } else if (Values.empty() ||
             (Values.size() == 1 &&
              isa_and_nonnull<UndefValue>(Values.front().getValue()))) {
    // No valid assumed condition; nothing to explore from here.
    return UsedAssumedInformation;
  } else {
    Type &Ty = *SI.getCondition()->getType();
    SmallPtrSet<ConstantInt *, 8> Constants;
    auto CheckForConstantInt = [&](Value *V) {
      // ...
    };

    if (!all_of(Values, [&](AA::ValueAndContext &VAC) {
          return CheckForConstantInt(VAC.getValue());
        })) {
      for (const BasicBlock *SuccBB : successors(SI.getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      return UsedAssumedInformation;
    }

    unsigned MatchedCases = 0;
    for (const auto &CaseIt : SI.cases()) {
      if (Constants.count(CaseIt.getCaseValue())) {
        ++MatchedCases;
        AliveSuccessors.push_back(&CaseIt.getCaseSuccessor()->front());
      }
    }

    // If not all condition values were matched by a case, the default
    // destination can also be taken.
    if (MatchedCases < Constants.size())
      AliveSuccessors.push_back(&SI.getDefaultDest()->front());
  }
  return UsedAssumedInformation;
}
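
// A minimal sketch of the successor-pruning idea used above (the names `A`,
// `AA`, and `BI` are illustrative placeholders, with `BI` a conditional
// branch): if the branch condition simplifies to a constant, only one of the
// two successors has to be treated as alive.
//
//   bool UsedAssumedInformation = false;
//   std::optional<Constant *> C =
//       A.getAssumedConstant(*BI.getCondition(), AA, UsedAssumedInformation);
//   // A constant `C` selects exactly one successor; the switch handler above
//   // applies the same idea per case value.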
ChangeStatus AAIsDeadFunction::updateImpl(Attributor &A) {
  ChangeStatus Change = ChangeStatus::UNCHANGED;

  if (AssumedLiveBlocks.empty()) {
    if (isAssumedDeadInternalFunction(A))
      return ChangeStatus::UNCHANGED;

    Function *F = getAnchorScope();
    ToBeExploredFrom.insert(&F->getEntryBlock().front());
    assumeLive(A, F->getEntryBlock());
    Change = ChangeStatus::CHANGED;
  }

  LLVM_DEBUG(dbgs() << "[AAIsDead] Live [" << AssumedLiveBlocks.size() << "/"
                    << getAnchorScope()->size() << "] BBs and "
                    << ToBeExploredFrom.size() << " exploration points and "
                    << KnownDeadEnds.size() << " known dead ends\n");

  SmallVector<const Instruction *, 8> Worklist(ToBeExploredFrom.begin(),
                                               ToBeExploredFrom.end());
  decltype(ToBeExploredFrom) NewToBeExploredFrom;

  SmallVector<const Instruction *, 8> AliveSuccessors;
  while (!Worklist.empty()) {
    const Instruction *I = Worklist.pop_back_val();
    // Fast forward over benign instructions to the next terminator or call.
    // ...
      I = I->getNextNode();

    AliveSuccessors.clear();

    bool UsedAssumedInformation = false;
    switch (I->getOpcode()) {
    default:
      assert(I->isTerminator() &&
             "Expected non-terminators to be handled already!");
      for (const BasicBlock *SuccBB : successors(I->getParent()))
        AliveSuccessors.push_back(&SuccBB->front());
      break;
    case Instruction::Call:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<CallInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Invoke:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<InvokeInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Br:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<BranchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    case Instruction::Switch:
      UsedAssumedInformation = identifyAliveSuccessors(A, cast<SwitchInst>(*I),
                                                       *this, AliveSuccessors);
      break;
    }

    if (UsedAssumedInformation) {
      NewToBeExploredFrom.insert(I);
    } else if (AliveSuccessors.empty() ||
               (I->isTerminator() &&
                AliveSuccessors.size() < I->getNumSuccessors())) {
      if (KnownDeadEnds.insert(I))
        Change = ChangeStatus::CHANGED;
    }

    LLVM_DEBUG(dbgs() << "[AAIsDead] #AliveSuccessors: "
                      << AliveSuccessors.size() << " UsedAssumedInformation: "
                      << UsedAssumedInformation << "\n");

    for (const Instruction *AliveSuccessor : AliveSuccessors) {
      if (!I->isTerminator()) {
        assert(AliveSuccessors.size() == 1 &&
               "Non-terminator expected to have a single successor!");
        Worklist.push_back(AliveSuccessor);
      } else {
        // Record the alive edge and explore the successor block if it was not
        // assumed live before.
        auto Edge = std::make_pair(I->getParent(), AliveSuccessor->getParent());
        if (AssumedLiveEdges.insert(Edge).second)
          Change = ChangeStatus::CHANGED;
        if (assumeLive(A, *AliveSuccessor->getParent()))
          Worklist.push_back(AliveSuccessor);
      }
    }
  }

  // Check if the content of ToBeExploredFrom changed, ignore the order.
  if (NewToBeExploredFrom.size() != ToBeExploredFrom.size() ||
      llvm::any_of(NewToBeExploredFrom, [&](const Instruction *I) {
        return !ToBeExploredFrom.count(I);
      })) {
    Change = ChangeStatus::CHANGED;
    ToBeExploredFrom = std::move(NewToBeExploredFrom);
  }

  // If everything is live and all dead ends are proper terminators, there is
  // nothing left to do; give up on any remaining assumptions.
  if (ToBeExploredFrom.empty() &&
      getAnchorScope()->size() == AssumedLiveBlocks.size() &&
      llvm::all_of(KnownDeadEnds, [](const Instruction *DeadEndI) {
        return DeadEndI->isTerminator() && DeadEndI->getNumSuccessors() == 0;
      }))
    return indicatePessimisticFixpoint();
  return Change;
}

/// Liveness information for a call sites.
struct AAIsDeadCallSite final : AAIsDeadFunction {
  AAIsDeadCallSite(const IRPosition &IRP, Attributor &A)
      : AAIsDeadFunction(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // TODO: Liveness for call sites is not really useful and not yet
    //       supported for call sites; see the assertion message:
    //       "supported for call sites yet!"
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};
/// -------------------- Dereferenceable Argument Attribute --------------------

struct AADereferenceableImpl : AADereferenceable {
  AADereferenceableImpl(const IRPosition &IRP, Attributor &A)
      : AADereferenceable(IRP, A) {}
  using StateType = DerefState;

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = *getAssociatedValue().stripPointerCasts();
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(),
               {Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
               Attrs, /* IgnoreSubsumingPositions */ false);
    for (const Attribute &Attr : Attrs)
      takeKnownDerefBytesMaximum(Attr.getValueAsInt());

    // Ask (indirectly) for non-null so the dependence is recorded.
    bool IsKnownNonNull;
    AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNonNull);

    bool CanBeNull, CanBeFreed;
    takeKnownDerefBytesMaximum(V.getPointerDereferenceableBytes(
        A.getDataLayout(), CanBeNull, CanBeFreed));

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See AbstractAttribute::getState()
  StateType &getState() override { return *this; }
  const StateType &getState() const override { return *this; }

  /// Helper function for collecting accessed bytes in must-be-executed-context
  void addAccessedBytesForUse(Attributor &A, const Use *U, const Instruction *I,
                              DerefState &State) {
    const Value *UseV = U->get();
    // ...
    if (!Loc || Loc->Ptr != UseV || !Loc->Size.isPrecise() || I->isVolatile())
      return;

    int64_t Offset;
    const Value *Base = GetPointerBaseWithConstantOffset(
        Loc->Ptr, Offset, A.getDataLayout(), /*AllowNonInbounds*/ true);
    if (Base && Base == &getAssociatedValue())
      State.addAccessedBytes(Offset, Loc->Size.getValue());
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AADereferenceable::StateType &State) {
    bool IsNonNull = false;
    bool TrackUse = false;
    int64_t DerefBytes = getKnownNonNullAndDerefBytesForUse(
        A, *this, getAssociatedValue(), U, I, IsNonNull, TrackUse);
    LLVM_DEBUG(dbgs() << "[AADereferenceable] Deref bytes: " << DerefBytes
                      << " for instruction " << *I << "\n");

    addAccessedBytesForUse(A, U, I, State);
    State.takeKnownDerefBytesMaximum(DerefBytes);
    return TrackUse;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    bool IsKnownNonNull;
    bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    if (IsAssumedNonNull &&
        A.hasAttr(getIRPosition(), Attribute::DereferenceableOrNull)) {
      A.removeAttrs(getIRPosition(), {Attribute::DereferenceableOrNull});
      return ChangeStatus::CHANGED;
    }
    return AADereferenceable::manifest(A);
  }

  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    // TODO: Add *_globally support
    bool IsKnownNonNull;
    bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
        A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    if (IsAssumedNonNull)
      Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
          Ctx, getAssumedDereferenceableBytes()));
    else
      Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
          Ctx, getAssumedDereferenceableBytes()));
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (!getAssumedDereferenceableBytes())
      return "unknown-dereferenceable";
    bool IsKnownNonNull;
    bool IsAssumedNonNull = false;
    if (A)
      IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
          *A, this, getIRPosition(), DepClassTy::NONE, IsKnownNonNull);
    return std::string("dereferenceable") +
           (IsAssumedNonNull ? "" : "_or_null") +
           (isAssumedGlobal() ? "_globally" : "") + "<" +
           std::to_string(getKnownDereferenceableBytes()) + "-" +
           std::to_string(getAssumedDereferenceableBytes()) + ">" +
           (!A ? " [non-null is unknown]" : "");
  }
};
/// Dereferenceable attribute for a floating value.
struct AADereferenceableFloating : AADereferenceableImpl {
  AADereferenceableFloating(const IRPosition &IRP, Attributor &A)
      : AADereferenceableImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    bool UsedAssumedInformation = false;
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      AA::AnyScope, UsedAssumedInformation)) {
      Values.push_back({getAssociatedValue(), getCtxI()});
    }

    Stripped = Values.size() != 1 ||
               Values.front().getValue() != &getAssociatedValue();

    const DataLayout &DL = A.getDataLayout();
    DerefState T;

    auto VisitValueCB = [&](const Value &V) -> bool {
      unsigned IdxWidth =
          DL.getIndexSizeInBits(V.getType()->getPointerAddressSpace());
      APInt Offset(IdxWidth, 0);
      const Value *Base = stripAndAccumulateOffsets(
          A, *this, &V, DL, Offset, /* GetMinOffset */ false,
          /* AllowNonInbounds */ true);

      const auto *AA = A.getAAFor<AADereferenceable>(
          *this, IRPosition::value(*Base), DepClassTy::REQUIRED);
      int64_t DerefBytes = 0;
      if (!AA || (!Stripped && this == AA)) {
        // Use IR information if we did not strip anything.
        bool CanBeNull, CanBeFreed;
        DerefBytes =
            Base->getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
        T.GlobalState.indicatePessimisticFixpoint();
      } else {
        const DerefState &DS = AA->getState();
        DerefBytes = DS.DerefBytesState.getAssumed();
        T.GlobalState &= DS.GlobalState;
      }

      // Only consider the part that is still dereferenceable past the offset.
      int64_t OffsetSExt = Offset.getSExtValue();
      if (OffsetSExt < 0)
        OffsetSExt = 0;

      T.takeAssumedDerefBytesMinimum(
          std::max(int64_t(0), DerefBytes - OffsetSExt));

      if (this == AA) {
        if (!Stripped) {
          // If nothing was stripped IR information is all we got.
          T.takeKnownDerefBytesMaximum(
              std::max(int64_t(0), DerefBytes - OffsetSExt));
          T.indicatePessimisticFixpoint();
        } else if (OffsetSExt > 0) {
          // If something was stripped but there is circular reasoning we look
          // for the offset. If it is positive we basically decrease the
          // dereferenceable bytes in a circular loop, so give up.
          T.indicatePessimisticFixpoint();
        }
      }

      return T.isValidState();
    };

    for (const auto &VAC : Values)
      if (!VisitValueCB(*VAC.getValue()))
        return indicatePessimisticFixpoint();

    return clampStateAndIndicateChange(getState(), T);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(dereferenceable)
  }
};

/// Dereferenceable attribute for a return value.
struct AADereferenceableReturned final
    : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl> {
  using Base =
      AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(dereferenceable)
  }
};

/// Dereferenceable attribute for an argument
struct AADereferenceableArgument final
    : AAArgumentFromCallSiteArguments<AADereferenceable,
                                      AADereferenceableImpl> {
  using Base =
      AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableArgument(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(dereferenceable)
  }
};

/// Dereferenceable attribute for a call site argument.
struct AADereferenceableCallSiteArgument final : AADereferenceableFloating {
  AADereferenceableCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AADereferenceableFloating(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(dereferenceable)
  }
};

/// Dereferenceable attribute deduction for a call site return value.
struct AADereferenceableCallSiteReturned final
    : AACalleeToCallSite<AADereferenceable, AADereferenceableImpl> {
  using Base = AACalleeToCallSite<AADereferenceable, AADereferenceableImpl>;
  AADereferenceableCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(dereferenceable)
  }
};
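
// A small worked sketch of the offset math used in the floating deduction
// above (all values illustrative): if a base pointer is dereferenceable for
// `DerefBytes` bytes and the value at hand sits `OffsetSExt` bytes past that
// base, the derived pointer is dereferenceable for the remainder, clamped at
// zero. An unknown or unfavorable offset instead leads to a pessimistic
// fixpoint above.
//
//   int64_t DerefBytes = 64, OffsetSExt = 16;
//   int64_t DerivedBytes = std::max(int64_t(0), DerefBytes - OffsetSExt);
//   // DerivedBytes == 48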
// ------------------------ Align Argument Attribute ------------------------

static unsigned getKnownAlignForUse(Attributor &A, AAAlign &QueryingAA,
                                    Value &AssociatedValue, const Use *U,
                                    const Instruction *I, bool &TrackUse) {
  // We need to follow common pointer manipulation uses to the accesses they
  // feed into. A GEP with all-constant indices preserves a known relation to
  // the base pointer.
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I))
    if (GEP->hasAllConstantIndices())
      TrackUse = true;

  if (auto *II = dyn_cast<IntrinsicInst>(I)) {
    switch (II->getIntrinsicID()) {
    case Intrinsic::ptrmask: {
      // Is it appropriate to pull attribute in initialization?
      const auto *ConstVals = A.getAAFor<AAPotentialConstantValues>(
          QueryingAA, IRPosition::value(*II->getOperand(1)), DepClassTy::NONE);
      const auto *AlignAA = A.getAAFor<AAAlign>(
          QueryingAA, IRPosition::value(*II), DepClassTy::NONE);
      if (ConstVals && ConstVals->isValidState() && ConstVals->isAtFixpoint()) {
        unsigned ShiftValue = std::min(ConstVals->getAssumedMinTrailingZeros(),
                                       Value::MaxAlignmentExponent);
        Align ConstAlign(UINT64_C(1) << ShiftValue);
        if (ConstAlign >= AlignAA->getKnownAlign())
          return Align(1).value();
      }
      if (AlignAA)
        return AlignAA->getKnownAlign().value();
      break;
    }
    case Intrinsic::amdgcn_make_buffer_rsrc: {
      const auto *AlignAA = A.getAAFor<AAAlign>(
          QueryingAA, IRPosition::value(*II), DepClassTy::NONE);
      if (AlignAA)
        return AlignAA->getKnownAlign().value();
      break;
    }
    default:
      break;
    }
  }

  MaybeAlign MA;
  if (const auto *CB = dyn_cast<CallBase>(I)) {
    // ...
      MA = MaybeAlign(AlignAA->getKnownAlign());
  }

  const DataLayout &DL = A.getDataLayout();
  const Value *UseV = U->get();
  if (auto *SI = dyn_cast<StoreInst>(I)) {
    if (SI->getPointerOperand() == UseV)
      MA = SI->getAlign();
  } else if (auto *LI = dyn_cast<LoadInst>(I)) {
    if (LI->getPointerOperand() == UseV)
      MA = LI->getAlign();
  } else if (auto *AI = dyn_cast<AtomicRMWInst>(I)) {
    if (AI->getPointerOperand() == UseV)
      MA = AI->getAlign();
  } else if (auto *AI = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (AI->getPointerOperand() == UseV)
      MA = AI->getAlign();
  }

  if (!MA || *MA <= 1)
    return 0;

  unsigned Alignment = MA->value();
  int64_t Offset;
  if (const Value *Base =
          GetPointerBaseWithConstantOffset(UseV, Offset, DL)) {
    if (Base == &AssociatedValue) {
      // The whole alignment only transfers back to the base pointer if the
      // offset is compatible with it; otherwise fall back to the gcd.
      uint32_t gcd = std::gcd(uint32_t(abs((int32_t)Offset)), Alignment);
      Alignment = gcd;
    }
  }
  return Alignment;
}
struct AAAlignImpl : AAAlign {
  AAAlignImpl(const IRPosition &IRP, Attributor &A) : AAAlign(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    SmallVector<Attribute, 4> Attrs;
    A.getAttrs(getIRPosition(), {Attribute::Alignment}, Attrs);
    for (const Attribute &Attr : Attrs)
      takeKnownMaximum(Attr.getValueAsInt());

    Value &V = *getAssociatedValue().stripPointerCasts();
    takeKnownMaximum(V.getPointerAlignment(A.getDataLayout()).value());

    if (Instruction *CtxI = getCtxI())
      followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus InstrChanged = ChangeStatus::UNCHANGED;

    // Check for users that allow alignment annotations.
    Value &AssociatedValue = getAssociatedValue();
    if (isa<ConstantData>(AssociatedValue))
      return ChangeStatus::UNCHANGED;

    for (const Use &U : AssociatedValue.uses()) {
      if (auto *SI = dyn_cast<StoreInst>(U.getUser())) {
        if (SI->getPointerOperand() == &AssociatedValue)
          if (SI->getAlign() < getAssumedAlign()) {
            STATS_DECLTRACK(AAAlign, Store,
                            "Number of times alignment added to a store");
            SI->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
      } else if (auto *LI = dyn_cast<LoadInst>(U.getUser())) {
        if (LI->getPointerOperand() == &AssociatedValue)
          if (LI->getAlign() < getAssumedAlign()) {
            LI->setAlignment(getAssumedAlign());
            STATS_DECLTRACK(AAAlign, Load,
                            "Number of times alignment added to a load");
            InstrChanged = ChangeStatus::CHANGED;
          }
      } else if (auto *RMW = dyn_cast<AtomicRMWInst>(U.getUser())) {
        if (RMW->getPointerOperand() == &AssociatedValue) {
          if (RMW->getAlign() < getAssumedAlign()) {
            STATS_DECLTRACK(AAAlign, AtomicRMW,
                            "Number of times alignment added to atomicrmw");
            RMW->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
        }
      } else if (auto *CAS = dyn_cast<AtomicCmpXchgInst>(U.getUser())) {
        if (CAS->getPointerOperand() == &AssociatedValue) {
          if (CAS->getAlign() < getAssumedAlign()) {
            STATS_DECLTRACK(AAAlign, AtomicCmpXchg,
                            "Number of times alignment added to cmpxchg");
            CAS->setAlignment(getAssumedAlign());
            InstrChanged = ChangeStatus::CHANGED;
          }
        }
      }
    }

    ChangeStatus Changed = AAAlign::manifest(A);

    Align InheritAlign =
        getAssociatedValue().getPointerAlignment(A.getDataLayout());
    if (InheritAlign >= getAssumedAlign())
      return InstrChanged;
    return Changed | InstrChanged;
  }

  // TODO: Provide a helper to determine the implied ABI alignment and check in
  //       the existing manifest method and a new one for AAAlignImpl that value
  //       to avoid making the alignment explicit if it did not improve.

  /// See AbstractAttribute::getDeducedAttributes
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    if (getAssumedAlign() > 1)
      Attrs.emplace_back(
          Attribute::getWithAlignment(Ctx, Align(getAssumedAlign())));
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AAAlign::StateType &State) {
    bool TrackUse = false;

    unsigned int KnownAlign =
        getKnownAlignForUse(A, *this, getAssociatedValue(), U, I, TrackUse);
    State.takeKnownMaximum(KnownAlign);

    return TrackUse;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return "align<" + std::to_string(getKnownAlign().value()) + "-" +
           std::to_string(getAssumedAlign().value()) + ">";
  }
};
/// Align attribute for a floating value.
struct AAAlignFloating : AAAlignImpl {
  AAAlignFloating(const IRPosition &IRP, Attributor &A) : AAAlignImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const DataLayout &DL = A.getDataLayout();

    bool UsedAssumedInformation = false;
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      AA::AnyScope, UsedAssumedInformation)) {
      Values.push_back({getAssociatedValue(), getCtxI()});
    }

    Stripped = Values.size() != 1 ||
               Values.front().getValue() != &getAssociatedValue();

    StateType T;
    auto VisitValueCB = [&](Value &V) -> bool {
      // ...
      const auto *AA = A.getAAFor<AAAlign>(*this, IRPosition::value(V),
                                           DepClassTy::REQUIRED);
      if (!AA || (!Stripped && this == AA)) {
        int64_t Offset;
        unsigned Alignment = 1;
        // Use only IR information if we did not strip anything.
        // ...
          Alignment = V.getPointerAlignment(DL).value();
        T.takeKnownMaximum(Alignment);
        T.indicatePessimisticFixpoint();
      } else {
        // Use abstract attribute information.
        const AAAlign::StateType &DS = AA->getState();
        T ^= DS;
      }
      return T.isValidState();
    };

    for (const auto &VAC : Values) {
      if (!VisitValueCB(*VAC.getValue()))
        return indicatePessimisticFixpoint();
    }

    return clampStateAndIndicateChange(getState(), T);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FLOATING_ATTR(align) }
};

/// Align attribute for function return value.
struct AAAlignReturned final
    : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
  using Base = AAReturnedFromReturnedValues<AAAlign, AAAlignImpl>;
  AAAlignReturned(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(aligned) }
};

/// Align attribute for function argument.
struct AAAlignArgument final
    : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
  using Base = AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl>;
  AAAlignArgument(const IRPosition &IRP, Attributor &A) : Base(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // If the associated argument is involved in a must-tail call we give up
    // because we would need to keep the argument alignments of caller and
    // callee in-sync.
    if (A.getInfoCache().isInvolvedInMustTailCall(*getAssociatedArgument()))
      return ChangeStatus::UNCHANGED;
    return Base::manifest(A);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(aligned) }
};

struct AAAlignCallSiteArgument final : AAAlignFloating {
  AAAlignCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAAlignFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // If the associated argument is involved in a must-tail call we give up.
    if (Argument *Arg = getAssociatedArgument())
      if (A.getInfoCache().isInvolvedInMustTailCall(*Arg))
        return ChangeStatus::UNCHANGED;
    ChangeStatus Changed = AAAlignImpl::manifest(A);
    Align InheritAlign =
        getAssociatedValue().getPointerAlignment(A.getDataLayout());
    if (InheritAlign >= getAssumedAlign())
      Changed = ChangeStatus::UNCHANGED;
    return Changed;
  }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = AAAlignFloating::updateImpl(A);
    if (Argument *Arg = getAssociatedArgument()) {
      // We only take known information from the argument because we do not
      // attach assumed information to the call site argument itself.
      const auto *ArgAlignAA = A.getAAFor<AAAlign>(
          *this, IRPosition::argument(*Arg), DepClassTy::NONE);
      if (ArgAlignAA)
        takeKnownMaximum(ArgAlignAA->getKnownAlign().value());
    }
    return Changed;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(aligned) }
};

/// Align attribute deduction for a call site return value.
struct AAAlignCallSiteReturned final
    : AACalleeToCallSite<AAAlign, AAAlignImpl> {
  using Base = AACalleeToCallSite<AAAlign, AAAlignImpl>;
  AAAlignCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override {
    if (auto *II = dyn_cast<IntrinsicInst>(&getAssociatedValue())) {
      switch (II->getIntrinsicID()) {
      case Intrinsic::ptrmask: {
        Align Alignment;
        // Compute result alignment from the mask's trailing zeros and the
        // alignment of the pointer operand.
        const auto *ConstVals = A.getAAFor<AAPotentialConstantValues>(
            *this, IRPosition::value(*II->getOperand(1)), DepClassTy::REQUIRED);
        if (ConstVals && ConstVals->isValidState()) {
          unsigned ShiftValue =
              std::min(ConstVals->getAssumedMinTrailingZeros(),
                       Value::MaxAlignmentExponent);
          Alignment = Align(UINT64_C(1) << ShiftValue);
        }
        const auto *AlignAA = A.getAAFor<AAAlign>(
            *this, IRPosition::value(*II->getOperand(0)), DepClassTy::REQUIRED);
        if (AlignAA)
          Alignment = std::max(AlignAA->getAssumedAlign(), Alignment);
        return clampStateAndIndicateChange<StateType>(
            this->getState(),
            std::min(this->getAssumedAlign(), Alignment).value());
      }
      case Intrinsic::amdgcn_make_buffer_rsrc: {
        const auto *AlignAA = A.getAAFor<AAAlign>(
            *this, IRPosition::value(*II->getOperand(0)), DepClassTy::REQUIRED);
        if (AlignAA)
          return clampStateAndIndicateChange<StateType>(
              this->getState(), AlignAA->getAssumedAlign().value());
        break;
      }
      default:
        break;
      }
    }
    return Base::updateImpl(A);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(align) }
};
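
// A small worked sketch of the gcd rule used by getKnownAlignForUse above
// (values illustrative): when a use sits at a constant byte offset from the
// associated pointer, the alignment that can be transferred back to the base
// is the gcd of the access alignment and the absolute offset.
//
//   uint32_t Alignment = 16;  // alignment of the memory access
//   int32_t Offset = 8;       // byte offset of the use from the base
//   uint32_t KnownAlign =
//       std::gcd(uint32_t(std::abs(Offset)), Alignment);  // == 8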
/// ------------------ Function No-Return Attribute ----------------------------

struct AANoReturnImpl : public AANoReturn {
  AANoReturnImpl(const IRPosition &IRP, Attributor &A) : AANoReturn(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    bool IsKnown;
    assert(!AA::hasAssumedIRAttr<Attribute::NoReturn>(
        A, nullptr, getIRPosition(), DepClassTy::NONE, IsKnown));
    (void)IsKnown;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noreturn" : "may-return";
  }

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override {
    auto CheckForNoReturn = [](Instruction &) { return false; };
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(CheckForNoReturn, *this,
                                   {(unsigned)Instruction::Ret},
                                   UsedAssumedInformation))
      return indicatePessimisticFixpoint();
    return ChangeStatus::UNCHANGED;
  }
};

struct AANoReturnFunction final : AANoReturnImpl {
  AANoReturnFunction(const IRPosition &IRP, Attributor &A)
      : AANoReturnImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(noreturn) }
};

/// NoReturn attribute deduction for a call sites.
struct AANoReturnCallSite final
    : AACalleeToCallSite<AANoReturn, AANoReturnImpl> {
  AANoReturnCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoReturn, AANoReturnImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(noreturn) }
};
/// ----------------------- Instance Info ---------------------------------

/// A class to hold the state of __attribute__((instance_info)) like
/// uniqueness information for a value with respect to the surrounding
/// analysis scope.
struct AAInstanceInfoImpl : public AAInstanceInfo {
  AAInstanceInfoImpl(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfo(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (auto *C = dyn_cast<Constant>(&V)) {
      if (C->isThreadDependent())
        indicatePessimisticFixpoint();
      else
        indicateOptimisticFixpoint();
      return;
    }
    // ...
      indicateOptimisticFixpoint();
    // ...
    auto *CI = A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
        *I->getFunction());
    // If the instruction is inside a cycle, it is not unique per "instance".
    if (mayBeInCycle(CI, I, /* HeaderOnly */ false))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;

    Value &V = getAssociatedValue();
    const Function *Scope = nullptr;
    if (auto *I = dyn_cast<Instruction>(&V))
      Scope = I->getFunction();
    // ...
    if (!Scope->hasLocalLinkage())
      return Changed;
    // ...
      return indicateOptimisticFixpoint();

    bool IsKnownNoRecurse;
    // ...

    auto UsePred = [&](const Use &U, bool &Follow) {
      // ...
      // Calls into local callees are fine if the corresponding argument is
      // itself unique for the analysis.
      if (!Callee || !Callee->hasLocalLinkage())
        return false;
      // ...
      const auto *ArgInstanceInfoAA = A.getAAFor<AAInstanceInfo>(
          *this, IRPosition::callsite_argument(*CB, ArgNo),
          DepClassTy::OPTIONAL);
      if (!ArgInstanceInfoAA ||
          !ArgInstanceInfoAA->isAssumedUniqueForAnalysis())
        return false;
      // If this call base might reach the scope again we might forward the
      // argument back here.
      if (AA::isPotentiallyReachable(
              A, *CB, *Scope, *this, /* ExclusionSet */ nullptr,
              [Scope](const Function &Fn) { return &Fn != Scope; }))
        return false;
      return true;
    };

    auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
      if (auto *SI = dyn_cast<StoreInst>(OldU.getUser())) {
        auto *Ptr = SI->getPointerOperand()->stripPointerCasts();
        // ...
      }
      return true;
    };

    if (!A.checkForAllUses(UsePred, *this, V, /* CheckBBLivenessOnly */ true,
                           DepClassTy::OPTIONAL,
                           /* IgnoreDroppableUses */ true, EquivalentUseCB))
      return indicatePessimisticFixpoint();

    return Changed;
  }

  /// See AbstractState::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return isAssumedUniqueForAnalysis() ? "<unique [fAa]>" : "<unknown>";
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};

/// InstanceInfo attribute for floating values.
struct AAInstanceInfoFloating : AAInstanceInfoImpl {
  AAInstanceInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {}
};

/// NoCapture attribute for function arguments.
struct AAInstanceInfoArgument final : AAInstanceInfoFloating {
  AAInstanceInfoArgument(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoFloating(IRP, A) {}
};

/// InstanceInfo attribute for call site arguments.
struct AAInstanceInfoCallSiteArgument final : AAInstanceInfoImpl {
  AAInstanceInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA =
        A.getAAFor<AAInstanceInfo>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }
};

/// InstanceInfo attribute for function return value.
struct AAInstanceInfoReturned final : AAInstanceInfoImpl {
  AAInstanceInfoReturned(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoImpl(IRP, A) {
    llvm_unreachable("InstanceInfo is not applicable to function returns!");
  }
};

/// InstanceInfo attribute deduction for a call site return value.
struct AAInstanceInfoCallSiteReturned final : AAInstanceInfoFloating {
  AAInstanceInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAInstanceInfoFloating(IRP, A) {}
};
/// ----------------------- Variable Capturing ---------------------------------

bool AANoCapture::isImpliedByIR(Attributor &A, const IRPosition &IRP,
                                Attribute::AttrKind ImpliedAttributeKind,
                                bool IgnoreSubsumingPositions) {
  assert(ImpliedAttributeKind == Attribute::Captures &&
         "Unexpected attribute kind");
  Value &V = IRP.getAssociatedValue();
  // ...
  // Non-pointer values (in address space 0) cannot be captured.
  if (!V.getType()->isPointerTy() ||
      V.getType()->getPointerAddressSpace() == 0) {
    // ...
  }

  SmallVector<Attribute, 1> Attrs;
  A.getAttrs(IRP, {Attribute::Captures}, Attrs,
             /* IgnoreSubsumingPositions */ true);
  // ...
  // A byval argument cannot be captured through the callee either.
  A.getAttrs(IRP, {Attribute::Captures, Attribute::ByVal}, Attrs,
             /* IgnoreSubsumingPositions */ true);
  // ...
}

/// Set the NOT_CAPTURED_IN_* bits that are implied by the IR of \p F for the
/// position (function return or argument \p ArgNo).
static void determineFunctionCaptureCapabilities(const IRPosition &IRP,
                                                 const Function &F,
                                                 BitIntegerState &State) {
  // TODO: Once we have memory behavior attributes we should use them here.

  // If we know we cannot communicate or write to memory, we do not care about
  // ptr2int anymore.
  bool ReadOnly = F.onlyReadsMemory();
  bool NoThrow = F.doesNotThrow();
  bool IsVoidReturn = F.getReturnType()->isVoidTy();
  if (ReadOnly && NoThrow && IsVoidReturn) {
    State.addKnownBits(NO_CAPTURE);
    return;
  }

  // A function cannot capture state in memory if it only reads memory, it can
  // however return/throw state and the state might be influenced by the
  // pointer value, e.g., loading from a returned pointer might reveal a bit.
  if (ReadOnly)
    State.addKnownBits(NOT_CAPTURED_IN_MEM);

  // A function cannot communicate state back if it does not through
  // exceptions and doesn not return values.
  if (NoThrow && IsVoidReturn)
    State.addKnownBits(NOT_CAPTURED_IN_RET);

  // Check existing "returned" attributes.
  int ArgNo = IRP.getCalleeArgNo();
  if (!NoThrow || ArgNo < 0 ||
      !F.getAttributes().hasAttrSomewhere(Attribute::Returned))
    return;

  for (unsigned U = 0, E = F.arg_size(); U < E; ++U)
    if (F.hasParamAttribute(U, Attribute::Returned)) {
      if (U == unsigned(ArgNo))
        State.removeAssumedBits(NOT_CAPTURED_IN_RET);
      else if (ReadOnly)
        State.addKnownBits(NO_CAPTURE);
      else
        State.addKnownBits(NOT_CAPTURED_IN_RET);
      break;
    }
}

namespace {
struct AANoCaptureImpl : public AANoCapture {
  AANoCaptureImpl(const IRPosition &IRP, Attributor &A) : AANoCapture(IRP, A) {}

  /// See AbstractAttribute::getDeducedAttributes(...).
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    if (!isAssumedNoCaptureMaybeReturned())
      return;

    if (isArgumentPosition()) {
      if (isAssumedNoCapture())
        Attrs.emplace_back(Attribute::get(Ctx, Attribute::Captures));
      else if (ManifestInternal)
        Attrs.emplace_back(Attribute::get(Ctx, "no-capture-maybe-returned"));
    }
  }

  /// See AbstractState::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (isKnownNoCapture())
      return "known not-captured";
    if (isAssumedNoCapture())
      return "assumed not-captured";
    if (isKnownNoCaptureMaybeReturned())
      return "known not-captured-maybe-returned";
    if (isAssumedNoCaptureMaybeReturned())
      return "assumed not-captured-maybe-returned";
    return "assumed-captured";
  }

  /// Check the use \p U and update \p State accordingly. Return true if we
  /// should continue to update the state.
  bool checkUse(Attributor &A, AANoCapture::StateType &State, const Use &U,
                bool &Follow) {
    Instruction *UInst = cast<Instruction>(U.getUser());
    LLVM_DEBUG(dbgs() << "[AANoCapture] Check use: " << *U.get() << " in "
                      << *UInst << "\n");

    // Deal with ptr2int by following uses.
    if (isa<PtrToIntInst>(UInst))
      return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                          /* Return */ true);

    // For stores we already checked if we can follow them, if they make it
    // here we give up.
    if (isa<StoreInst>(UInst))
      return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                          /* Return */ false);

    // Explicitly catch return instructions.
    if (isa<ReturnInst>(UInst)) {
      if (UInst->getFunction() == getAnchorScope())
        return isCapturedIn(State, /* Memory */ false, /* Integer */ false,
                            /* Return */ true);
      return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                          /* Return */ true);
    }

    // For now we only use special logic for call sites. However, the tracker
    // itself knows about a lot of other non-capturing cases already.
    auto *CB = dyn_cast<CallBase>(UInst);
    if (!CB || !CB->isArgOperand(&U))
      return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                          /* Return */ true);

    unsigned ArgNo = CB->getArgOperandNo(&U);
    const IRPosition &CSArgPos = IRPosition::callsite_argument(*CB, ArgNo);
    // If we have a abstract no-capture attribute for the argument we can use
    // it to justify a non-capture attribute here.
    bool IsKnownNoCapture;
    const AANoCapture *ArgNoCaptureAA = nullptr;
    bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
        A, this, CSArgPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
        &ArgNoCaptureAA);
    if (IsAssumedNoCapture)
      return isCapturedIn(State, /* Memory */ false, /* Integer */ false,
                          /* Return */ false);
    if (ArgNoCaptureAA && ArgNoCaptureAA->isAssumedNoCaptureMaybeReturned()) {
      Follow = true;
      return isCapturedIn(State, /* Memory */ false, /* Integer */ false,
                          /* Return */ false);
    }

    // Lastly, we could not find a reason no-capture can be assumed so we don't.
    return isCapturedIn(State, /* Memory */ true, /* Integer */ true,
                        /* Return */ true);
  }

  /// Update \p State according to the capture capabilities and return true if
  /// all bits that might still be set are assumed.
  static bool isCapturedIn(AANoCapture::StateType &State, bool CapturedInMem,
                           bool CapturedInInt, bool CapturedInRet) {
    LLVM_DEBUG(dbgs() << " - captures [Mem " << CapturedInMem << "|Int "
                      << CapturedInInt << "|Ret " << CapturedInRet << "]\n");
    if (CapturedInMem)
      State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_MEM);
    if (CapturedInInt)
      State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_INT);
    if (CapturedInRet)
      State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_RET);
    return State.isAssumed(AANoCapture::NO_CAPTURE_MAYBE_RETURNED);
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();
    Value *V = isArgumentPosition() ? IRP.getAssociatedArgument()
                                    : &IRP.getAssociatedValue();
    if (!V)
      return indicatePessimisticFixpoint();

    const Function *F =
        isArgumentPosition() ? IRP.getAssociatedFunction() : IRP.getAnchorScope();
    if (!F)
      return indicatePessimisticFixpoint();

    AANoCapture::StateType T;
    // Readonly means we cannot capture through memory.
    bool IsKnown;
    if (AA::isAssumedReadOnly(A, getIRPosition(), *this, IsKnown)) {
      T.addKnownBits(NOT_CAPTURED_IN_MEM);
      if (IsKnown)
        addKnownBits(NOT_CAPTURED_IN_MEM);
    }

    // Make sure all returned values are different than the underlying value.
    auto CheckReturnedArgs = [&](bool &UsedAssumedInformation) {
      SmallVector<AA::ValueAndContext> Values;
      if (!A.getAssumedSimplifiedValues(IRPosition::returned(*F), this, Values,
                                        AA::ValueScope::Intraprocedural,
                                        UsedAssumedInformation))
        return false;
      bool SeenConstant = false;
      for (const AA::ValueAndContext &VAC : Values) {
        if (isa<Constant>(VAC.getValue())) {
          if (SeenConstant)
            return false;
          SeenConstant = true;
        } else if (!isa<Argument>(VAC.getValue()) ||
                   VAC.getValue() == getAssociatedArgument())
          return false;
      }
      return true;
    };

    bool IsKnownNoUnwind;
    if (AA::hasAssumedIRAttr<Attribute::NoUnwind>(
            A, this, IRPosition::function_scope(IRP), DepClassTy::OPTIONAL,
            IsKnownNoUnwind)) {
      bool IsVoidTy = F->getReturnType()->isVoidTy();
      bool UsedAssumedInformation = false;
      if (IsVoidTy || CheckReturnedArgs(UsedAssumedInformation)) {
        T.addKnownBits(NOT_CAPTURED_IN_RET);
        if (T.isKnown(NOT_CAPTURED_IN_MEM))
          return ChangeStatus::UNCHANGED;
        if (IsKnownNoUnwind && (IsVoidTy || !UsedAssumedInformation)) {
          addKnownBits(NOT_CAPTURED_IN_RET);
          if (isKnown(NOT_CAPTURED_IN_MEM))
            return indicateOptimisticFixpoint();
        }
      }
    }

    auto UseCheck = [&](const Use &U, bool &Follow) -> bool {
      // ...
      return checkUse(A, T, U, Follow);
    };

    if (!A.checkForAllUses(UseCheck, *this, *V))
      return indicatePessimisticFixpoint();

    AANoCapture::StateType &S = getState();
    auto Assumed = S.getAssumed();
    S.intersectAssumedBits(T.getAssumed());
    if (!isAssumedNoCaptureMaybeReturned())
      return indicatePessimisticFixpoint();
    return Assumed == S.getAssumed() ? ChangeStatus::UNCHANGED
                                     : ChangeStatus::CHANGED;
  }
};

/// NoCapture attribute for function arguments.
struct AANoCaptureArgument final : AANoCaptureImpl {
  AANoCaptureArgument(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(nocapture) }
};

/// NoCapture attribute for call site arguments.
struct AANoCaptureCallSiteArgument final : AANoCaptureImpl {
  AANoCaptureCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    if (!Arg)
      return indicatePessimisticFixpoint();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    bool IsKnownNoCapture;
    const AANoCapture *ArgAA = nullptr;
    if (AA::hasAssumedIRAttr<Attribute::Captures>(
            A, this, ArgPos, DepClassTy::REQUIRED, IsKnownNoCapture, false,
            &ArgAA))
      return ChangeStatus::UNCHANGED;
    if (!ArgAA || !ArgAA->isAssumedNoCaptureMaybeReturned())
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(nocapture)
  }
};

/// NoCapture attribute for floating values.
struct AANoCaptureFloating final : AANoCaptureImpl {
  AANoCaptureFloating(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(nocapture)
  }
};

/// NoCapture attribute for function return value.
struct AANoCaptureReturned final : AANoCaptureImpl {
  AANoCaptureReturned(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {
    llvm_unreachable("NoCapture is not applicable to function returns!");
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};

/// NoCapture attribute deduction for a call site return value.
struct AANoCaptureCallSiteReturned final : AANoCaptureImpl {
  AANoCaptureCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoCaptureImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    const Function *F = getAnchorScope();
    // Check what state the associated function can actually capture.
    determineFunctionCaptureCapabilities(getIRPosition(), *F, *this);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(nocapture)
  }
};
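
// A minimal query sketch (the names `A`, `QueryingAA`, and `V` are
// illustrative placeholders): capture information is usually requested
// through the helper below, which can also hand back the underlying
// AANoCapture for the finer "not captured but maybe returned" state.
//
//   bool IsKnownNoCapture;
//   const AANoCapture *NoCaptureAA = nullptr;
//   bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::Captures>(
//       A, &QueryingAA, IRPosition::value(V), DepClassTy::OPTIONAL,
//       IsKnownNoCapture, /*IgnoreSubsumingPositions=*/false, &NoCaptureAA);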
  LLVM_DEBUG({
    if (SimplifiedAssociatedValue)
      dbgs() << "[ValueSimplify] is assumed to be "
             << **SimplifiedAssociatedValue << "\n";
    else
      dbgs() << "[ValueSimplify] is assumed to be <none>\n";
  });
  return true;
}

namespace {
struct AAValueSimplifyImpl : AAValueSimplify {
  AAValueSimplifyImpl(const IRPosition &IRP, Attributor &A)
      : AAValueSimplify(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    if (getAssociatedValue().getType()->isVoidTy())
      indicatePessimisticFixpoint();
    if (A.hasSimplificationCallback(getIRPosition()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    LLVM_DEBUG({
      dbgs() << "SAV: " << (bool)SimplifiedAssociatedValue << " ";
      if (SimplifiedAssociatedValue && *SimplifiedAssociatedValue)
        dbgs() << "SAV: " << **SimplifiedAssociatedValue << " ";
    });
    return isValidState() ? (isAtFixpoint() ? "simplified" : "maybe-simple")
                          : "not-simple";
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}

  /// See AAValueSimplify::getAssumedSimplifiedValue()
  std::optional<Value *>
  getAssumedSimplifiedValue(Attributor &A) const override {
    return SimplifiedAssociatedValue;
  }

  /// Ensure the return value is \p V with type \p Ty, if not possible return
  /// nullptr. If \p Check is true we will only verify such an operation would
  /// succeed and return a non-nullptr value if that is the case.
  static Value *ensureType(Attributor &A, Value &V, Type &Ty,
                           Instruction *CtxI, bool Check) {
    if (V.getType() == &Ty)
      return &V;
    if (CtxI && V.getType()->canLosslesslyBitCastTo(&Ty))
      return Check ? &V
                   : BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
                         &V, &Ty, "", CtxI->getIterator());
    return nullptr;
  }

  /// Reproduce \p I with type \p Ty or return nullptr if that is not possible.
  static Value *reproduceInst(Attributor &A,
                              const AbstractAttribute &QueryingAA,
                              Instruction &I, Type &Ty, Instruction *CtxI,
                              bool Check, ValueToValueMapTy &VMap) {
    assert(CtxI && "Cannot reproduce an instruction without context!");
    if (Check && (I.mayReadFromMemory() ||
                  !isSafeToSpeculativelyExecute(&I, CtxI)))
      return nullptr;
    for (Value *Op : I.operands()) {
      Value *NewOp = reproduceValue(A, QueryingAA, *Op, Ty, CtxI, Check, VMap);
      if (!NewOp) {
        assert(Check && "Manifest of new value unexpectedly failed!");
        return nullptr;
      }
      // ...
    }
    // ...
  }

  /// Reproduce \p V with type \p Ty or return nullptr if that is not possible.
  static Value *reproduceValue(Attributor &A,
                               const AbstractAttribute &QueryingAA, Value &V,
                               Type &Ty, Instruction *CtxI, bool Check,
                               ValueToValueMapTy &VMap) {
    if (const auto &NewV = VMap.lookup(&V))
      return NewV;
    bool UsedAssumedInformation = false;
    std::optional<Value *> SimpleV = A.getAssumedSimplified(
        V, QueryingAA, UsedAssumedInformation, AA::Interprocedural);
    if (!SimpleV.has_value())
      return PoisonValue::get(&Ty);
    Value *EffectiveV = &V;
    if (*SimpleV)
      EffectiveV = *SimpleV;
    // ...
      return ensureType(A, *EffectiveV, Ty, CtxI, Check);
    if (auto *I = dyn_cast<Instruction>(EffectiveV))
      if (Value *NewV = reproduceInst(A, QueryingAA, *I, Ty, CtxI, Check, VMap))
        return ensureType(A, *NewV, Ty, CtxI, Check);
    return nullptr;
  }

  /// Return a value we can use as replacement for the associated one, or
  /// nullptr if we don't have one that makes sense.
  Value *manifestReplacementValue(Attributor &A, Instruction *CtxI) const {
    Value *NewV = SimplifiedAssociatedValue
                      ? *SimplifiedAssociatedValue
                      : UndefValue::get(getAssociatedType());
    if (NewV && NewV != &getAssociatedValue()) {
      ValueToValueMapTy VMap;
      // First verify we can reproduce the value with the required type at the
      // context location before we actually create a copy.
      if (reproduceValue(A, *this, *NewV, *getAssociatedType(), CtxI,
                         /* CheckOnly */ true, VMap))
        return reproduceValue(A, *this, *NewV, *getAssociatedType(), CtxI,
                              /* CheckOnly */ false, VMap);
    }
    return nullptr;
  }

  /// Helper function for querying AAValueSimplify and updating candidate.
  bool checkAndUpdate(Attributor &A, const AbstractAttribute &QueryingAA,
                      const IRPosition &IRP, bool Simplify = true) {
    bool UsedAssumedInformation = false;
    std::optional<Value *> QueryingValueSimplified = &IRP.getAssociatedValue();
    if (Simplify)
      QueryingValueSimplified = A.getAssumedSimplified(
          IRP, QueryingAA, UsedAssumedInformation, AA::Interprocedural);
    return unionAssumed(QueryingValueSimplified);
  }

  /// Returns a candidate is found or not
  template <typename AAType> bool askSimplifiedValueFor(Attributor &A) {
    if (!getAssociatedValue().getType()->isIntegerTy())
      return false;

    const auto *AA =
        A.getAAFor<AAType>(*this, getIRPosition(), DepClassTy::NONE);
    if (!AA)
      return false;

    std::optional<Constant *> COpt = AA->getAssumedConstant(A);
    if (!COpt) {
      SimplifiedAssociatedValue = std::nullopt;
      A.recordDependence(*AA, *this, DepClassTy::OPTIONAL);
      return true;
    }
    if (auto *C = *COpt) {
      SimplifiedAssociatedValue = C;
      A.recordDependence(*AA, *this, DepClassTy::OPTIONAL);
      return true;
    }
    return false;
  }

  bool askSimplifiedValueForOtherAAs(Attributor &A) {
    if (askSimplifiedValueFor<AAValueConstantRange>(A))
      return true;
    if (askSimplifiedValueFor<AAPotentialConstantValues>(A))
      return true;
    return false;
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    for (auto &U : getAssociatedValue().uses()) {
      // Check if we need to adjust the insertion point to make sure the IR is
      // valid.
      Instruction *IP = dyn_cast<Instruction>(U.getUser());
      if (auto *PHI = dyn_cast_or_null<PHINode>(IP))
        IP = PHI->getIncomingBlock(U)->getTerminator();
      if (auto *NewV = manifestReplacementValue(A, IP)) {
        LLVM_DEBUG(dbgs() << "[ValueSimplify] " << getAssociatedValue()
                          << " -> " << *NewV << " :: " << *this << "\n");
        if (A.changeUseAfterManifest(U, *NewV))
          Changed = ChangeStatus::CHANGED;
      }
    }
    return Changed | AAValueSimplify::manifest(A);
  }

  /// See AbstractState::indicatePessimisticFixpoint(...).
  ChangeStatus indicatePessimisticFixpoint() override {
    SimplifiedAssociatedValue = &getAssociatedValue();
    return AAValueSimplify::indicatePessimisticFixpoint();
  }
};
struct AAValueSimplifyArgument final : AAValueSimplifyImpl {
  AAValueSimplifyArgument(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AAValueSimplifyImpl::initialize(A);
    if (A.hasAttr(getIRPosition(),
                  {Attribute::InAlloca, Attribute::Preallocated,
                   Attribute::StructRet, Attribute::Nest, Attribute::ByVal},
                  /* IgnoreSubsumingPositions */ true))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // Byval is only replacable if it is readonly otherwise we would write into
    // the replaced value and not the copy that byval creates implicitly.
    Argument *Arg = getAssociatedArgument();
    if (Arg->hasByValAttr()) {
      // ...
        return indicatePessimisticFixpoint();
    }

    auto Before = SimplifiedAssociatedValue;

    auto PredForCallSite = [&](AbstractCallSite ACS) {
      const IRPosition &ACSArgPos =
          IRPosition::callsite_argument(ACS, getCallSiteArgNo());
      // Check if a coresponding argument was found or if it is on not
      // associated (which can happen for callback calls).
      if (ACSArgPos.getPositionKind() == IRPosition::IRP_INVALID)
        return false;
      bool UsedAssumedInformation = false;
      std::optional<Constant *> SimpleArgOp =
          A.getAssumedConstant(ACSArgPos, *this, UsedAssumedInformation);
      if (!SimpleArgOp)
        return true;
      if (!*SimpleArgOp)
        return false;
      // ...
      return unionAssumed(*SimpleArgOp);
    };

    // Generate a answer specific to a call site context.
    bool Success;
    bool UsedAssumedInformation = false;
    if (hasCallBaseContext() &&
        getCallBaseContext()->getCalledOperand() == Arg->getParent())
      Success = PredForCallSite(
          AbstractCallSite(&getCallBaseContext()->getCalledOperandUse()));
    else
      Success = A.checkForAllCallSites(PredForCallSite, *this, true,
                                       UsedAssumedInformation);

    if (!Success)
      if (!askSimplifiedValueForOtherAAs(A))
        return indicatePessimisticFixpoint();

    // If a candidate was found in this update, return CHANGED.
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(value_simplify)
  }
};

struct AAValueSimplifyReturned : AAValueSimplifyImpl {
  AAValueSimplifyReturned(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  /// See AAValueSimplify::getAssumedSimplifiedValue()
  std::optional<Value *>
  getAssumedSimplifiedValue(Attributor &A) const override {
    if (!isValidState())
      return nullptr;
    return SimplifiedAssociatedValue;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto Before = SimplifiedAssociatedValue;

    auto ReturnInstCB = [&](Instruction &I) {
      auto &RI = cast<ReturnInst>(I);
      return checkAndUpdate(
          A, *this,
          IRPosition::value(*RI.getReturnValue(), getCallBaseContext()));
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(ReturnInstCB, *this, {Instruction::Ret},
                                   UsedAssumedInformation))
      if (!askSimplifiedValueForOtherAAs(A))
        return indicatePessimisticFixpoint();

    // If a candidate was found in this update, return CHANGED.
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  }

  ChangeStatus manifest(Attributor &A) override {
    // We queried AAValueSimplify for the returned values so they will be
    // replaced if a simplified form was found. Nothing to do here.
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(value_simplify)
  }
};

struct AAValueSimplifyFloating : AAValueSimplifyImpl {
  AAValueSimplifyFloating(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAValueSimplifyImpl::initialize(A);
    Value &V = getAnchorValue();

    // TODO: add other stuffs
    if (isa<Constant>(V))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    auto Before = SimplifiedAssociatedValue;
    if (!askSimplifiedValueForOtherAAs(A))
      return indicatePessimisticFixpoint();

    // If a candidate was found in this update, return CHANGED.
    return Before == SimplifiedAssociatedValue ? ChangeStatus::UNCHANGED
                                               : ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(value_simplify)
  }
};

struct AAValueSimplifyFunction : AAValueSimplifyImpl {
  AAValueSimplifyFunction(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    SimplifiedAssociatedValue = nullptr;
    indicateOptimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    llvm_unreachable(
        "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(value_simplify)
  }
};

struct AAValueSimplifyCallSite : AAValueSimplifyFunction {
  AAValueSimplifyCallSite(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyFunction(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CS_ATTR(value_simplify)
  }
};

struct AAValueSimplifyCallSiteReturned : AAValueSimplifyImpl {
  AAValueSimplifyCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AAValueSimplifyImpl::initialize(A);
    Function *Fn = getAssociatedFunction();
    assert(Fn && "Did expect an associated function");
    for (Argument &Arg : Fn->args()) {
      if (Arg.hasReturnedAttr()) {
        auto IRP = IRPosition::callsite_argument(*cast<CallBase>(getCtxI()),
                                                 Arg.getArgNo());
        if (IRP.getPositionKind() == IRPosition::IRP_CALL_SITE_ARGUMENT &&
            checkAndUpdate(A, *this, IRP))
          indicateOptimisticFixpoint();
        else
          indicatePessimisticFixpoint();
        return;
      }
    }
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(value_simplify)
  }
};

struct AAValueSimplifyCallSiteArgument : AAValueSimplifyFloating {
  AAValueSimplifyCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAValueSimplifyFloating(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    // TODO: We should avoid simplification duplication to begin with.
    auto *FloatAA = A.lookupAAFor<AAValueSimplify>(
        IRPosition::value(getAssociatedValue()), this, DepClassTy::NONE);
    if (FloatAA && FloatAA->getState().isValidState())
      return Changed;

    if (auto *NewV = manifestReplacementValue(A, getCtxI())) {
      Use &U = cast<CallBase>(&getAnchorValue())
                   ->getArgOperandUse(getCallSiteArgNo());
      if (A.changeUseAfterManifest(U, *NewV))
        Changed = ChangeStatus::CHANGED;
    }

    return Changed | AAValueSimplify::manifest(A);
  }

  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(value_simplify)
  }
};
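
// A minimal query sketch (the names `A`, `QueryingAA`, and `V` are
// illustrative placeholders): simplification results are normally consumed
// through the Attributor, which is in turn backed by the AAValueSimplify
// instances above. A std::nullopt result means "not known yet"; a null
// Value* means the value is dead.
//
//   bool UsedAssumedInformation = false;
//   std::optional<Value *> SimpleV = A.getAssumedSimplified(
//       IRPosition::value(V), &QueryingAA, UsedAssumedInformation,
//       AA::Interprocedural);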
/// ----------------------- Heap-To-Stack Conversion ---------------------------
struct AAHeapToStackFunction final : public AAHeapToStack {

  struct AllocationInfo {
    /// The call that allocates the memory.
    CallBase *const CB;

    /// The library function id for the allocation.
    LibFunc LibraryFunctionId = NotLibFunc;

    /// The status wrt. a rewrite.
    enum {
      STACK_DUE_TO_USE,
      STACK_DUE_TO_FREE,
      INVALID,
    } Status = STACK_DUE_TO_USE;

    /// Flag to indicate if we encountered a use that might free this
    /// allocation but which is not in the deallocation infos.
    bool HasPotentiallyFreeingUnknownUses = false;

    /// Flag to indicate that we should place the new alloca in the function
    /// entry block rather than where the call site (CB) is.
    bool MoveAllocaIntoEntry = true;

    /// The set of free calls that use this allocation.
    SmallSetVector<CallBase *, 1> PotentialFreeCalls{};
  };

  struct DeallocationInfo {
    /// The call that deallocates the memory.
    CallBase *const CB;
    /// The value freed by the call.
    Value *FreedOp;

    /// Flag to indicate if we don't know all objects this deallocation might
    /// free.
    bool MightFreeUnknownObjects = false;

    /// The set of allocation calls that are potentially freed.
    SmallSetVector<CallBase *, 1> PotentialAllocationCalls{};
  };

  AAHeapToStackFunction(const IRPosition &IRP, Attributor &A)
      : AAHeapToStack(IRP, A) {}

  ~AAHeapToStackFunction() override {
    // Ensure we call the destructor so we release any memory allocated in the
    // sets.
    for (auto &It : AllocationInfos)
      It.second->~AllocationInfo();
    for (auto &It : DeallocationInfos)
      It.second->~DeallocationInfo();
  }

  void initialize(Attributor &A) override {
    AAHeapToStack::initialize(A);

    const Function *F = getAnchorScope();
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);

    auto AllocationIdentifierCB = [&](Instruction &I) {
      CallBase *CB = dyn_cast<CallBase>(&I);
      if (!CB)
        return true;
      if (Value *FreedOp = getFreedOperand(CB, TLI)) {
        DeallocationInfos[CB] = new (A.Allocator) DeallocationInfo{CB, FreedOp};
        return true;
      }
      // To do heap to stack, we need to know that the allocation itself is
      // removable once uses are rewritten and we can initialize the alloca to
      // the same pattern as the original allocation result.
      if (isRemovableAlloc(CB, TLI)) {
        auto *I8Ty = Type::getInt8Ty(CB->getParent()->getContext());
        if (nullptr != getInitialValueOfAllocation(CB, TLI, I8Ty)) {
          AllocationInfo *AI = new (A.Allocator) AllocationInfo{CB};
          AllocationInfos[CB] = AI;
          if (TLI)
            TLI->getLibFunc(*CB, AI->LibraryFunctionId);
        }
      }
      return true;
    };

    bool UsedAssumedInformation = false;
    bool Success = A.checkForAllCallLikeInstructions(
        AllocationIdentifierCB, *this, UsedAssumedInformation,
        /* CheckBBLivenessOnly */ false,
        /* CheckPotentiallyDead */ true);
    (void)Success;
    assert(Success && "Did not expect the call base visit callback to fail!");

    Attributor::SimplifictionCallbackTy SCB =
        [](const IRPosition &, const AbstractAttribute *,
           bool &) -> std::optional<Value *> { return nullptr; };
    for (const auto &It : AllocationInfos)
      A.registerSimplificationCallback(IRPosition::callsite_returned(*It.first),
                                       SCB);
    for (const auto &It : DeallocationInfos)
      A.registerSimplificationCallback(IRPosition::callsite_returned(*It.first),
                                       SCB);
  }

  const std::string getAsStr(Attributor *A) const override {
    unsigned NumH2SMallocs = 0, NumInvalidMallocs = 0;
    for (const auto &It : AllocationInfos) {
      if (It.second->Status == AllocationInfo::INVALID)
        ++NumInvalidMallocs;
      else
        ++NumH2SMallocs;
    }
    return "[H2S] Mallocs Good/Bad: " + std::to_string(NumH2SMallocs) + "/" +
           std::to_string(NumInvalidMallocs);
  }

  /// See AbstractAttribute::trackStatistics().
  void trackStatistics() const override {
    STATS_DECL(
        MallocCalls, Function,
        "Number of malloc/calloc/aligned_alloc calls converted to allocas");
    for (const auto &It : AllocationInfos)
      if (It.second->Status != AllocationInfo::INVALID)
        ++BUILD_STAT_NAME(MallocCalls, Function);
  }

  bool isAssumedHeapToStack(const CallBase &CB) const override {
    if (isValidState())
      if (AllocationInfo *AI =
              AllocationInfos.lookup(const_cast<CallBase *>(&CB)))
        return AI->Status != AllocationInfo::INVALID;
    return false;
  }

  bool isAssumedHeapToStackRemovedFree(CallBase &CB) const override {
    if (!isValidState())
      return false;

    for (const auto &It : AllocationInfos) {
      AllocationInfo &AI = *It.second;
      if (AI.Status == AllocationInfo::INVALID)
        continue;

      if (AI.PotentialFreeCalls.count(&CB))
        return true;
    }

    return false;
  }

  ChangeStatus manifest(Attributor &A) override {
    assert(getState().isValidState() &&
           "Attempted to manifest an invalid state!");

    ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
    Function *F = getAnchorScope();
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);

    for (auto &It : AllocationInfos) {
      AllocationInfo &AI = *It.second;
      if (AI.Status == AllocationInfo::INVALID)
        continue;

      for (CallBase *FreeCall : AI.PotentialFreeCalls) {
        LLVM_DEBUG(dbgs() << "H2S: Removing free call: " << *FreeCall << "\n");
        A.deleteAfterManifest(*FreeCall);
        HasChanged = ChangeStatus::CHANGED;
      }

      LLVM_DEBUG(dbgs() << "H2S: Removing malloc-like call: " << *AI.CB
                        << "\n");

      auto Remark = [&](OptimizationRemark OR) {
        LibFunc IsAllocShared;
        if (TLI->getLibFunc(*AI.CB, IsAllocShared))
          if (IsAllocShared == LibFunc___kmpc_alloc_shared)
            return OR << "Moving globalized variable to the stack.";
        return OR << "Moving memory allocation from the heap to the stack.";
      };
      if (AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
        A.emitRemark<OptimizationRemark>(AI.CB, "OMP110", Remark);
      else
        A.emitRemark<OptimizationRemark>(AI.CB, "HeapToStack", Remark);

      const DataLayout &DL = A.getInfoCache().getDL();
      Value *Size;
      std::optional<APInt> SizeAPI = getSize(A, *this, AI);
      if (SizeAPI) {
        Size = ConstantInt::get(AI.CB->getContext(), *SizeAPI);
      } else {
        LLVMContext &Ctx = AI.CB->getContext();
        ObjectSizeOpts Opts;
        ObjectSizeOffsetEvaluator Eval(DL, TLI, Ctx, Opts);
        SizeOffsetValue SizeOffsetPair = Eval.compute(AI.CB);
        // ...
        Size = SizeOffsetPair.Size;
      }

      BasicBlock::iterator IP = AI.MoveAllocaIntoEntry
                                    ? F->getEntryBlock().begin()
                                    : AI.CB->getIterator();

      Align Alignment(1);
      if (MaybeAlign RetAlign = AI.CB->getRetAlign())
        Alignment = std::max(Alignment, *RetAlign);
      if (Value *Align = getAllocAlignment(AI.CB, TLI)) {
        std::optional<APInt> AlignmentAPI = getAPInt(A, *this, *Align);
        assert(AlignmentAPI && AlignmentAPI->getZExtValue() > 0 &&
               "Expected an alignment during manifest!");
        Alignment =
            std::max(Alignment, assumeAligned(AlignmentAPI->getZExtValue()));
      }

      // TODO: Hoist the alloca towards the function entry.
      unsigned AS = DL.getAllocaAddrSpace();
      Instruction *Alloca =
          new AllocaInst(Type::getInt8Ty(F->getContext()), AS, Size, Alignment,
                         AI.CB->getName() + ".h2s", IP);

      if (Alloca->getType() != AI.CB->getType())
        Alloca = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
            Alloca, AI.CB->getType(), "malloc_cast", AI.CB->getIterator());

      auto *I8Ty = Type::getInt8Ty(F->getContext());
      auto *InitVal = getInitialValueOfAllocation(AI.CB, TLI, I8Ty);
      assert(InitVal &&
             "Must be able to materialize initial memory state of allocation");

      A.changeAfterManifest(IRPosition::inst(*AI.CB), *Alloca);

      if (auto *II = dyn_cast<InvokeInst>(AI.CB)) {
        auto *NBB = II->getNormalDest();
        BranchInst::Create(NBB, AI.CB->getParent());
        A.deleteAfterManifest(*AI.CB);
      } else {
        A.deleteAfterManifest(*AI.CB);
      }

      // Initialize the alloca with the same value as used by the allocation
      // function.  We can skip undef as the initial value of an alloc is
      // undef, and the memset would simply end up being DSEd.
      if (!isa<UndefValue>(InitVal)) {
        IRBuilder<> Builder(Alloca->getNextNode());
        // TODO: Use alignment above if align!=1
        Builder.CreateMemSet(Alloca, InitVal, Size, std::nullopt);
      }
      HasChanged = ChangeStatus::CHANGED;
    }

    return HasChanged;
  }

  std::optional<APInt> getAPInt(Attributor &A, const AbstractAttribute &AA,
                                Value &V) {
    bool UsedAssumedInformation = false;
    std::optional<Constant *> SimpleV =
        A.getAssumedConstant(V, AA, UsedAssumedInformation);
    if (!SimpleV)
      return APInt(64, 0);
    if (auto *CI = dyn_cast_or_null<ConstantInt>(*SimpleV))
      return CI->getValue();
    return std::nullopt;
  }

  std::optional<APInt> getSize(Attributor &A, const AbstractAttribute &AA,
                               AllocationInfo &AI) {
    auto Mapper = [&](const Value *V) -> const Value * {
      bool UsedAssumedInformation = false;
      if (std::optional<Constant *> SimpleV =
              A.getAssumedConstant(*V, AA, UsedAssumedInformation))
        if (*SimpleV)
          V = *SimpleV;
      return V;
    };

    const Function *F = getAnchorScope();
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
    return getAllocSize(AI.CB, TLI, Mapper);
  }

  /// Collection of all malloc-like calls in a function with associated
  /// information.
  MapVector<CallBase *, AllocationInfo *> AllocationInfos;

  /// Collection of all free-like calls in a function with associated
  /// information.
  MapVector<CallBase *, DeallocationInfo *> DeallocationInfos;

  ChangeStatus updateImpl(Attributor &A) override;
};
6982ChangeStatus AAHeapToStackFunction::updateImpl(Attributor &
A) {
6985 const auto *TLI =
A.getInfoCache().getTargetLibraryInfoForFunction(*
F);
6987 const auto *LivenessAA =
6990 MustBeExecutedContextExplorer *Explorer =
6991 A.getInfoCache().getMustBeExecutedContextExplorer();
6993 bool StackIsAccessibleByOtherThreads =
6994 A.getInfoCache().stackIsAccessibleByOtherThreads();
6997 A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(*F);
6998 std::optional<bool> MayContainIrreducibleControl;
7000 if (&
F->getEntryBlock() == &BB)
7002 if (!MayContainIrreducibleControl.has_value())
7004 if (*MayContainIrreducibleControl)
7013 bool HasUpdatedFrees =
false;
7015 auto UpdateFrees = [&]() {
7016 HasUpdatedFrees =
true;
7018 for (
auto &It : DeallocationInfos) {
7019 DeallocationInfo &DI = *It.second;
7022 if (DI.MightFreeUnknownObjects)
7026 bool UsedAssumedInformation =
false;
7027 if (
A.isAssumedDead(*DI.CB,
this, LivenessAA, UsedAssumedInformation,
7034 LLVM_DEBUG(
dbgs() <<
"[H2S] Unknown underlying object for free!\n");
7035 DI.MightFreeUnknownObjects =
true;
7048 DI.MightFreeUnknownObjects =
true;
7052 AllocationInfo *AI = AllocationInfos.lookup(ObjCB);
7054 LLVM_DEBUG(
dbgs() <<
"[H2S] Free of a non-allocation object: " << *Obj
7056 DI.MightFreeUnknownObjects =
true;
7060 DI.PotentialAllocationCalls.insert(ObjCB);
  auto FreeCheck = [&](AllocationInfo &AI) {
    // If the stack is not accessible by other threads, the allocation must not
    // escape unless the function is known nosync.
    if (!StackIsAccessibleByOtherThreads) {
      bool IsKnownNoSync;
      if (!AA::hasAssumedIRAttr<Attribute::NoSync>(
              A, this, getIRPosition(), DepClassTy::OPTIONAL, IsKnownNoSync)) {
        LLVM_DEBUG(
            dbgs() << "[H2S] found an escaping use, stack is not accessible by "
                      "other threads and function is not nosync:\n");
        return false;
      }
    }
    if (!HasUpdatedFrees)
      UpdateFrees();

    // TODO: Allow multi exit functions that have different free calls.
    if (AI.PotentialFreeCalls.size() != 1) {
      LLVM_DEBUG(dbgs() << "[H2S] did not find one free call but "
                        << AI.PotentialFreeCalls.size() << "\n");
      return false;
    }
    CallBase *UniqueFree = *AI.PotentialFreeCalls.begin();
    DeallocationInfo *DI = DeallocationInfos.lookup(UniqueFree);
    if (!DI) {
      LLVM_DEBUG(
          dbgs() << "[H2S] unique free call was not known as deallocation call "
                 << *UniqueFree << "\n");
      return false;
    }
    if (DI->MightFreeUnknownObjects) {
      LLVM_DEBUG(
          dbgs() << "[H2S] unique free call might free unknown allocations\n");
      return false;
    }
    if (DI->PotentialAllocationCalls.empty())
      return true;
    if (DI->PotentialAllocationCalls.size() > 1) {
      LLVM_DEBUG(dbgs() << "[H2S] unique free call might free "
                        << DI->PotentialAllocationCalls.size()
                        << " different allocations\n");
      return false;
    }
    if (*DI->PotentialAllocationCalls.begin() != AI.CB) {
      LLVM_DEBUG(
          dbgs()
          << "[H2S] unique free call not known to free this allocation but "
          << **DI->PotentialAllocationCalls.begin() << "\n");
      return false;
    }

    // __kmpc_alloc_shared and __kmpc_free_shared are matched by construction.
    if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared) {
      Instruction *CtxI = isa<InvokeInst>(AI.CB) ? AI.CB : AI.CB->getNextNode();
      if (!Explorer || !Explorer->findInContextOf(UniqueFree, CtxI)) {
        LLVM_DEBUG(dbgs() << "[H2S] unique free call might not be executed "
                             "with the allocation "
                          << *UniqueFree << "\n");
        return false;
      }
    }
    return true;
  };
  auto UsesCheck = [&](AllocationInfo &AI) {
    bool ValidUsesOnly = true;

    auto Pred = [&](const Use &U, bool &Follow) -> bool {
      Instruction *UserI = cast<Instruction>(U.getUser());
      if (isa<LoadInst>(UserI))
        return true;
      if (auto *SI = dyn_cast<StoreInst>(UserI)) {
        if (SI->getValueOperand() == U.get()) {
          LLVM_DEBUG(dbgs()
                     << "[H2S] escaping store to memory: " << *UserI << "\n");
          ValidUsesOnly = false;
        }
        // A store into the malloc'ed memory itself is fine.
        return true;
      }
      if (auto *CB = dyn_cast<CallBase>(UserI)) {
        if (!CB->isArgOperand(&U) || CB->isLifetimeStartOrEnd())
          return true;
        if (DeallocationInfos.count(CB)) {
          AI.PotentialFreeCalls.insert(CB);
          return true;
        }

        unsigned ArgNo = CB->getArgOperandNo(&U);
        auto CBIRP = IRPosition::callsite_argument(*CB, ArgNo);

        bool IsKnownNoCapture;
        bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
            A, this, CBIRP, DepClassTy::OPTIONAL, IsKnownNoCapture);

        // If a call site argument use is nofree, we are fine.
        bool IsKnownNoFree;
        bool IsAssumedNoFree = AA::hasAssumedIRAttr<Attribute::NoFree>(
            A, this, CBIRP, DepClassTy::OPTIONAL, IsKnownNoFree);

        if (!IsAssumedNoCapture ||
            (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
             !IsAssumedNoFree)) {
          AI.HasPotentiallyFreeingUnknownUses |= !IsAssumedNoFree;

          // Emit a missed remark if this is missed OpenMP globalization.
          auto Remark = [&](OptimizationRemarkMissed ORM) {
            return ORM
                   << "Could not move globalized variable to the stack. "
                      "Variable is potentially captured in call. Mark "
                      "parameter as `__attribute__((noescape))` to override.";
          };

          if (ValidUsesOnly &&
              AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared)
            A.emitRemark<OptimizationRemarkMissed>(CB, "OMP113", Remark);

          LLVM_DEBUG(dbgs() << "[H2S] Bad user: " << *UserI << "\n");
          ValidUsesOnly = false;
        }
        return true;
      }

      if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI) ||
          isa<PHINode>(UserI) || isa<SelectInst>(UserI)) {
        Follow = true;
        return true;
      }
      // Unknown user for which we can not do anything.
      LLVM_DEBUG(dbgs() << "[H2S] Unknown user: " << *UserI << "\n");
      ValidUsesOnly = false;
      return true;
    };
    if (!A.checkForAllUses(Pred, *this, *AI.CB, /* CheckBBLivenessOnly */ false,
                           DepClassTy::OPTIONAL, /* IgnoreDroppableUses */ true,
                           [&](const Use &OldU, const Use &NewU) {
                             auto *SI = dyn_cast<StoreInst>(OldU.getUser());
                             return !SI || StackIsAccessibleByOtherThreads ||
                                    AA::isAssumedThreadLocalObject(
                                        A, *SI->getPointerOperand(), *this);
                           }))
      return false;
    return ValidUsesOnly;
  };
  // The actual update starts here. We look at all allocations and depending on
  // their status perform the appropriate check(s).
  for (auto &It : AllocationInfos) {
    AllocationInfo &AI = *It.second;
    if (AI.Status == AllocationInfo::INVALID)
      continue;

    if (Value *Align = getAllocAlignment(AI.CB, TLI)) {
      std::optional<APInt> APAlign = getAPInt(A, *this, *Align);
      if (!APAlign) {
        LLVM_DEBUG(dbgs() << "[H2S] Unknown allocation alignment: " << *AI.CB
                          << "\n");
        AI.Status = AllocationInfo::INVALID;
        Changed = ChangeStatus::CHANGED;
        continue;
      }
      if (APAlign->ugt(llvm::Value::MaximumAlignment) ||
          !APAlign->isPowerOf2()) {
        LLVM_DEBUG(dbgs() << "[H2S] Invalid allocation alignment: " << APAlign
                          << "\n");
        AI.Status = AllocationInfo::INVALID;
        Changed = ChangeStatus::CHANGED;
        continue;
      }
    }

    std::optional<APInt> Size = getSize(A, *this, AI);
    if (AI.LibraryFunctionId != LibFunc___kmpc_alloc_shared &&
        MaxHeapToStackSize != -1) {
      if (!Size || Size->ugt(MaxHeapToStackSize)) {
        LLVM_DEBUG({
          if (!Size)
            dbgs() << "[H2S] Unknown allocation size: " << *AI.CB << "\n";
          else
            dbgs() << "[H2S] Allocation size too large: " << *AI.CB << " vs. "
                   << MaxHeapToStackSize << "\n";
        });
        AI.Status = AllocationInfo::INVALID;
        Changed = ChangeStatus::CHANGED;
        continue;
      }
    }

    switch (AI.Status) {
    case AllocationInfo::STACK_DUE_TO_USE:
      if (UsesCheck(AI))
        break;
      AI.Status = AllocationInfo::STACK_DUE_TO_FREE;
      [[fallthrough]];
    case AllocationInfo::STACK_DUE_TO_FREE:
      if (FreeCheck(AI))
        break;
      AI.Status = AllocationInfo::INVALID;
      Changed = ChangeStatus::CHANGED;
      break;
    case AllocationInfo::INVALID:
      llvm_unreachable("Invalid allocations should never reach this point!");
    };

    // Check if we still think we can move the allocation into the entry block.
    bool IsGlobalizedLocal =
        AI.LibraryFunctionId == LibFunc___kmpc_alloc_shared;
    if (AI.MoveAllocaIntoEntry &&
        (!Size.has_value() ||
         (!IsGlobalizedLocal && IsInLoop(*AI.CB->getParent()))))
      AI.MoveAllocaIntoEntry = false;
  }

  return Changed;
}
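// Summary of the update step above: an allocation stays convertible only if
// its alignment and size are known and acceptable and either all uses are
// benign (STACK_DUE_TO_USE) or a unique, matching, must-execute free exists
// (STACK_DUE_TO_FREE); otherwise it is marked INVALID.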
struct AAPrivatizablePtrImpl : public AAPrivatizablePtr {
  AAPrivatizablePtrImpl(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtr(IRP, A), PrivatizableType(std::nullopt) {}

  ChangeStatus indicatePessimisticFixpoint() override {
    AAPrivatizablePtr::indicatePessimisticFixpoint();
    PrivatizableType = nullptr;
    return ChangeStatus::CHANGED;
  }

  /// Identify the type we can chose for a private copy of the underlying
  /// memory. std::nullopt means it is not clear yet, nullptr means there is
  /// none.
  virtual std::optional<Type *> identifyPrivatizableType(Attributor &A) = 0;

  /// Return a privatizable type that encloses both T0 and T1.
  std::optional<Type *> combineTypes(std::optional<Type *> T0,
                                     std::optional<Type *> T1) {
    if (!T0)
      return T1;
    if (!T1)
      return T0;
    if (T0 == T1)
      return T0;
    return nullptr;
  }

  std::optional<Type *> getPrivatizableType() const override {
    return PrivatizableType;
  }

  const std::string getAsStr(Attributor *A) const override {
    return isAssumedPrivatizablePtr() ? "[priv]" : "[no-priv]";
  }

protected:
  std::optional<Type *> PrivatizableType;
};
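// Illustrative sketch (hypothetical IR, not from this file): a "privatizable"
// pointer argument such as
//   define void @callee(ptr byval(%struct.S) %p)
// can have its pointee passed by value instead, i.e. the signature is
// rewritten to take the struct's elements directly and a private copy is
// materialized inside the callee.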
struct AAPrivatizablePtrArgument final : public AAPrivatizablePtrImpl {
  AAPrivatizablePtrArgument(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrImpl(IRP, A) {}

  /// See AAPrivatizablePtrImpl::identifyPrivatizableType(...)
  std::optional<Type *> identifyPrivatizableType(Attributor &A) override {
    // If this is a byval argument and we know all the call sites (so we can
    // rewrite them), there is no need to check them explicitly.
    bool UsedAssumedInformation = false;
    SmallVector<Attribute, 1> Attrs;
    A.getAttrs(getIRPosition(), {Attribute::ByVal}, Attrs,
               /* IgnoreSubsumingPositions */ true);
    if (!Attrs.empty() &&
        A.checkForAllCallSites([](AbstractCallSite ACS) { return true; }, *this,
                               true, UsedAssumedInformation))
      return Attrs[0].getValueAsType();

    std::optional<Type *> Ty;
    unsigned ArgNo = getIRPosition().getCallSiteArgNo();

    // Make sure the associated call site argument has the same type at all
    // call sites.
    auto CallSiteCheck = [&](AbstractCallSite ACS) {
      IRPosition ACSArgPos = IRPosition::callsite_argument(ACS, ArgNo);
      if (ACSArgPos.getPositionKind() == IRPosition::IRP_INVALID)
        return false;

      // Check that all call sites agree on a type.
      auto *PrivCSArgAA =
          A.getAAFor<AAPrivatizablePtr>(*this, ACSArgPos, DepClassTy::REQUIRED);
      if (!PrivCSArgAA)
        return false;
      std::optional<Type *> CSTy = PrivCSArgAA->getPrivatizableType();

      LLVM_DEBUG({
        dbgs() << "[AAPrivatizablePtr] ACSPos: " << ACSArgPos << ", CSTy: ";
        if (CSTy && *CSTy)
          (*CSTy)->print(dbgs());
        else if (CSTy)
          dbgs() << "<nullptr>";
        else
          dbgs() << "<none>";
      });

      Ty = combineTypes(Ty, CSTy);

      LLVM_DEBUG({
        dbgs() << " : New Type: ";
        if (Ty && *Ty)
          (*Ty)->print(dbgs());
        else if (Ty)
          dbgs() << "<nullptr>";
        else
          dbgs() << "<none>";
        dbgs() << "\n";
      });

      return !Ty || *Ty;
    };

    if (!A.checkForAllCallSites(CallSiteCheck, *this, true,
                                UsedAssumedInformation))
      return nullptr;
    return Ty;
  }
  /// See AbstractAttribute::updateImpl(...)
  ChangeStatus updateImpl(Attributor &A) override {
    PrivatizableType = identifyPrivatizableType(A);
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    if (!*PrivatizableType)
      return indicatePessimisticFixpoint();

    // The dependence is optional so we do not give up once we give up on the
    // alignment.
    A.getAAFor<AAAlign>(*this, IRPosition::value(getAssociatedValue()),
                        DepClassTy::OPTIONAL);

    // Avoid arguments with padding for now.
    if (!A.hasAttr(getIRPosition(), Attribute::ByVal) &&
        !isDenselyPacked(*PrivatizableType, A.getDataLayout())) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Padding detected\n");
      return indicatePessimisticFixpoint();
    }

    // Collect the types that will replace the privatizable type in the
    // function signature.
    SmallVector<Type *, 16> ReplacementTypes;
    identifyReplacementTypes(*PrivatizableType, ReplacementTypes);

    // Verify callee and caller agree on how the promoted argument would be
    // passed.
    Function &Fn = *getIRPosition().getAnchorScope();
    const auto *TTI =
        A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(Fn);
    if (!TTI) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Missing TTI for function "
                        << Fn.getName() << "\n");
      return indicatePessimisticFixpoint();
    }

    auto CallSiteCheck = [&](AbstractCallSite ACS) {
      CallBase *CB = ACS.getInstruction();
      return TTI->areTypesABICompatible(
          CB->getCaller(),
          dyn_cast_if_present<Function>(CB->getCalledOperand()),
          ReplacementTypes);
    };
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallSites(CallSiteCheck, *this, true,
                                UsedAssumedInformation)) {
      LLVM_DEBUG(
          dbgs() << "[AAPrivatizablePtr] ABI incompatibility detected for "
                 << Fn.getName() << "\n");
      return indicatePessimisticFixpoint();
    }

    // Register a rewrite of the argument.
    Argument *Arg = getAssociatedArgument();
    if (!A.isValidFunctionSignatureRewrite(*Arg, ReplacementTypes)) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Rewrite not valid\n");
      return indicatePessimisticFixpoint();
    }
    // Check whether we can privatize the argument in the context of callback
    // callees as well.
    auto IsCompatiblePrivArgOfCallback = [&](CallBase &CB) {
      SmallVector<const Use *, 4> CallbackUses;
      AbstractCallSite::getCallbackUses(CB, CallbackUses);
      for (const Use *U : CallbackUses) {
        AbstractCallSite CBACS(U);
        assert(CBACS && CBACS.isCallbackCall());
        for (Argument &CBArg : CBACS.getCalledFunction()->args()) {
          int CBArgNo = CBACS.getCallArgOperandNo(CBArg);

          LLVM_DEBUG({
            dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
                   << " check if can be privatized in the context of its parent ("
                   << Arg->getParent()->getName()
                   << ")\n[AAPrivatizablePtr] because it is an argument in a "
                      "callback ("
                   << CBArgNo << "@" << CBACS.getCalledFunction()->getName()
                   << ")\n[AAPrivatizablePtr] " << CBArg << " : "
                   << CBACS.getCallArgOperand(CBArg) << " vs "
                   << CB.getArgOperand(ArgNo) << "\n"
                   << "[AAPrivatizablePtr] " << CBArg << " : "
                   << CBACS.getCallArgOperandNo(CBArg) << " vs " << ArgNo
                   << "\n";
          });

          if (CBArgNo != int(ArgNo))
            continue;
          const auto *CBArgPrivAA = A.getAAFor<AAPrivatizablePtr>(
              *this, IRPosition::argument(CBArg), DepClassTy::REQUIRED);
          if (CBArgPrivAA && CBArgPrivAA->isValidState()) {
            auto CBArgPrivTy = CBArgPrivAA->getPrivatizableType();
            if (!CBArgPrivTy)
              continue;
            if (*CBArgPrivTy == PrivatizableType)
              continue;
          }

          LLVM_DEBUG({
            dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
                   << " cannot be privatized in the context of its parent ("
                   << Arg->getParent()->getName()
                   << ")\n[AAPrivatizablePtr] because it is an argument in a "
                      "callback ("
                   << CBArgNo << "@" << CBACS.getCalledFunction()->getName()
                   << ").\n[AAPrivatizablePtr] for which the argument "
                      "privatization is not compatible.\n";
          });
          return false;
        }
      }
      return true;
    };
    // Check whether we can privatize the argument in the context of direct
    // callers as well.
    auto IsCompatiblePrivArgOfDirectCS = [&](AbstractCallSite ACS) {
      assert(!ACS.isCallbackCall() &&
             "Expected a direct call operand for callback call operand");

      Function *DCCallee = dyn_cast_if_present<Function>(
          ACS.getInstruction()->getCalledOperand());
      int DCArgNo = ACS.getCallArgOperandNo(ArgNo);
      LLVM_DEBUG({
        dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
               << " check if be privatized in the context of its parent ("
               << Arg->getParent()->getName()
               << ")\n[AAPrivatizablePtr] because it is an argument in a "
                  "direct call of ("
               << DCArgNo << "@" << DCCallee->getName() << ").\n";
      });

      if (unsigned(DCArgNo) < DCCallee->arg_size()) {
        const auto *DCArgPrivAA = A.getAAFor<AAPrivatizablePtr>(
            *this, IRPosition::argument(*DCCallee->getArg(DCArgNo)),
            DepClassTy::REQUIRED);
        if (DCArgPrivAA && DCArgPrivAA->isValidState()) {
          auto DCArgPrivTy = DCArgPrivAA->getPrivatizableType();
          if (!DCArgPrivTy)
            return true;
          if (*DCArgPrivTy == PrivatizableType)
            return true;
        }
      }

      LLVM_DEBUG({
        dbgs() << "[AAPrivatizablePtr] Argument " << *Arg
               << " cannot be privatized in the context of its parent ("
               << Arg->getParent()->getName()
               << ")\n[AAPrivatizablePtr] because it is an argument in a "
                  "direct call of ("
               << ACS.getInstruction()->getCalledOperand()->getName()
               << ").\n[AAPrivatizablePtr] for which the argument "
                  "privatization is not compatible.\n";
      });
      return false;
    };

    // Helper to dispatch on the kind of call site we see.
    auto IsCompatiblePrivArgOfOtherCallSite = [&](AbstractCallSite ACS) {
      if (ACS.isDirectCall())
        return IsCompatiblePrivArgOfCallback(*ACS.getInstruction());
      if (ACS.isCallbackCall())
        return IsCompatiblePrivArgOfDirectCS(ACS);
      return false;
    };

    if (!A.checkForAllCallSites(IsCompatiblePrivArgOfOtherCallSite, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
  /// Given a privatizable type \p PrivType, collect its constituents in
  /// \p ReplacementTypes.
  static void
  identifyReplacementTypes(Type *PrivType,
                           SmallVectorImpl<Type *> &ReplacementTypes) {
    assert(PrivType && "Expected privatizable type!");

    // Traverse the type, extract constituent types on the outermost level.
    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++)
        ReplacementTypes.push_back(PrivStructType->getElementType(u));
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      ReplacementTypes.append(PrivArrayType->getNumElements(),
                              PrivArrayType->getElementType());
    } else {
      ReplacementTypes.push_back(PrivType);
    }
  }
  /// Initialize \p Base according to the type \p PrivType at position \p IP.
  /// The values needed are taken from the arguments of \p F starting at
  /// position \p ArgNo.
  static void createInitialization(Type *PrivType, Value &Base, Function &F,
                                    unsigned ArgNo, BasicBlock::iterator IP) {
    assert(PrivType && "Expected privatizable type!");

    IRBuilder<NoFolder> IRB(IP->getParent(), IP);
    const DataLayout &DL = F.getDataLayout();

    // Traverse the type, build GEPs and stores.
    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      const StructLayout *PrivStructLayout = DL.getStructLayout(PrivStructType);
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
        Value *Ptr =
            constructPointer(&Base, PrivStructLayout->getElementOffset(u), IRB);
        new StoreInst(F.getArg(ArgNo + u), Ptr, IP);
      }
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      Type *PointeeTy = PrivArrayType->getElementType();
      uint64_t PointeeTySize = DL.getTypeStoreSize(PointeeTy);
      for (unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
        Value *Ptr = constructPointer(&Base, u * PointeeTySize, IRB);
        new StoreInst(F.getArg(ArgNo + u), Ptr, IP);
      }
    } else {
      new StoreInst(F.getArg(ArgNo), &Base, IP);
    }
  }
  /// Extract values from \p Base according to the type \p PrivType at the
  /// call position \p ACS. The values are appended to \p ReplacementValues.
  void createReplacementValues(Align Alignment, Type *PrivType,
                               AbstractCallSite ACS, Value *Base,
                               SmallVectorImpl<Value *> &ReplacementValues) {
    assert(Base && "Expected base value!");
    assert(PrivType && "Expected privatizable type!");
    Instruction *IP = ACS.getInstruction();

    IRBuilder<NoFolder> IRB(IP);
    const DataLayout &DL = IP->getDataLayout();

    // Traverse the type, build GEPs and loads.
    if (auto *PrivStructType = dyn_cast<StructType>(PrivType)) {
      const StructLayout *PrivStructLayout = DL.getStructLayout(PrivStructType);
      for (unsigned u = 0, e = PrivStructType->getNumElements(); u < e; u++) {
        Type *PointeeTy = PrivStructType->getElementType(u);
        Value *Ptr =
            constructPointer(Base, PrivStructLayout->getElementOffset(u), IRB);
        LoadInst *L = new LoadInst(PointeeTy, Ptr, "", IP->getIterator());
        L->setAlignment(Alignment);
        ReplacementValues.push_back(L);
      }
    } else if (auto *PrivArrayType = dyn_cast<ArrayType>(PrivType)) {
      Type *PointeeTy = PrivArrayType->getElementType();
      uint64_t PointeeTySize = DL.getTypeStoreSize(PointeeTy);
      for (unsigned u = 0, e = PrivArrayType->getNumElements(); u < e; u++) {
        Value *Ptr = constructPointer(Base, u * PointeeTySize, IRB);
        LoadInst *L = new LoadInst(PointeeTy, Ptr, "", IP->getIterator());
        L->setAlignment(Alignment);
        ReplacementValues.push_back(L);
      }
    } else {
      LoadInst *L = new LoadInst(PrivType, Base, "", IP->getIterator());
      L->setAlignment(Alignment);
      ReplacementValues.push_back(L);
    }
  }
  /// See AbstractAttribute::manifest(...)
  ChangeStatus manifest(Attributor &A) override {
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    assert(*PrivatizableType && "Expected privatizable type!");

    // Collect all tail calls in the function as we cannot allow new allocas to
    // escape into tail recursion.
    SmallVector<CallInst *, 16> TailCalls;
    bool UsedAssumedInformation = false;
    if (!A.checkForAllInstructions(
            [&](Instruction &I) {
              CallInst &CI = cast<CallInst>(I);
              if (CI.isTailCall())
                TailCalls.push_back(&CI);
              return true;
            },
            *this, {Instruction::Call}, UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;

    Argument *Arg = getAssociatedArgument();
    // Query AAAlign attribute for alignment of associated argument to
    // determine the best alignment of loads.
    const auto *AlignAA =
        A.getAAFor<AAAlign>(*this, IRPosition::value(*Arg), DepClassTy::NONE);

    // Callback to repair the associated function. A new alloca is placed at
    // the beginning and initialized with the values passed through arguments.
    // The new alloca replaces the use of the old pointer argument.
    Attributor::ArgumentReplacementInfo::CalleeRepairCBTy FnRepairCB =
        [=](const Attributor::ArgumentReplacementInfo &ARI,
            Function &ReplacementFn, Function::arg_iterator ArgIt) {
          BasicBlock &EntryBB = ReplacementFn.getEntryBlock();
          BasicBlock::iterator IP = EntryBB.getFirstInsertionPt();
          const DataLayout &DL = IP->getDataLayout();
          unsigned AS = DL.getAllocaAddrSpace();
          Instruction *AI = new AllocaInst(*PrivatizableType, AS,
                                           Arg->getName() + ".priv", IP);
          createInitialization(*PrivatizableType, *AI, ReplacementFn,
                               ArgIt->getArgNo(), IP);

          if (AI->getType() != Arg->getType())
            AI = BitCastInst::CreatePointerBitCastOrAddrSpaceCast(
                AI, Arg->getType(), "", IP);
          Arg->replaceAllUsesWith(AI);

          for (CallInst *CI : TailCalls)
            CI->setTailCall(false);
        };

    // Callback to repair a call site of the associated function. The elements
    // of the privatizable type are loaded prior to the call and passed to the
    // new function version.
    Attributor::ArgumentReplacementInfo::ACSRepairCBTy ACSRepairCB =
        [=](const Attributor::ArgumentReplacementInfo &ARI,
            AbstractCallSite ACS, SmallVectorImpl<Value *> &NewArgOperands) {
          // When no alignment is specified for the load instruction,
          // natural alignment is assumed.
          createReplacementValues(
              AlignAA ? AlignAA->getAssumedAlign() : Align(0),
              *PrivatizableType, ACS,
              ACS.getCallArgOperand(ARI.getReplacedArg().getArgNo()),
              NewArgOperands);
        };

    // Collect the types that will replace the privatizable type in the
    // function signature.
    SmallVector<Type *, 16> ReplacementTypes;
    identifyReplacementTypes(*PrivatizableType, ReplacementTypes);

    // Register a rewrite of the argument.
    if (A.registerFunctionSignatureRewrite(*Arg, ReplacementTypes,
                                           std::move(FnRepairCB),
                                           std::move(ACSRepairCB)))
      return ChangeStatus::CHANGED;
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(privatizable_ptr)
  }
};
struct AAPrivatizablePtrFloating : public AAPrivatizablePtrImpl {
  AAPrivatizablePtrFloating(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // TODO: We can privatize more than arguments.
    indicatePessimisticFixpoint();
  }

  ChangeStatus updateImpl(Attributor &A) override {
    llvm_unreachable(
        "AAPrivatizablePtrFloating: updateImpl will not be called");
  }

  /// See AAPrivatizablePtrImpl::identifyPrivatizableType(...)
  std::optional<Type *> identifyPrivatizableType(Attributor &A) override {
    Value *Obj = getUnderlyingObject(&getAssociatedValue());
    if (!Obj) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] No underlying object found!\n");
      return nullptr;
    }

    if (auto *AI = dyn_cast<AllocaInst>(Obj))
      if (auto *CI = dyn_cast<ConstantInt>(AI->getArraySize()))
        if (CI->isOne())
          return AI->getAllocatedType();
    if (auto *Arg = dyn_cast<Argument>(Obj)) {
      auto *PrivArgAA = A.getAAFor<AAPrivatizablePtr>(
          *this, IRPosition::argument(*Arg), DepClassTy::REQUIRED);
      if (PrivArgAA && PrivArgAA->isAssumedPrivatizablePtr())
        return PrivArgAA->getPrivatizableType();
    }

    LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] Underlying object neither valid "
                         "alloca nor privatizable argument: "
                      << *Obj << "!\n");
    return nullptr;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(privatizable_ptr)
  }
};
struct AAPrivatizablePtrCallSiteArgument final
    : public AAPrivatizablePtrFloating {
  AAPrivatizablePtrCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    if (A.hasAttr(getIRPosition(), Attribute::ByVal))
      indicateOptimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    PrivatizableType = identifyPrivatizableType(A);
    if (!PrivatizableType)
      return ChangeStatus::UNCHANGED;
    if (!*PrivatizableType)
      return indicatePessimisticFixpoint();

    const IRPosition &IRP = getIRPosition();
    bool IsKnownNoCapture;
    bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
        A, this, IRP, DepClassTy::REQUIRED, IsKnownNoCapture);
    if (!IsAssumedNoCapture) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer might be captured!\n");
      return indicatePessimisticFixpoint();
    }

    bool IsKnownNoAlias;
    if (!AA::hasAssumedIRAttr<Attribute::NoAlias>(
            A, this, IRP, DepClassTy::REQUIRED, IsKnownNoAlias)) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer might alias!\n");
      return indicatePessimisticFixpoint();
    }

    bool IsKnown;
    if (!AA::isAssumedReadOnly(A, IRP, *this, IsKnown)) {
      LLVM_DEBUG(dbgs() << "[AAPrivatizablePtr] pointer is written!\n");
      return indicatePessimisticFixpoint();
    }

    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(privatizable_ptr)
  }
};
struct AAPrivatizablePtrCallSiteReturned final
    : public AAPrivatizablePtrFloating {
  AAPrivatizablePtrCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // TODO: We can privatize more than arguments.
    indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(privatizable_ptr)
  }
};

struct AAPrivatizablePtrReturned final : public AAPrivatizablePtrFloating {
  AAPrivatizablePtrReturned(const IRPosition &IRP, Attributor &A)
      : AAPrivatizablePtrFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // TODO: We can privatize more than arguments.
    indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(privatizable_ptr)
  }
};
struct AAMemoryBehaviorImpl : public AAMemoryBehavior {
  AAMemoryBehaviorImpl(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehavior(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    intersectAssumedBits(BEST_STATE);
    getKnownStateFromValue(A, getIRPosition(), getState());
    AAMemoryBehavior::initialize(A);
  }

  /// Return the memory behavior information encoded in the IR for \p IRP.
  static void getKnownStateFromValue(Attributor &A, const IRPosition &IRP,
                                     BitIntegerState &State,
                                     bool IgnoreSubsumingPositions = false) {
    SmallVector<Attribute, 2> Attrs;
    A.getAttrs(IRP, AttrKinds, Attrs, IgnoreSubsumingPositions);
    for (const Attribute &Attr : Attrs) {
      switch (Attr.getKindAsEnum()) {
      case Attribute::ReadNone:
        State.addKnownBits(NO_ACCESSES);
        break;
      case Attribute::ReadOnly:
        State.addKnownBits(NO_WRITES);
        break;
      case Attribute::WriteOnly:
        State.addKnownBits(NO_READS);
        break;
      default:
        llvm_unreachable("Unexpected attribute!");
      }
    }

    if (auto *I = dyn_cast<Instruction>(&IRP.getAnchorValue())) {
      if (!I->mayReadFromMemory())
        State.addKnownBits(NO_READS);
      if (!I->mayWriteToMemory())
        State.addKnownBits(NO_WRITES);
    }
  }

  /// See AbstractAttribute::getDeducedAttributes(...).
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    assert(Attrs.size() == 0);
    if (isAssumedReadNone())
      Attrs.push_back(Attribute::get(Ctx, Attribute::ReadNone));
    else if (isAssumedReadOnly())
      Attrs.push_back(Attribute::get(Ctx, Attribute::ReadOnly));
    else if (isAssumedWriteOnly())
      Attrs.push_back(Attribute::get(Ctx, Attribute::WriteOnly));
    assert(Attrs.size() <= 1);
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();

    if (A.hasAttr(IRP, Attribute::ReadNone,
                  /* IgnoreSubsumingPositions */ true))
      return ChangeStatus::UNCHANGED;

    // If no attribute would change, bail early.
    if (!isAssumedReadNone() && !isAssumedReadOnly() && !isAssumedWriteOnly())
      return ChangeStatus::UNCHANGED;

    // Clear existing attributes.
    A.removeAttrs(IRP, AttrKinds);
    // Clear conflicting writable attribute.
    if (isAssumedReadOnly())
      A.removeAttrs(IRP, Attribute::Writable);

    // Use the generic manifest method.
    return IRAttribute::manifest(A);
  }

  /// See AbstractState::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    if (isAssumedReadNone())
      return "readnone";
    if (isAssumedReadOnly())
      return "readonly";
    if (isAssumedWriteOnly())
      return "writeonly";
    return "may-read/write";
  }

  /// The set of IR attributes AAMemoryBehavior deals with.
  static const Attribute::AttrKind AttrKinds[3];
};

const Attribute::AttrKind AAMemoryBehaviorImpl::AttrKinds[] = {
    Attribute::ReadNone, Attribute::ReadOnly, Attribute::WriteOnly};
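// The memory-behavior state is a small bit lattice: NO_READS and NO_WRITES
// start out assumed and are dropped as evidence appears. Illustrative result
// (hypothetical IR): keeping both bits yields `readnone`, keeping only
// NO_WRITES yields `readonly`, e.g.
//   define i32 @g(ptr readonly %p)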
struct AAMemoryBehaviorFloating : AAMemoryBehaviorImpl {
  AAMemoryBehaviorFloating(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (isAssumedReadNone())
      STATS_DECLTRACK_FLOATING_ATTR(readnone)
    else if (isAssumedReadOnly())
      STATS_DECLTRACK_FLOATING_ATTR(readonly)
    else if (isAssumedWriteOnly())
      STATS_DECLTRACK_FLOATING_ATTR(writeonly)
  }

private:
  /// Return true if users of \p UserI should also be considered.
  bool followUsersOfUseIn(Attributor &A, const Use &U,
                          const Instruction *UserI);

  /// Update the state according to the effect of use \p U in \p UserI.
  void analyzeUseIn(Attributor &A, const Use &U, const Instruction *UserI);
};
/// Memory behavior attribute for a function argument.
struct AAMemoryBehaviorArgument : AAMemoryBehaviorFloating {
  AAMemoryBehaviorArgument(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    intersectAssumedBits(BEST_STATE);
    const IRPosition &IRP = getIRPosition();
    bool HasByVal = A.hasAttr(IRP, {Attribute::ByVal},
                              /* IgnoreSubsumingPositions */ true);
    getKnownStateFromValue(A, IRP, getState(),
                           /* IgnoreSubsumingPositions */ HasByVal);
  }

  ChangeStatus manifest(Attributor &A) override {
    // TODO: Pointer arguments are not supported on vectors of pointers yet.
    if (!getAssociatedValue().getType()->isPointerTy())
      return ChangeStatus::UNCHANGED;

    // inalloca and preallocated parameters are always considered written.
    if (A.hasAttr(getIRPosition(),
                  {Attribute::InAlloca, Attribute::Preallocated})) {
      removeKnownBits(NO_WRITES);
      removeAssumedBits(NO_WRITES);
    }
    A.removeAttrs(getIRPosition(), AttrKinds);
    return AAMemoryBehaviorFloating::manifest(A);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (isAssumedReadNone())
      STATS_DECLTRACK_ARG_ATTR(readnone)
    else if (isAssumedReadOnly())
      STATS_DECLTRACK_ARG_ATTR(readonly)
    else if (isAssumedWriteOnly())
      STATS_DECLTRACK_ARG_ATTR(writeonly)
  }
};
struct AAMemoryBehaviorCallSiteArgument final : AAMemoryBehaviorArgument {
  AAMemoryBehaviorCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorArgument(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    if (!Arg) {
      indicatePessimisticFixpoint();
      return;
    }
    if (Arg->hasByValAttr()) {
      // Byval arguments are effectively a local, writable copy.
      addKnownBits(NO_WRITES);
      removeKnownBits(NO_READS);
      removeAssumedBits(NO_READS);
    }
    AAMemoryBehaviorArgument::initialize(A);
    if (getAssociatedFunction()->isDeclaration())
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Argument *Arg = getAssociatedArgument();
    const IRPosition &ArgPos = IRPosition::argument(*Arg);
    auto *ArgAA =
        A.getAAFor<AAMemoryBehavior>(*this, ArgPos, DepClassTy::REQUIRED);
    if (!ArgAA)
      return indicatePessimisticFixpoint();
    return clampStateAndIndicateChange(getState(), ArgAA->getState());
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (isAssumedReadNone())
      STATS_DECLTRACK_CSARG_ATTR(readnone)
    else if (isAssumedReadOnly())
      STATS_DECLTRACK_CSARG_ATTR(readonly)
    else if (isAssumedWriteOnly())
      STATS_DECLTRACK_CSARG_ATTR(writeonly)
  }
};
/// Memory behavior attribute for a call site return position.
struct AAMemoryBehaviorCallSiteReturned final : AAMemoryBehaviorFloating {
  AAMemoryBehaviorCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    AAMemoryBehaviorImpl::initialize(A);
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    // We do not annotate returned values.
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {}
};
struct AAMemoryBehaviorFunction final : public AAMemoryBehaviorImpl {
  AAMemoryBehaviorFunction(const IRPosition &IRP, Attributor &A)
      : AAMemoryBehaviorImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override;

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    Function &F = cast<Function>(getAnchorValue());
    MemoryEffects ME = MemoryEffects::unknown();
    if (isAssumedReadNone())
      ME = MemoryEffects::none();
    else if (isAssumedReadOnly())
      ME = MemoryEffects::readOnly();
    else if (isAssumedWriteOnly())
      ME = MemoryEffects::writeOnly();

    A.removeAttrs(getIRPosition(), AttrKinds);
    // Clear conflicting writable attributes.
    if (ME.onlyReadsMemory())
      for (Argument &Arg : F.args())
        A.removeAttrs(IRPosition::argument(Arg), Attribute::Writable);
    return A.manifestAttrs(getIRPosition(),
                           Attribute::getWithMemoryEffects(F.getContext(), ME));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (isAssumedReadNone())
      STATS_DECLTRACK_FN_ATTR(readnone)
    else if (isAssumedReadOnly())
      STATS_DECLTRACK_FN_ATTR(readonly)
    else if (isAssumedWriteOnly())
      STATS_DECLTRACK_FN_ATTR(writeonly)
  }
};
/// AAMemoryBehavior attribute for call sites.
struct AAMemoryBehaviorCallSite final
    : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl> {
  AAMemoryBehaviorCallSite(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAMemoryBehavior, AAMemoryBehaviorImpl>(IRP, A) {}

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    CallBase &CB = cast<CallBase>(getAnchorValue());
    MemoryEffects ME = MemoryEffects::unknown();
    if (isAssumedReadNone())
      ME = MemoryEffects::none();
    else if (isAssumedReadOnly())
      ME = MemoryEffects::readOnly();
    else if (isAssumedWriteOnly())
      ME = MemoryEffects::writeOnly();

    A.removeAttrs(getIRPosition(), AttrKinds);
    if (ME.onlyReadsMemory())
      for (Use &U : CB.args())
        A.removeAttrs(IRPosition::callsite_argument(CB, U.getOperandNo()),
                      Attribute::Writable);
    return A.manifestAttrs(
        getIRPosition(), Attribute::getWithMemoryEffects(CB.getContext(), ME));
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (isAssumedReadNone())
      STATS_DECLTRACK_CS_ATTR(readnone)
    else if (isAssumedReadOnly())
      STATS_DECLTRACK_CS_ATTR(readonly)
    else if (isAssumedWriteOnly())
      STATS_DECLTRACK_CS_ATTR(writeonly)
  }
};
ChangeStatus AAMemoryBehaviorFunction::updateImpl(Attributor &A) {
  // The current assumed state used to determine a change.
  auto AssumedState = getAssumed();

  auto CheckRWInst = [&](Instruction &I) {
    // If the instruction has an own memory behavior state, use it to restrict
    // the local state.
    if (const auto *CB = dyn_cast<CallBase>(&I)) {
      const auto *MemBehaviorAA = A.getAAFor<AAMemoryBehavior>(
          *this, IRPosition::callsite_function(*CB), DepClassTy::REQUIRED);
      if (MemBehaviorAA) {
        intersectAssumedBits(MemBehaviorAA->getAssumed());
        return !isAtFixpoint();
      }
    }

    // Remove access kind modifiers if necessary.
    if (I.mayReadFromMemory())
      removeAssumedBits(NO_READS);
    if (I.mayWriteToMemory())
      removeAssumedBits(NO_WRITES);
    return !isAtFixpoint();
  };

  bool UsedAssumedInformation = false;
  if (!A.checkForAllReadWriteInstructions(CheckRWInst, *this,
                                          UsedAssumedInformation))
    return indicatePessimisticFixpoint();

  return (AssumedState != getAssumed()) ? ChangeStatus::CHANGED
                                        : ChangeStatus::UNCHANGED;
}
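// Note: the function-level state above is simply the intersection over all
// reachable read/write instructions; a single call with unknown memory
// effects drops both NO_READS and NO_WRITES.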
ChangeStatus AAMemoryBehaviorFloating::updateImpl(Attributor &A) {
  const IRPosition &IRP = getIRPosition();
  const IRPosition &FnPos = IRPosition::function_scope(IRP);
  AAMemoryBehavior::StateType &S = getState();

  // First, check the function scope. We take the known information and we
  // avoid work if the assumed information implies the current assumed
  // information for this attribute. This is valid for all but byval arguments.
  Argument *Arg = IRP.getAssociatedArgument();
  AAMemoryBehavior::base_t FnMemAssumedState =
      AAMemoryBehavior::StateType::getWorstState();
  if (!Arg || !Arg->hasByValAttr()) {
    const auto *FnMemAA =
        A.getAAFor<AAMemoryBehavior>(*this, FnPos, DepClassTy::OPTIONAL);
    if (FnMemAA) {
      FnMemAssumedState = FnMemAA->getAssumed();
      S.addKnownBits(FnMemAA->getKnown());
      if ((S.getAssumed() & FnMemAA->getAssumed()) == S.getAssumed())
        return ChangeStatus::UNCHANGED;
    }
  }

  // The current assumed state used to determine a change.
  auto AssumedState = S.getAssumed();

  // Make sure the value is not captured (except through "return"); otherwise
  // we cannot reason about the potential aliases introduced by the capture.
  bool IsKnownNoCapture;
  const AANoCapture *ArgNoCaptureAA = nullptr;
  bool IsAssumedNoCapture = AA::hasAssumedIRAttr<Attribute::NoCapture>(
      A, this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture, false,
      &ArgNoCaptureAA);
  if (!IsAssumedNoCapture &&
      (!ArgNoCaptureAA || !ArgNoCaptureAA->isAssumedNoCaptureMaybeReturned())) {
    S.intersectAssumedBits(FnMemAssumedState);
    return (AssumedState != getAssumed()) ? ChangeStatus::CHANGED
                                          : ChangeStatus::UNCHANGED;
  }

  // Visit and expand uses until all are analyzed or a fixpoint is reached.
  auto UsePred = [&](const Use &U, bool &Follow) -> bool {
    Instruction *UserI = cast<Instruction>(U.getUser());
    LLVM_DEBUG(dbgs() << "[AAMemoryBehavior] Use: " << *U << " in " << *UserI
                      << "\n");

    // Droppable users, e.g., llvm::assume, do not actually perform any action.
    if (UserI->isDroppable())
      return true;

    // Check if the users of UserI should also be visited.
    Follow = followUsersOfUseIn(A, U, UserI);

    // If UserI might touch memory we analyze the use in detail.
    if (UserI->mayReadOrWriteMemory())
      analyzeUseIn(A, U, UserI);

    return !isAtFixpoint();
  };

  if (!A.checkForAllUses(UsePred, *this, getAssociatedValue()))
    return indicatePessimisticFixpoint();

  return (AssumedState != getAssumed()) ? ChangeStatus::CHANGED
                                        : ChangeStatus::UNCHANGED;
}
bool AAMemoryBehaviorFloating::followUsersOfUseIn(Attributor &A, const Use &U,
                                                  const Instruction *UserI) {
  // The loaded value is unrelated to the pointer argument, no need to follow
  // the users of the load.
  if (isa<LoadInst>(UserI) || isa<ReturnInst>(UserI))
    return false;

  // By default we follow all uses assuming UserI might leak information on U.
  const auto *CB = dyn_cast<CallBase>(UserI);
  if (!CB || !CB->isArgOperand(&U))
    return true;

  // If the use is a call argument known not to be captured, the users of the
  // call do not need to be visited.
  if (U.get()->getType()->isPointerTy()) {
    unsigned ArgNo = CB->getArgOperandNo(&U);
    bool IsKnownNoCapture;
    return !AA::hasAssumedIRAttr<Attribute::NoCapture>(
        A, this, IRPosition::callsite_argument(*CB, ArgNo),
        DepClassTy::OPTIONAL, IsKnownNoCapture);
  }

  return true;
}
void AAMemoryBehaviorFloating::analyzeUseIn(Attributor &A, const Use &U,
                                            const Instruction *UserI) {
  assert(UserI->mayReadOrWriteMemory());

  switch (UserI->getOpcode()) {
  default:
    break;
  case Instruction::Load:
    // Loads cause the NO_READS property to disappear.
    removeAssumedBits(NO_READS);
    return;

  case Instruction::Store:
    // Stores cause the NO_WRITES property to disappear if the use is the
    // pointer operand. Stores of the value itself escape it instead.
    if (cast<StoreInst>(UserI)->getPointerOperand() == U.get())
      removeAssumedBits(NO_WRITES);
    else
      indicatePessimisticFixpoint();
    return;

  case Instruction::Call:
  case Instruction::CallBr:
  case Instruction::Invoke: {
    // For call sites we look at the argument memory behavior attribute (this
    // could be recursive!) in order to restrict our own state.
    const auto *CB = cast<CallBase>(UserI);

    // Give up on operand bundles.
    if (CB->isBundleOperand(&U)) {
      indicatePessimisticFixpoint();
      return;
    }

    // Calling a function does read the function pointer.
    if (CB->isCallee(&U)) {
      removeAssumedBits(NO_READS);
      break;
    }

    // Adjust the possible access behavior based on the argument information.
    IRPosition Pos;
    if (U.get()->getType()->isPointerTy())
      Pos = IRPosition::callsite_argument(*CB, CB->getArgOperandNo(&U));
    else
      Pos = IRPosition::callsite_function(*CB);
    const auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, Pos, DepClassTy::OPTIONAL);
    if (!MemBehaviorAA)
      break;
    // "assumed" has at most the same bits as the MemBehaviorAA assumed.
    intersectAssumedBits(MemBehaviorAA->getAssumed());
    return;
  }
  };

  // Generally, look at the "may-properties" if nothing else triggered.
  if (UserI->mayReadFromMemory())
    removeAssumedBits(NO_READS);
  if (UserI->mayWriteToMemory())
    removeAssumedBits(NO_WRITES);
}
std::string AAMemoryLocation::getMemoryLocationsAsStr(
    AAMemoryLocation::MemoryLocationsKind MLK) {
  if (0 == (MLK & AAMemoryLocation::NO_LOCATIONS))
    return "all memory";
  if (MLK == AAMemoryLocation::NO_LOCATIONS)
    return "no memory";
  std::string S = "memory:";
  if (0 == (MLK & AAMemoryLocation::NO_GLOBAL_INTERNAL_MEM))
    S += "internal global,";
  if (0 == (MLK & AAMemoryLocation::NO_GLOBAL_EXTERNAL_MEM))
    S += "external global,";
  if (0 == (MLK & AAMemoryLocation::NO_INACCESSIBLE_MEM))
    S += "inaccessible,";
  // ... (analogous checks for the remaining location kinds)
  S.pop_back();
  return S;
}
struct AAMemoryLocationImpl : public AAMemoryLocation {

  AAMemoryLocationImpl(const IRPosition &IRP, Attributor &A)
      : AAMemoryLocation(IRP, A), Allocator(A.Allocator) {
    AccessKind2Accesses.fill(nullptr);
  }

  ~AAMemoryLocationImpl() override {
    // The AccessSets are allocated via a BumpPtrAllocator, we call
    // the destructor manually.
    for (AccessSet *AS : AccessKind2Accesses)
      if (AS)
        AS->~AccessSet();
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    intersectAssumedBits(BEST_STATE);
    getKnownStateFromValue(A, getIRPosition(), getState());
    AAMemoryLocation::initialize(A);
  }

  /// Return the memory location information encoded in the IR for \p IRP.
  static void getKnownStateFromValue(Attributor &A, const IRPosition &IRP,
                                     BitIntegerState &State,
                                     bool IgnoreSubsumingPositions = false) {
    // For internal functions we ignore `argmemonly` and
    // `inaccessiblememorargmemonly` as we might break it via interprocedural
    // constant propagation. If we are deriving attributes for the anchor
    // function we even remove the attribute in addition to ignoring it.
    bool UseArgMemOnly = true;
    Function *AnchorFn = IRP.getAnchorScope();
    if (AnchorFn && A.isRunOn(*AnchorFn))
      UseArgMemOnly = !AnchorFn->hasLocalLinkage();

    SmallVector<Attribute, 2> Attrs;
    A.getAttrs(IRP, {Attribute::Memory}, Attrs, IgnoreSubsumingPositions);
    for (const Attribute &Attr : Attrs) {
      MemoryEffects ME = Attr.getMemoryEffects();
      if (ME.doesNotAccessMemory()) {
        State.addKnownBits(NO_LOCAL_MEM | NO_CONST_MEM);
        continue;
      }
      if (ME.onlyAccessesInaccessibleMem()) {
        State.addKnownBits(inverseLocation(NO_INACCESSIBLE_MEM, true, true));
        continue;
      }
      if (ME.onlyAccessesArgPointees()) {
        if (UseArgMemOnly)
          State.addKnownBits(inverseLocation(NO_ARGUMENT_MEM, true, true));
        else {
          // Remove location information, only keep read/write info.
          ME = MemoryEffects(ME.getModRef());
          A.manifestAttrs(IRP,
                          Attribute::getWithMemoryEffects(
                              IRP.getAnchorValue().getContext(), ME),
                          /*ForceReplace*/ true);
        }
        continue;
      }
      if (ME.onlyAccessesInaccessibleOrArgMem()) {
        if (UseArgMemOnly)
          State.addKnownBits(inverseLocation(
              NO_INACCESSIBLE_MEM | NO_ARGUMENT_MEM, true, true));
        else {
          // Remove location information, only keep read/write info.
          ME = MemoryEffects(ME.getModRef());
          A.manifestAttrs(IRP,
                          Attribute::getWithMemoryEffects(
                              IRP.getAnchorValue().getContext(), ME),
                          /*ForceReplace*/ true);
        }
        continue;
      }
    }
  }

  /// See AbstractAttribute::getDeducedAttributes(...).
  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    assert(Attrs.size() == 0);
    if (isAssumedReadNone())
      Attrs.push_back(
          Attribute::getWithMemoryEffects(Ctx, MemoryEffects::none()));
    else if (isAssumedInaccessibleMemOnly())
      Attrs.push_back(Attribute::getWithMemoryEffects(
          Ctx, MemoryEffects::inaccessibleMemOnly()));
    else if (isAssumedArgMemOnly())
      Attrs.push_back(
          Attribute::getWithMemoryEffects(Ctx, MemoryEffects::argMemOnly()));
    else if (isAssumedInaccessibleOrArgMemOnly())
      Attrs.push_back(Attribute::getWithMemoryEffects(
          Ctx, MemoryEffects::inaccessibleOrArgMemOnly()));
    assert(Attrs.size() <= 1);
  }

  /// See AbstractAttribute::manifest(...).
  ChangeStatus manifest(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();

    SmallVector<Attribute, 1> DeducedAttrs;
    getDeducedAttributes(A, IRP.getAnchorValue().getContext(), DeducedAttrs);
    if (DeducedAttrs.size() != 1)
      return ChangeStatus::UNCHANGED;
    MemoryEffects ME = DeducedAttrs[0].getMemoryEffects();

    return A.manifestAttrs(IRP, Attribute::getWithMemoryEffects(
                                    IRP.getAnchorValue().getContext(), ME));
  }

  /// See AAMemoryLocation::checkForAllAccessesToMemoryKind(...).
  bool checkForAllAccessesToMemoryKind(
      function_ref<bool(const Instruction *, const Value *, AccessKind,
                        MemoryLocationsKind)>
          Pred,
      MemoryLocationsKind RequestedMLK) const override {
    if (!isValidState())
      return false;

    MemoryLocationsKind AssumedMLK = getAssumedNotAccessedLocation();
    if (AssumedMLK == NO_LOCATIONS)
      return true;

    unsigned Idx = 0;
    for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS;
         CurMLK *= 2, ++Idx) {
      if (CurMLK & RequestedMLK)
        continue;

      if (const AccessSet *Accesses = AccessKind2Accesses[Idx])
        for (const AccessInfo &AI : *Accesses)
          if (!Pred(AI.I, AI.Ptr, AI.Kind, CurMLK))
            return false;
    }

    return true;
  }

  ChangeStatus indicatePessimisticFixpoint() override {
    // If we give up and indicate a pessimistic fixpoint this instruction will
    // become an access for all potential access kinds.
    ChangeStatus Changed = ChangeStatus::CHANGED;
    MemoryLocationsKind KnownMLK = getKnown();
    Instruction *I = dyn_cast<Instruction>(&getAssociatedValue());
    for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2)
      if (!(CurMLK & KnownMLK))
        updateStateAndAccessesMap(getState(), CurMLK, I, nullptr, Changed,
                                  getAccessKindFromInst(I));
    return AAMemoryLocation::indicatePessimisticFixpoint();
  }

protected:
  /// Helper struct to tie together an instruction that has a read or write
  /// effect with the pointer it accesses (if any).
  struct AccessInfo {
    /// The instruction that caused the access.
    const Instruction *I;
    /// The base pointer that is accessed, or null if unknown.
    const Value *Ptr;
    /// The kind of access (read/write/read+write).
    AccessKind Kind;

    bool operator==(const AccessInfo &RHS) const {
      return I == RHS.I && Ptr == RHS.Ptr && Kind == RHS.Kind;
    }
    bool operator()(const AccessInfo &LHS, const AccessInfo &RHS) const {
      if (LHS.I != RHS.I)
        return LHS.I < RHS.I;
      if (LHS.Ptr != RHS.Ptr)
        return LHS.Ptr < RHS.Ptr;
      if (LHS.Kind != RHS.Kind)
        return LHS.Kind < RHS.Kind;
      return false;
    }
  };

  /// Mapping from *single* memory location kinds, e.g., LOCAL_MEM with the
  /// value of NO_LOCAL_MEM, to the accesses encountered for this memory kind.
  using AccessSet = SmallSet<AccessInfo, 2, AccessInfo>;
  std::array<AccessSet *, llvm::ConstantLog2<VALID_STATE>()>
      AccessKind2Accesses;
  /// Categorize the pointer arguments of \p CB that might access memory in
  /// \p AccessedLocs and update the state and access map accordingly.
  void
  categorizeArgumentPointerLocations(Attributor &A, CallBase &CB,
                                     AAMemoryLocation::StateType &AccessedLocs,
                                     bool &Changed);

  /// Return the kind(s) of location that may be accessed by \p I.
  AAMemoryLocation::MemoryLocationsKind
  categorizeAccessedLocations(Attributor &A, Instruction &I, bool &Changed);

  /// Return the access kind as determined by \p I.
  AccessKind getAccessKindFromInst(const Instruction *I) {
    AccessKind AK = READ_WRITE;
    if (I) {
      AK = I->mayReadFromMemory() ? READ : NONE;
      AK = AccessKind(AK | (I->mayWriteToMemory() ? WRITE : NONE));
    }
    return AK;
  }

  /// Update the state \p State and the AccessKind2Accesses map given that \p I
  /// is an access of kind \p AK to a \p MLK memory location with the access
  /// pointer \p Ptr.
  void updateStateAndAccessesMap(AAMemoryLocation::StateType &State,
                                 MemoryLocationsKind MLK, const Instruction *I,
                                 const Value *Ptr, bool &Changed,
                                 AccessKind AK = READ_WRITE) {
    assert(isPowerOf2_32(MLK) && "Expected a single location set!");
    auto *&Accesses = AccessKind2Accesses[llvm::Log2_32(MLK)];
    if (!Accesses)
      Accesses = new (Allocator) AccessSet();
    Changed |= Accesses->insert(AccessInfo{I, Ptr, AK}).second;
    if (MLK == NO_UNKOWN_MEM)
      MLK = NO_LOCATIONS;
    State.removeAssumedBits(MLK);
  }

  /// Determine the underlying location kinds for \p Ptr, e.g., globals or
  /// arguments, and update the state and access map accordingly.
  void categorizePtrValue(Attributor &A, const Instruction &I, const Value &Ptr,
                          AAMemoryLocation::StateType &State, bool &Changed,
                          unsigned AccessAS = 0);

  /// Used to allocate access sets.
  BumpPtrAllocator &Allocator;
};
void AAMemoryLocationImpl::categorizePtrValue(
    Attributor &A, const Instruction &I, const Value &Ptr,
    AAMemoryLocation::StateType &State, bool &Changed, unsigned AccessAS) {
  LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Categorize pointer locations for "
                    << Ptr << " ["
                    << getMemoryLocationsAsStr(State.getAssumed()) << "]\n");

  auto Pred = [&](Value &Obj) {
    unsigned ObjectAS = Obj.getType()->getPointerAddressSpace();
    (void)ObjectAS;
    MemoryLocationsKind MLK = NO_LOCATIONS;

    if (isa<UndefValue>(&Obj))
      return true;
    if (isa<Argument>(&Obj)) {
      MLK = NO_ARGUMENT_MEM;
    } else if (auto *GV = dyn_cast<GlobalValue>(&Obj)) {
      // Reading constant memory is not treated as a read "effect".
      if (auto *GVar = dyn_cast<GlobalVariable>(GV))
        if (GVar->isConstant())
          return true;

      if (GV->hasLocalLinkage())
        MLK = NO_GLOBAL_INTERNAL_MEM;
      else
        MLK = NO_GLOBAL_EXTERNAL_MEM;
    } else if (isa<ConstantPointerNull>(&Obj)) {
      return true;
    } else if (auto *CB = dyn_cast<CallBase>(&Obj)) {
      bool IsKnownNoAlias;
      if (AA::hasAssumedIRAttr<Attribute::NoAlias>(
              A, this, IRPosition::callsite_returned(*CB), DepClassTy::OPTIONAL,
              IsKnownNoAlias))
        MLK = NO_MALLOCED_MEM;
      else
        MLK = NO_UNKOWN_MEM;
    } else {
      MLK = NO_UNKOWN_MEM;
    }

    assert(MLK != NO_LOCATIONS && "No location specified!");
    LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Ptr value can be categorized: "
                      << Obj << " -> " << getMemoryLocationsAsStr(MLK) << "\n");
    updateStateAndAccessesMap(State, MLK, &I, &Obj, Changed,
                              getAccessKindFromInst(&I));

    return true;
  };

  const auto *AA = A.getAAFor<AAUnderlyingObjects>(
      *this, IRPosition::value(Ptr), DepClassTy::OPTIONAL);
  if (!AA || !AA->forallUnderlyingObjects(Pred, AA::Intraprocedural)) {
    LLVM_DEBUG(
        dbgs() << "[AAMemoryLocation] Pointer locations not categorized\n");
    updateStateAndAccessesMap(State, NO_UNKOWN_MEM, &I, nullptr, Changed,
                              getAccessKindFromInst(&I));
    return;
  }

  LLVM_DEBUG(
      dbgs() << "[AAMemoryLocation] Accessed locations with pointer locations: "
             << getMemoryLocationsAsStr(State.getAssumed()) << "\n");
}
void AAMemoryLocationImpl::categorizeArgumentPointerLocations(
    Attributor &A, CallBase &CB, AAMemoryLocation::StateType &AccessedLocs,
    bool &Changed) {
  for (unsigned ArgNo = 0, E = CB.arg_size(); ArgNo < E; ++ArgNo) {

    // Skip non-pointer arguments.
    const Value *ArgOp = CB.getArgOperand(ArgNo);
    if (!ArgOp->getType()->isPtrOrPtrVectorTy())
      continue;

    // Skip readnone arguments.
    const IRPosition &ArgOpIRP = IRPosition::callsite_argument(CB, ArgNo);
    const auto *ArgOpMemLocationAA =
        A.getAAFor<AAMemoryBehavior>(*this, ArgOpIRP, DepClassTy::OPTIONAL);

    if (ArgOpMemLocationAA && ArgOpMemLocationAA->isAssumedReadNone())
      continue;

    // Categorize potentially accessed pointer arguments as if there was an
    // access instruction with them as pointer.
    categorizePtrValue(A, CB, *ArgOp, AccessedLocs, Changed);
  }
}
AAMemoryLocation::MemoryLocationsKind
AAMemoryLocationImpl::categorizeAccessedLocations(Attributor &A, Instruction &I,
                                                  bool &Changed) {
  LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Categorize accessed locations for "
                    << I << "\n");

  AAMemoryLocation::StateType AccessedLocs;
  AccessedLocs.intersectAssumedBits(NO_LOCATIONS);

  if (auto *CB = dyn_cast<CallBase>(&I)) {

    // First check if we assume any memory is access is visible.
    const auto *CBMemLocationAA = A.getAAFor<AAMemoryLocation>(
        *this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL);
    LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Categorize call site: " << I
                      << " [" << CBMemLocationAA << "]\n");
    if (!CBMemLocationAA) {
      updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &I, nullptr,
                                Changed, getAccessKindFromInst(&I));
      return NO_UNKOWN_MEM;
    }

    if (CBMemLocationAA->isAssumedReadNone())
      return NO_LOCATIONS;

    if (CBMemLocationAA->isAssumedInaccessibleMemOnly()) {
      updateStateAndAccessesMap(AccessedLocs, NO_INACCESSIBLE_MEM, &I, nullptr,
                                Changed, getAccessKindFromInst(&I));
      return AccessedLocs.getAssumed();
    }

    uint32_t CBAssumedNotAccessedLocs =
        CBMemLocationAA->getAssumedNotAccessedLocation();

    // Set the argmemonly and global bits as we handle them separately below.
    uint32_t CBAssumedNotAccessedLocsNoArgMem =
        CBAssumedNotAccessedLocs | NO_ARGUMENT_MEM | NO_GLOBAL_MEM;

    for (MemoryLocationsKind CurMLK = 1; CurMLK < NO_LOCATIONS; CurMLK *= 2) {
      if (CBAssumedNotAccessedLocsNoArgMem & CurMLK)
        continue;
      updateStateAndAccessesMap(AccessedLocs, CurMLK, &I, nullptr, Changed,
                                getAccessKindFromInst(&I));
    }

    // Now handle global memory if it might be accessed. This is slightly
    // tricky as NO_GLOBAL_MEM has multiple bits set.
    bool HasGlobalAccesses = ((~CBAssumedNotAccessedLocs) & NO_GLOBAL_MEM);
    if (HasGlobalAccesses) {
      auto AccessPred = [&](const Instruction *, const Value *Ptr,
                            AccessKind Kind, MemoryLocationsKind MLK) {
        updateStateAndAccessesMap(AccessedLocs, MLK, &I, Ptr, Changed,
                                  getAccessKindFromInst(&I));
        return true;
      };
      if (!CBMemLocationAA->checkForAllAccessesToMemoryKind(
              AccessPred, inverseLocation(NO_GLOBAL_MEM, false, false)))
        return AccessedLocs.getWorstState();
    }

    LLVM_DEBUG(
        dbgs() << "[AAMemoryLocation] Accessed state before argument handling: "
               << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) << "\n");

    // Now handle argument memory if it might be accessed.
    bool HasArgAccesses = ((~CBAssumedNotAccessedLocs) & NO_ARGUMENT_MEM);
    if (HasArgAccesses)
      categorizeArgumentPointerLocations(A, *CB, AccessedLocs, Changed);

    LLVM_DEBUG(
        dbgs() << "[AAMemoryLocation] Accessed state after argument handling: "
               << getMemoryLocationsAsStr(AccessedLocs.getAssumed()) << "\n");

    return AccessedLocs.getAssumed();
  }

  if (const Value *Ptr = getPointerOperand(&I, /* AllowVolatile */ true)) {
    LLVM_DEBUG(
        dbgs() << "[AAMemoryLocation] Categorize memory access with pointer: "
               << I << " [" << *Ptr << "]\n");
    categorizePtrValue(A, I, *Ptr, AccessedLocs, Changed,
                       Ptr->getType()->getPointerAddressSpace());
    return AccessedLocs.getAssumed();
  }

  LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Failed to categorize instruction: "
                    << I << "\n");
  updateStateAndAccessesMap(AccessedLocs, NO_UNKOWN_MEM, &I, nullptr, Changed,
                            getAccessKindFromInst(&I));
  return AccessedLocs.getAssumed();
}
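// Hedged sketch of what the location categorization above ultimately
// manifests as (hypothetical attribute, see getDeducedAttributes):
//   define void @h(ptr %p) memory(argmem: readwrite, inaccessiblemem: readwrite)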
struct AAMemoryLocationFunction final : public AAMemoryLocationImpl {
  AAMemoryLocationFunction(const IRPosition &IRP, Attributor &A)
      : AAMemoryLocationImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(Attributor &A).
  ChangeStatus updateImpl(Attributor &A) override {

    const auto *MemBehaviorAA =
        A.getAAFor<AAMemoryBehavior>(*this, getIRPosition(), DepClassTy::NONE);
    if (MemBehaviorAA && MemBehaviorAA->isAssumedReadNone()) {
      if (MemBehaviorAA->isKnownReadNone())
        return indicateOptimisticFixpoint();
      assert(isAssumedReadNone() &&
             "AAMemoryLocation was not read-none but AAMemoryBehavior was!");
      A.recordDependence(*MemBehaviorAA, *this, DepClassTy::OPTIONAL);
      return ChangeStatus::UNCHANGED;
    }

    // The current assumed state used to determine a change.
    auto AssumedState = getAssumed();
    bool Changed = false;

    auto CheckRWInst = [&](Instruction &I) {
      MemoryLocationsKind MLK = categorizeAccessedLocations(A, I, Changed);
      LLVM_DEBUG(dbgs() << "[AAMemoryLocation] Accessed locations for " << I
                        << ": " << getMemoryLocationsAsStr(MLK) << "\n");
      removeAssumedBits(inverseLocation(MLK, false, false));
      // Stop once only the valid bit is set in the *not assumed location*,
      // thus once we do not actually exclude any memory locations anymore.
      return getAssumedNotAccessedLocation() != VALID_STATE;
    };

    bool UsedAssumedInformation = false;
    if (!A.checkForAllReadWriteInstructions(CheckRWInst, *this,
                                            UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    Changed |= AssumedState != getAssumed();
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (isAssumedReadNone())
      STATS_DECLTRACK_FN_ATTR(readnone)
    else if (isAssumedArgMemOnly())
      STATS_DECLTRACK_FN_ATTR(argmemonly)
    else if (isAssumedInaccessibleMemOnly())
      STATS_DECLTRACK_FN_ATTR(inaccessiblememonly)
    else if (isAssumedInaccessibleOrArgMemOnly())
      STATS_DECLTRACK_FN_ATTR(inaccessiblememorargmemonly)
  }
};
/// AAMemoryLocation attribute for call sites.
struct AAMemoryLocationCallSite final : AAMemoryLocationImpl {
  AAMemoryLocationCallSite(const IRPosition &IRP, Attributor &A)
      : AAMemoryLocationImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    // Redirect the request to the callee function for now.
    Function *F = getAssociatedFunction();
    const IRPosition &FnPos = IRPosition::function(*F);
    auto *FnAA =
        A.getAAFor<AAMemoryLocation>(*this, FnPos, DepClassTy::REQUIRED);
    if (!FnAA)
      return indicatePessimisticFixpoint();
    bool Changed = false;
    auto AccessPred = [&](const Instruction *I, const Value *Ptr,
                          AccessKind Kind, MemoryLocationsKind MLK) {
      updateStateAndAccessesMap(getState(), MLK, I, Ptr, Changed,
                                getAccessKindFromInst(I));
      return true;
    };
    if (!FnAA->checkForAllAccessesToMemoryKind(AccessPred, ALL_LOCATIONS))
      return indicatePessimisticFixpoint();
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    if (isAssumedReadNone())
      STATS_DECLTRACK_CS_ATTR(readnone)
  }
};
struct AADenormalFPMathImpl : public AADenormalFPMath {
  AADenormalFPMathImpl(const IRPosition &IRP, Attributor &A)
      : AADenormalFPMath(IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    std::string Str("AADenormalFPMath[");
    raw_string_ostream OS(Str);

    DenormalState Known = getKnown();
    if (Known.Mode.isValid())
      OS << "denormal-fp-math=" << Known.Mode;
    else
      OS << "invalid";

    if (Known.ModeF32.isValid())
      OS << " denormal-fp-math-f32=" << Known.ModeF32;
    OS << ']';
    return Str;
  }
};
struct AADenormalFPMathFunction final : AADenormalFPMathImpl {
  AADenormalFPMathFunction(const IRPosition &IRP, Attributor &A)
      : AADenormalFPMathImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    const Function *F = getAnchorScope();
    DenormalMode Mode = F->getDenormalModeRaw();
    DenormalMode ModeF32 = F->getDenormalModeF32Raw();

    Known = DenormalState{Mode, ModeF32};
    if (isModeFixed())
      indicateFixpoint();
  }

  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Change = ChangeStatus::UNCHANGED;

    auto CheckCallSite = [=, &Change, &A](AbstractCallSite CS) {
      Function *Caller = CS.getInstruction()->getFunction();
      LLVM_DEBUG(dbgs() << "[AADenormalFPMath] Call " << Caller->getName()
                        << "->" << getAssociatedFunction()->getName() << '\n');

      const auto *CallerInfo = A.getAAFor<AADenormalFPMath>(
          *this, IRPosition::function(*Caller), DepClassTy::REQUIRED);
      if (!CallerInfo)
        return false;

      Change = Change | clampStateAndIndicateChange(this->getState(),
                                                    CallerInfo->getState());
      return true;
    };

    bool AllCallSitesKnown = true;
    if (!A.checkForAllCallSites(CheckCallSite, *this, true, AllCallSitesKnown))
      return indicatePessimisticFixpoint();

    if (Change == ChangeStatus::CHANGED && isModeFixed())
      indicateFixpoint();
    return Change;
  }

  ChangeStatus manifest(Attributor &A) override {
    LLVMContext &Ctx = getAssociatedFunction()->getContext();

    SmallVector<Attribute, 2> AttrToAdd;
    SmallVector<StringRef, 2> AttrToRemove;
    if (Known.Mode == DenormalMode::getDefault()) {
      AttrToRemove.push_back("denormal-fp-math");
    } else {
      AttrToAdd.push_back(
          Attribute::get(Ctx, "denormal-fp-math", Known.Mode.str()));
    }

    if (Known.ModeF32 != Known.Mode) {
      AttrToAdd.push_back(
          Attribute::get(Ctx, "denormal-fp-math-f32", Known.ModeF32.str()));
    } else {
      AttrToRemove.push_back("denormal-fp-math-f32");
    }

    auto &IRP = getIRPosition();

    // TODO: There should be a combined add and remove API.
    return A.removeAttrs(IRP, AttrToRemove) |
           A.manifestAttrs(IRP, AttrToAdd, /*ForceReplace=*/true);
  }

  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(denormal_fp_math)
  }
};
struct AAValueConstantRangeImpl : AAValueConstantRange {
  using StateType = IntegerRangeState;
  AAValueConstantRangeImpl(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRange(IRP, A) {}

  /// See AbstractAttribute::initialize(..).
  void initialize(Attributor &A) override {
    if (A.hasSimplificationCallback(getIRPosition())) {
      indicatePessimisticFixpoint();
      return;
    }

    // Intersect a range given by SCEV.
    intersectKnown(getConstantRangeFromSCEV(A, getCtxI()));

    // Intersect a range given by LVI.
    intersectKnown(getConstantRangeFromLVI(A, getCtxI()));
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    std::string Str;
    llvm::raw_string_ostream OS(Str);
    OS << "range(" << getBitWidth() << ")<";
    getKnown().print(OS);
    OS << " / ";
    getAssumed().print(OS);
    OS << ">";
    return Str;
  }

  /// Helper function to get a SCEV expression for the associated value at
  /// program point \p I.
  const SCEV *getSCEV(Attributor &A, const Instruction *I = nullptr) const {
    if (!getAnchorScope())
      return nullptr;

    ScalarEvolution *SE =
        A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
            *getAnchorScope());
    LoopInfo *LI = A.getInfoCache().getAnalysisResultForFunction<LoopAnalysis>(
        *getAnchorScope());

    if (!SE || !LI)
      return nullptr;

    const SCEV *S = SE->getSCEV(&getAssociatedValue());
    if (!I)
      return S;

    return SE->getSCEVAtScope(S, LI->getLoopFor(I->getParent()));
  }

  /// Helper function to get a range from SCEV for the associated value at
  /// program point \p I.
  ConstantRange getConstantRangeFromSCEV(Attributor &A,
                                         const Instruction *I = nullptr) const {
    if (!getAnchorScope())
      return getWorstState(getBitWidth());

    ScalarEvolution *SE =
        A.getInfoCache().getAnalysisResultForFunction<ScalarEvolutionAnalysis>(
            *getAnchorScope());

    const SCEV *S = getSCEV(A, I);
    if (!SE || !S)
      return getWorstState(getBitWidth());

    return SE->getUnsignedRange(S);
  }

  /// Helper function to get a range from LVI for the associated value at
  /// program point \p CtxI.
  ConstantRange
  getConstantRangeFromLVI(Attributor &A,
                          const Instruction *CtxI = nullptr) const {
    if (!getAnchorScope())
      return getWorstState(getBitWidth());

    LazyValueInfo *LVI =
        A.getInfoCache().getAnalysisResultForFunction<LazyValueAnalysis>(
            *getAnchorScope());

    if (!LVI || !CtxI)
      return getWorstState(getBitWidth());
    return LVI->getConstantRange(&getAssociatedValue(),
                                 const_cast<Instruction *>(CtxI));
  }

  /// Return true if \p CtxI is valid for querying outside analyses. This makes
  /// sure we do not ask intra-procedural analyses about a context in the wrong
  /// function or one that violates their dominance assumptions.
  bool isValidCtxInstructionForOutsideAnalysis(Attributor &A,
                                               const Instruction *CtxI,
                                               bool AllowAACtxI) const {
    if (!CtxI || (!AllowAACtxI && CtxI == getCtxI()))
      return false;

    // Our context must be valid for the scope we query analyses in.
    if (CtxI->getFunction() != getAnchorScope())
      return false;

    // If the context is not dominated by the value there are paths to the
    // context that do not define the value.
    if (auto *I = dyn_cast<Instruction>(&getAssociatedValue())) {
      InformationCache &InfoCache = A.getInfoCache();
      const DominatorTree *DT =
          InfoCache.getAnalysisResultForFunction<DominatorTreeAnalysis>(
              *I->getFunction());
      return DT && DT->dominates(I, CtxI);
    }

    return true;
  }

  /// See AAValueConstantRange::getKnownConstantRange(..).
  ConstantRange
  getKnownConstantRange(Attributor &A,
                        const Instruction *CtxI = nullptr) const override {
    if (!isValidCtxInstructionForOutsideAnalysis(A, CtxI,
                                                 /* AllowAACtxI */ false))
      return getKnown();

    ConstantRange LVIR = getConstantRangeFromLVI(A, CtxI);
    ConstantRange SCEVR = getConstantRangeFromSCEV(A, CtxI);
    return getKnown().intersectWith(SCEVR).intersectWith(LVIR);
  }

  /// See AAValueConstantRange::getAssumedConstantRange(..).
  ConstantRange
  getAssumedConstantRange(Attributor &A,
                          const Instruction *CtxI = nullptr) const override {
    if (!isValidCtxInstructionForOutsideAnalysis(A, CtxI,
                                                 /* AllowAACtxI */ false))
      return getAssumed();

    ConstantRange LVIR = getConstantRangeFromLVI(A, CtxI);
    ConstantRange SCEVR = getConstantRangeFromSCEV(A, CtxI);
    return getAssumed().intersectWith(SCEVR).intersectWith(LVIR);
  }

  /// Helper function to create MDNode for range metadata.
  static MDNode *
  getMDNodeForConstantRange(Type *Ty, LLVMContext &Ctx,
                            const ConstantRange &AssumedConstantRange) {
    Metadata *LowAndHigh[] = {ConstantAsMetadata::get(ConstantInt::get(
                                  Ty, AssumedConstantRange.getLower())),
                              ConstantAsMetadata::get(ConstantInt::get(
                                  Ty, AssumedConstantRange.getUpper()))};
    return MDNode::get(Ctx, LowAndHigh);
  }

  /// Return true if \p Assumed is a strictly better range than what is already
  /// known for \p I.
  static bool isBetterRange(const ConstantRange &Assumed,
                            const Instruction &I) {
    if (Assumed.isFullSet())
      return false;

    std::optional<ConstantRange> Known;
    if (const auto *CB = dyn_cast<CallBase>(&I)) {
      Known = CB->getRange();
    } else if (MDNode *KnownRanges = I.getMetadata(LLVMContext::MD_range)) {
      // If multiple ranges are annotated in IR, we give up on annotating the
      // assumed range for now.
      if (KnownRanges->getNumOperands() > 2)
        return false;

      ConstantInt *Lower =
          mdconst::extract<ConstantInt>(KnownRanges->getOperand(0));
      ConstantInt *Upper =
          mdconst::extract<ConstantInt>(KnownRanges->getOperand(1));

      Known.emplace(Lower->getValue(), Upper->getValue());
    }
    return !Known || (*Known != Assumed && Known->contains(Assumed));
  }

  /// Helper function to set range metadata.
  static bool
  setRangeMetadataIfisBetterRange(Instruction *I,
                                  const ConstantRange &AssumedConstantRange) {
    if (isBetterRange(AssumedConstantRange, *I)) {
      I->setMetadata(LLVMContext::MD_range,
                     getMDNodeForConstantRange(I->getType(), I->getContext(),
                                               AssumedConstantRange));
      return true;
    }
    return false;
  }

  /// Helper function to set the range return attribute.
  static bool
  setRangeRetAttrIfisBetterRange(Attributor &A, const IRPosition &IRP,
                                 Instruction *I,
                                 const ConstantRange &AssumedConstantRange) {
    if (isBetterRange(AssumedConstantRange, *I)) {
      A.manifestAttrs(IRP,
                      Attribute::get(I->getContext(), Attribute::Range,
                                     AssumedConstantRange),
                      /*ForceReplace*/ true);
      return true;
    }
    return false;
  }

  /// See AbstractAttribute::manifest()
  ChangeStatus manifest(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    ConstantRange AssumedConstantRange = getAssumedConstantRange(A);
    assert(!AssumedConstantRange.isFullSet() && "Invalid state");

    auto &V = getAssociatedValue();
    if (!AssumedConstantRange.isEmptySet() &&
        !AssumedConstantRange.isSingleElement()) {
      if (Instruction *I = dyn_cast<Instruction>(&V)) {
        assert(I == getCtxI() && "Should not annotate an instruction which is "
                                 "not the context instruction");
        if (isa<CallInst>(I) || isa<LoadInst>(I))
          if (setRangeMetadataIfisBetterRange(I, AssumedConstantRange))
            Changed = ChangeStatus::CHANGED;
        if (isa<CallInst>(I))
          if (setRangeRetAttrIfisBetterRange(A, getIRPosition(), I,
                                             AssumedConstantRange))
            Changed = ChangeStatus::CHANGED;
      }
    }

    return Changed;
  }
};
struct AAValueConstantRangeArgument final
    : AAArgumentFromCallSiteArguments<
          AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
          /* BridgeCallBaseContext */ true> {
  using Base = AAArgumentFromCallSiteArguments<
      AAValueConstantRange, AAValueConstantRangeImpl, IntegerRangeState,
      /* BridgeCallBaseContext */ true>;
  AAValueConstantRangeArgument(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(value_range)
  }
};

struct AAValueConstantRangeReturned
    : AAReturnedFromReturnedValues<AAValueConstantRange,
                                   AAValueConstantRangeImpl,
                                   AAValueConstantRangeImpl::StateType,
                                   /* PropagateCallBaseContext */ true> {
  using Base =
      AAReturnedFromReturnedValues<AAValueConstantRange,
                                   AAValueConstantRangeImpl,
                                   AAValueConstantRangeImpl::StateType,
                                   /* PropagateCallBaseContext */ true>;
  AAValueConstantRangeReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    if (!A.isFunctionIPOAmendable(*getAssociatedFunction()))
      indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(value_range)
  }
};
9385 AAValueConstantRangeImpl::initialize(
A);
9389 Value &
V = getAssociatedValue();
9392 unionAssumed(ConstantRange(
C->getValue()));
9393 indicateOptimisticFixpoint();
9399 unionAssumed(ConstantRange(APInt(
getBitWidth(), 0)));
9400 indicateOptimisticFixpoint();
9412 if (
auto *RangeMD = LI->getMetadata(LLVMContext::MD_range)) {
9423 indicatePessimisticFixpoint();
9426 << getAssociatedValue() <<
"\n");
9429 bool calculateBinaryOperator(
9430 Attributor &
A, BinaryOperator *BinOp, IntegerRangeState &
T,
9431 const Instruction *CtxI,
9432 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9437 bool UsedAssumedInformation =
false;
9438 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9441 if (!SimplifiedLHS.has_value())
9443 if (!*SimplifiedLHS)
9445 LHS = *SimplifiedLHS;
9447 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9450 if (!SimplifiedRHS.has_value())
9452 if (!*SimplifiedRHS)
9454 RHS = *SimplifiedRHS;
9460 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9462 DepClassTy::REQUIRED);
9466 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9468 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9470 DepClassTy::REQUIRED);
9474 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9476 auto AssumedRange = LHSAARange.binaryOp(BinOp->
getOpcode(), RHSAARange);
9478 T.unionAssumed(AssumedRange);
9482 return T.isValidState();
9485 bool calculateCastInst(
9486 Attributor &
A, CastInst *CastI, IntegerRangeState &
T,
9487 const Instruction *CtxI,
9488 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9494 bool UsedAssumedInformation =
false;
9495 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9498 if (!SimplifiedOpV.has_value())
9500 if (!*SimplifiedOpV)
9502 OpV = *SimplifiedOpV;
9507 auto *OpAA =
A.getAAFor<AAValueConstantRange>(
9509 DepClassTy::REQUIRED);
9513 T.unionAssumed(OpAA->getAssumed().castOp(CastI->
getOpcode(),
9515 return T.isValidState();
9519 calculateCmpInst(Attributor &
A, CmpInst *CmpI, IntegerRangeState &
T,
9520 const Instruction *CtxI,
9521 SmallVectorImpl<const AAValueConstantRange *> &QuerriedAAs) {
9526 bool UsedAssumedInformation =
false;
9527 const auto &SimplifiedLHS =
A.getAssumedSimplified(
9530 if (!SimplifiedLHS.has_value())
9532 if (!*SimplifiedLHS)
9534 LHS = *SimplifiedLHS;
9536 const auto &SimplifiedRHS =
A.getAssumedSimplified(
9539 if (!SimplifiedRHS.has_value())
9541 if (!*SimplifiedRHS)
9543 RHS = *SimplifiedRHS;
9549 auto *LHSAA =
A.getAAFor<AAValueConstantRange>(
9551 DepClassTy::REQUIRED);
9555 auto *RHSAA =
A.getAAFor<AAValueConstantRange>(
9557 DepClassTy::REQUIRED);
9561 auto LHSAARange = LHSAA->getAssumedConstantRange(
A, CtxI);
9562 auto RHSAARange = RHSAA->getAssumedConstantRange(
A, CtxI);
9565 if (LHSAARange.isEmptySet() || RHSAARange.isEmptySet())
9568 bool MustTrue =
false, MustFalse =
false;
9570 auto AllowedRegion =
9573 if (AllowedRegion.intersectWith(LHSAARange).isEmptySet())
9579 assert((!MustTrue || !MustFalse) &&
9580 "Either MustTrue or MustFalse should be false!");
9583 T.unionAssumed(ConstantRange(APInt( 1, 1)));
9585 T.unionAssumed(ConstantRange(APInt( 1, 0)));
9587 T.unionAssumed(ConstantRange( 1,
true));
9589 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] " << *CmpI <<
" after "
9590 << (MustTrue ?
"true" : (MustFalse ?
"false" :
"unknown"))
9591 <<
": " <<
T <<
"\n\t" << *LHSAA <<
"\t<op>\n\t"
9595 return T.isValidState();
9607 bool UsedAssumedInformation =
false;
9608 const auto &SimplifiedOpV =
A.getAssumedSimplified(
9611 if (!SimplifiedOpV.has_value())
9613 if (!*SimplifiedOpV)
9615 Value *VPtr = *SimplifiedOpV;
9618 const auto *AA =
A.getAAFor<AAValueConstantRange>(
9620 DepClassTy::REQUIRED);
9624 T.unionAssumed(AA->getAssumedConstantRange(
A, CtxI));
9628 return T.isValidState();
9633 if (!calculateBinaryOperator(
A, BinOp,
T, CtxI, QuerriedAAs))
9636 if (!calculateCmpInst(
A, CmpI,
T, CtxI, QuerriedAAs))
9639 if (!calculateCastInst(
A, CastI,
T, CtxI, QuerriedAAs))
9645 T.indicatePessimisticFixpoint();
9652 for (
const AAValueConstantRange *QueriedAA : QuerriedAAs) {
9653 if (QueriedAA !=
this)
9656 if (
T.getAssumed() == getState().getAssumed())
9658 T.indicatePessimisticFixpoint();
9661 return T.isValidState();
9664 if (!VisitValueCB(getAssociatedValue(), getCtxI()))
9665 return indicatePessimisticFixpoint();
9670 return ChangeStatus::UNCHANGED;
9671 if (++NumChanges > MaxNumChanges) {
9672 LLVM_DEBUG(
dbgs() <<
"[AAValueConstantRange] performed " << NumChanges
9673 <<
" but only " << MaxNumChanges
9674 <<
" are allowed to avoid cyclic reasoning.");
9675 return indicatePessimisticFixpoint();
9677 return ChangeStatus::CHANGED;
9681 void trackStatistics()
const override {
9690 static constexpr int MaxNumChanges = 5;
struct AAValueConstantRangeFunction : AAValueConstantRangeImpl {
  AAValueConstantRangeFunction(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRangeImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    llvm_unreachable("AAValueConstantRange(Function|CallSite)::updateImpl will "
                     "not be called");
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(value_range) }
};

struct AAValueConstantRangeCallSite : AAValueConstantRangeFunction {
  AAValueConstantRangeCallSite(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRangeFunction(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(value_range) }
};

struct AAValueConstantRangeCallSiteReturned
    : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
                         AAValueConstantRangeImpl::StateType,
                         /* IntroduceCallBaseContext */ true> {
  AAValueConstantRangeCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AAValueConstantRange, AAValueConstantRangeImpl,
                           AAValueConstantRangeImpl::StateType,
                           /* IntroduceCallBaseContext */ true>(IRP, A) {}

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    // If it is a call instruction with a range attribute, use that range.
    if (CallInst *CI = dyn_cast<CallInst>(&getAssociatedValue()))
      if (std::optional<ConstantRange> Range = CI->getRange())
        intersectKnown(*Range);

    AAValueConstantRangeImpl::initialize(A);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(value_range)
  }
};

struct AAValueConstantRangeCallSiteArgument : AAValueConstantRangeFloating {
  AAValueConstantRangeCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAValueConstantRangeFloating(IRP, A) {}

  /// See AbstractAttribute::manifest()
  ChangeStatus manifest(Attributor &A) override {
    return ChangeStatus::UNCHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(value_range)
  }
};
struct AAPotentialConstantValuesImpl : AAPotentialConstantValues {
  using StateType = PotentialConstantIntValuesState;

  AAPotentialConstantValuesImpl(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValues(IRP, A) {}

  /// See AbstractAttribute::initialize(..).
  void initialize(Attributor &A) override {
    if (A.hasSimplificationCallback(getIRPosition()))
      indicatePessimisticFixpoint();
    else
      AAPotentialConstantValues::initialize(A);
  }

  bool fillSetWithConstantValues(Attributor &A, const IRPosition &IRP, SetTy &S,
                                 bool &ContainsUndef, bool ForSelf) {
    SmallVector<AA::ValueAndContext> Values;
    bool UsedAssumedInformation = false;
    if (!A.getAssumedSimplifiedValues(IRP, *this, Values, AA::Interprocedural,
                                      UsedAssumedInformation)) {
      // Avoid recursion when the caller is computing constant values for this
      // IRP itself.
      if (ForSelf)
        return false;
      if (!IRP.getAssociatedType()->isIntegerTy())
        return false;
      auto *PotentialValuesAA = A.getAAFor<AAPotentialConstantValues>(
          *this, IRP, DepClassTy::REQUIRED);
      if (!PotentialValuesAA || !PotentialValuesAA->getState().isValidState())
        return false;
      ContainsUndef = PotentialValuesAA->getState().undefIsContained();
      S = PotentialValuesAA->getState().getAssumedSet();
      return true;
    }

    // Copy all the constant values, except UndefValue. ContainsUndef is true
    // iff Values contains only UndefValue instances. If there are other known
    // constants, then UndefValue is dropped.
    ContainsUndef = false;
    for (auto &It : Values) {
      if (isa<UndefValue>(It.getValue())) {
        ContainsUndef = true;
        continue;
      }
      auto *CI = dyn_cast<ConstantInt>(It.getValue());
      if (!CI)
        return false;
      S.insert(CI->getValue());
    }
    ContainsUndef &= S.empty();

    return true;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    std::string Str;
    llvm::raw_string_ostream OS(Str);
    OS << getState();
    return Str;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }
};

struct AAPotentialConstantValuesArgument final
    : AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
                                      AAPotentialConstantValuesImpl,
                                      PotentialConstantIntValuesState> {
  using Base = AAArgumentFromCallSiteArguments<AAPotentialConstantValues,
                                               AAPotentialConstantValuesImpl,
                                               PotentialConstantIntValuesState>;
  AAPotentialConstantValuesArgument(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_ARG_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesReturned
    : AAReturnedFromReturnedValues<AAPotentialConstantValues,
                                   AAPotentialConstantValuesImpl> {
  using Base = AAReturnedFromReturnedValues<AAPotentialConstantValues,
                                            AAPotentialConstantValuesImpl>;
  AAPotentialConstantValuesReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  void initialize(Attributor &A) override {
    if (!A.isFunctionIPOAmendable(*getAssociatedFunction()))
      indicatePessimisticFixpoint();
    Base::initialize(A);
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FNRET_ATTR(potential_values)
  }
};
struct AAPotentialConstantValuesFloating : AAPotentialConstantValuesImpl {
  AAPotentialConstantValuesFloating(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValuesImpl(IRP, A) {}

  /// See AbstractAttribute::initialize(..).
  void initialize(Attributor &A) override {
    AAPotentialConstantValuesImpl::initialize(A);
    if (isAtFixpoint())
      return;

    Value &V = getAssociatedValue();

    if (auto *C = dyn_cast<ConstantInt>(&V)) {
      unionAssumed(C->getValue());
      indicateOptimisticFixpoint();
      return;
    }

    if (isa<UndefValue>(&V)) {
      unionAssumedWithUndef();
      indicateOptimisticFixpoint();
      return;
    }

    if (isa<BinaryOperator>(&V) || isa<ICmpInst>(&V) || isa<CastInst>(&V))
      return;

    if (isa<SelectInst>(V) || isa<PHINode>(V) || isa<LoadInst>(V))
      return;

    indicatePessimisticFixpoint();

    LLVM_DEBUG(dbgs() << "[AAPotentialConstantValues] We give up: "
                      << getAssociatedValue() << "\n");
  }

  static bool calculateICmpInst(const ICmpInst *ICI, const APInt &LHS,
                                const APInt &RHS) {
    return ICmpInst::compare(LHS, RHS, ICI->getPredicate());
  }

  static APInt calculateCastInst(const CastInst *CI, const APInt &Src,
                                 uint32_t ResultBitWidth) {
    Instruction::CastOps CastOp = CI->getOpcode();
    switch (CastOp) {
    default:
      llvm_unreachable("unsupported or not integer cast");
    case Instruction::Trunc:
      return Src.trunc(ResultBitWidth);
    case Instruction::SExt:
      return Src.sext(ResultBitWidth);
    case Instruction::ZExt:
      return Src.zext(ResultBitWidth);
    case Instruction::BitCast:
      return Src;
    }
  }

  static APInt calculateBinaryOperator(const BinaryOperator *BinOp,
                                       const APInt &LHS, const APInt &RHS,
                                       bool &SkipOperation, bool &Unsupported) {
    Instruction::BinaryOps BinOpcode = BinOp->getOpcode();
    // Unsupported is set to true when the binary operator is not supported.
    // SkipOperation is set to true when UB occurs with the given operand pair
    // (LHS, RHS).
    switch (BinOpcode) {
    default:
      Unsupported = true;
      return LHS;
    case Instruction::Add:
      return LHS + RHS;
    case Instruction::Sub:
      return LHS - RHS;
    case Instruction::Mul:
      return LHS * RHS;
    case Instruction::UDiv:
      if (RHS.isZero()) {
        SkipOperation = true;
        return LHS;
      }
      return LHS.udiv(RHS);
    case Instruction::SDiv:
      if (RHS.isZero()) {
        SkipOperation = true;
        return LHS;
      }
      return LHS.sdiv(RHS);
    case Instruction::URem:
      if (RHS.isZero()) {
        SkipOperation = true;
        return LHS;
      }
      return LHS.urem(RHS);
    case Instruction::SRem:
      if (RHS.isZero()) {
        SkipOperation = true;
        return LHS;
      }
      return LHS.srem(RHS);
    case Instruction::Shl:
      return LHS.shl(RHS);
    case Instruction::LShr:
      return LHS.lshr(RHS);
    case Instruction::AShr:
      return LHS.ashr(RHS);
    case Instruction::And:
      return LHS & RHS;
    case Instruction::Or:
      return LHS | RHS;
    case Instruction::Xor:
      return LHS ^ RHS;
    }
  }

  bool calculateBinaryOperatorAndTakeUnion(const BinaryOperator *BinOp,
                                           const APInt &LHS, const APInt &RHS) {
    bool SkipOperation = false;
    bool Unsupported = false;
    APInt Result =
        calculateBinaryOperator(BinOp, LHS, RHS, SkipOperation, Unsupported);
    if (Unsupported)
      return false;
    // If SkipOperation is true, we can ignore this operand pair (L, R).
    if (!SkipOperation)
      unionAssumed(Result);
    return isValidState();
  }

  ChangeStatus updateWithICmpInst(Attributor &A, ICmpInst *ICI) {
    auto AssumedBefore = getAssumed();
    Value *LHS = ICI->getOperand(0);
    Value *RHS = ICI->getOperand(1);

    bool LHSContainsUndef = false, RHSContainsUndef = false;
    SetTy LHSAAPVS, RHSAAPVS;
    if (!fillSetWithConstantValues(A, IRPosition::value(*LHS), LHSAAPVS,
                                   LHSContainsUndef, /* ForSelf */ false) ||
        !fillSetWithConstantValues(A, IRPosition::value(*RHS), RHSAAPVS,
                                   RHSContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    // TODO: make use of undef flag to limit potential values aggressively.
    bool MaybeTrue = false, MaybeFalse = false;
    const APInt Zero(LHS->getType()->getIntegerBitWidth(), 0);
    if (LHSContainsUndef && RHSContainsUndef) {
      // The result of any comparison between undefs can be soundly replaced
      // with undef.
      unionAssumedWithUndef();
    } else if (LHSContainsUndef) {
      for (const APInt &R : RHSAAPVS) {
        bool CmpResult = calculateICmpInst(ICI, Zero, R);
        MaybeTrue |= CmpResult;
        MaybeFalse |= !CmpResult;
        if (MaybeTrue & MaybeFalse)
          return indicatePessimisticFixpoint();
      }
    } else if (RHSContainsUndef) {
      for (const APInt &L : LHSAAPVS) {
        bool CmpResult = calculateICmpInst(ICI, L, Zero);
        MaybeTrue |= CmpResult;
        MaybeFalse |= !CmpResult;
        if (MaybeTrue & MaybeFalse)
          return indicatePessimisticFixpoint();
      }
    } else {
      for (const APInt &L : LHSAAPVS) {
        for (const APInt &R : RHSAAPVS) {
          bool CmpResult = calculateICmpInst(ICI, L, R);
          MaybeTrue |= CmpResult;
          MaybeFalse |= !CmpResult;
          if (MaybeTrue & MaybeFalse)
            return indicatePessimisticFixpoint();
        }
      }
    }
    if (MaybeTrue)
      unionAssumed(APInt(/* numBits */ 1, /* val */ 1));
    if (MaybeFalse)
      unionAssumed(APInt(/* numBits */ 1, /* val */ 0));
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  ChangeStatus updateWithSelectInst(Attributor &A, SelectInst *SI) {
    auto AssumedBefore = getAssumed();
    Value *LHS = SI->getTrueValue();
    Value *RHS = SI->getFalseValue();

    bool UsedAssumedInformation = false;
    std::optional<Constant *> C = A.getAssumedConstant(
        *SI->getCondition(), *this, UsedAssumedInformation);

    // Check if we only need one operand.
    bool OnlyLeft = false, OnlyRight = false;
    if (C && *C && (*C)->isOneValue())
      OnlyLeft = true;
    else if (C && *C && (*C)->isZeroValue())
      OnlyRight = true;

    bool LHSContainsUndef = false, RHSContainsUndef = false;
    SetTy LHSAAPVS, RHSAAPVS;
    if (!OnlyRight &&
        !fillSetWithConstantValues(A, IRPosition::value(*LHS), LHSAAPVS,
                                   LHSContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    if (!OnlyLeft &&
        !fillSetWithConstantValues(A, IRPosition::value(*RHS), RHSAAPVS,
                                   RHSContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    if (OnlyLeft || OnlyRight) {
      // select (true/false), lhs, rhs
      auto *OpAA = OnlyLeft ? &LHSAAPVS : &RHSAAPVS;
      auto Undef = OnlyLeft ? LHSContainsUndef : RHSContainsUndef;

      if (Undef)
        unionAssumedWithUndef();
      else {
        for (const auto &It : *OpAA)
          unionAssumed(It);
      }

    } else if (LHSContainsUndef && RHSContainsUndef) {
      // select i1 *, undef, undef => undef
      unionAssumedWithUndef();
    } else {
      for (const auto &It : LHSAAPVS)
        unionAssumed(It);
      for (const auto &It : RHSAAPVS)
        unionAssumed(It);
    }
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  ChangeStatus updateWithCastInst(Attributor &A, CastInst *CI) {
    auto AssumedBefore = getAssumed();
    if (!CI->isIntegerCast())
      return indicatePessimisticFixpoint();
    assert(CI->getNumOperands() == 1 && "Expected cast to be unary!");
    uint32_t ResultBitWidth = CI->getDestTy()->getIntegerBitWidth();
    Value *Src = CI->getOperand(0);

    bool SrcContainsUndef = false;
    SetTy SrcPVS;
    if (!fillSetWithConstantValues(A, IRPosition::value(*Src), SrcPVS,
                                   SrcContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    if (SrcContainsUndef)
      unionAssumedWithUndef();
    else {
      for (const APInt &S : SrcPVS) {
        APInt T = calculateCastInst(CI, S, ResultBitWidth);
        unionAssumed(T);
      }
    }
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  ChangeStatus updateWithBinaryOperator(Attributor &A, BinaryOperator *BinOp) {
    auto AssumedBefore = getAssumed();
    Value *LHS = BinOp->getOperand(0);
    Value *RHS = BinOp->getOperand(1);

    bool LHSContainsUndef = false, RHSContainsUndef = false;
    SetTy LHSAAPVS, RHSAAPVS;
    if (!fillSetWithConstantValues(A, IRPosition::value(*LHS), LHSAAPVS,
                                   LHSContainsUndef, /* ForSelf */ false) ||
        !fillSetWithConstantValues(A, IRPosition::value(*RHS), RHSAAPVS,
                                   RHSContainsUndef, /* ForSelf */ false))
      return indicatePessimisticFixpoint();

    const APInt Zero = APInt(LHS->getType()->getIntegerBitWidth(), 0);

    // TODO: make use of undef flag to limit potential values aggressively.
    if (LHSContainsUndef && RHSContainsUndef) {
      if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, Zero))
        return indicatePessimisticFixpoint();
    } else if (LHSContainsUndef) {
      for (const APInt &R : RHSAAPVS) {
        if (!calculateBinaryOperatorAndTakeUnion(BinOp, Zero, R))
          return indicatePessimisticFixpoint();
      }
    } else if (RHSContainsUndef) {
      for (const APInt &L : LHSAAPVS) {
        if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, Zero))
          return indicatePessimisticFixpoint();
      }
    } else {
      for (const APInt &L : LHSAAPVS) {
        for (const APInt &R : RHSAAPVS) {
          if (!calculateBinaryOperatorAndTakeUnion(BinOp, L, R))
            return indicatePessimisticFixpoint();
        }
      }
    }
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  ChangeStatus updateWithInstruction(Attributor &A, Instruction *Inst) {
    auto AssumedBefore = getAssumed();
    SetTy Incoming;
    bool ContainsUndef;
    if (!fillSetWithConstantValues(A, IRPosition::value(*Inst), Incoming,
                                   ContainsUndef, /* ForSelf */ true))
      return indicatePessimisticFixpoint();
    if (ContainsUndef) {
      unionAssumedWithUndef();
    } else {
      for (const auto &It : Incoming)
        unionAssumed(It);
    }
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Value &V = getAssociatedValue();
    Instruction *I = dyn_cast<Instruction>(&V);

    if (auto *ICI = dyn_cast<ICmpInst>(I))
      return updateWithICmpInst(A, ICI);

    if (auto *SI = dyn_cast<SelectInst>(I))
      return updateWithSelectInst(A, SI);

    if (auto *CI = dyn_cast<CastInst>(I))
      return updateWithCastInst(A, CI);

    if (auto *BinOp = dyn_cast<BinaryOperator>(I))
      return updateWithBinaryOperator(A, BinOp);

    if (isa<PHINode>(I) || isa<LoadInst>(I))
      return updateWithInstruction(A, I);

    return indicatePessimisticFixpoint();
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FLOATING_ATTR(potential_values)
  }
};
struct AAPotentialConstantValuesFunction : AAPotentialConstantValuesImpl {
  AAPotentialConstantValuesFunction(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValuesImpl(IRP, A) {}

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    llvm_unreachable(
        "AAPotentialConstantValues(Function|CallSite)::updateImpl will "
        "not be called");
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_FN_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesCallSite : AAPotentialConstantValuesFunction {
  AAPotentialConstantValuesCallSite(const IRPosition &IRP, Attributor &A)
      : AAPotentialConstantValuesFunction(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CS_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesCallSiteReturned
    : AACalleeToCallSite<AAPotentialConstantValues,
                         AAPotentialConstantValuesImpl> {
  AAPotentialConstantValuesCallSiteReturned(const IRPosition &IRP,
                                            Attributor &A)
      : AACalleeToCallSite<AAPotentialConstantValues,
                           AAPotentialConstantValuesImpl>(IRP, A) {}

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSRET_ATTR(potential_values)
  }
};

struct AAPotentialConstantValuesCallSiteArgument
    : AAPotentialConstantValuesFloating {
  AAPotentialConstantValuesCallSiteArgument(const IRPosition &IRP,
                                            Attributor &A)
      : AAPotentialConstantValuesFloating(IRP, A) {}

  /// See AbstractAttribute::initialize(..).
  void initialize(Attributor &A) override {
    AAPotentialConstantValuesImpl::initialize(A);
    if (isAtFixpoint())
      return;

    Value &V = getAssociatedValue();

    if (auto *C = dyn_cast<ConstantInt>(&V)) {
      unionAssumed(C->getValue());
      indicateOptimisticFixpoint();
      return;
    }

    if (isa<UndefValue>(&V)) {
      unionAssumedWithUndef();
      indicateOptimisticFixpoint();
      return;
    }
  }

  /// See AbstractAttribute::updateImpl(...).
  ChangeStatus updateImpl(Attributor &A) override {
    Value &V = getAssociatedValue();
    auto AssumedBefore = getAssumed();
    auto *AA = A.getAAFor<AAPotentialConstantValues>(
        *this, IRPosition::value(V), DepClassTy::REQUIRED);
    if (!AA)
      return indicatePessimisticFixpoint();
    const auto &S = AA->getAssumed();
    unionAssumed(S);
    return AssumedBefore == getAssumed() ? ChangeStatus::UNCHANGED
                                         : ChangeStatus::CHANGED;
  }

  /// See AbstractAttribute::trackStatistics()
  void trackStatistics() const override {
    STATS_DECLTRACK_CSARG_ATTR(potential_values)
  }
};
struct AANoUndefImpl : AANoUndef {
  AANoUndefImpl(const IRPosition &IRP, Attributor &A) : AANoUndef(IRP, A) {}

  /// See IRAttribute::isImpliedByIR(...).
  static bool isImpliedByIR(Attributor &A, const IRPosition &IRP,
                            Attribute::AttrKind ImpliedAttributeKind,
                            bool IgnoreSubsumingPositions) {
    assert(ImpliedAttributeKind == Attribute::NoUndef &&
           "Unexpected attribute kind");
    if (A.hasAttr(IRP, {Attribute::NoUndef}, IgnoreSubsumingPositions,
                  Attribute::NoUndef))
      return true;
    // ... (values that are guaranteed not to be undef or poison also imply
    //      noundef; elided here)
    return false;
  }

  /// See AbstractAttribute::initialize(...).
  void initialize(Attributor &A) override {
    Value &V = getAssociatedValue();
    if (isa<UndefValue>(V))
      indicatePessimisticFixpoint();
    assert(!isImpliedByIR(A, getIRPosition(), Attribute::NoUndef));
  }

  /// See followUsesInMBEC
  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANoUndef::StateType &State) {
    const Value *UseV = U->get();
    const DominatorTree *DT = nullptr;
    AssumptionCache *AC = nullptr;
    InformationCache &InfoCache = A.getInfoCache();
    if (Function *F = getAnchorScope()) {
      DT = InfoCache.getAnalysisResultForFunction<DominatorTreeAnalysis>(*F);
      AC = InfoCache.getAnalysisResultForFunction<AssumptionAnalysis>(*F);
    }
    State.setKnown(isGuaranteedNotToBeUndefOrPoison(UseV, AC, I, DT));
    bool TrackUse = false;
    // Track use for instructions which must produce undef or poison bits when
    // at least one operand contains such bits.
    if (isa<CastInst>(*I) || isa<GetElementPtrInst>(*I))
      TrackUse = true;
    return TrackUse;
  }

  /// See AbstractAttribute::getAsStr().
  const std::string getAsStr(Attributor *A) const override {
    return getAssumed() ? "noundef" : "may-undef-or-poison";
  }

  ChangeStatus manifest(Attributor &A) override {
    // We do not manifest the noundef attribute for dead positions because the
    // associated values with dead positions would be replaced with undef.
    bool UsedAssumedInformation = false;
    if (A.isAssumedDead(getIRPosition(), nullptr, nullptr,
                        UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;
    // A position whose simplified value does not have any value is considered
    // dead as well; do not manifest noundef there either.
    if (!A.getAssumedSimplified(getIRPosition(), *this, UsedAssumedInformation,
                                AA::Interprocedural)
             .has_value())
      return ChangeStatus::UNCHANGED;
    return AANoUndef::manifest(A);
  }
};
struct AANoUndefFloating : public AANoUndefImpl {
  AANoUndefFloating(const IRPosition &IRP, Attributor &A)
      : AANoUndefImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    AANoUndefImpl::initialize(A);
    if (!getState().isAtFixpoint() && getAnchorScope() &&
        !getAnchorScope()->isDeclaration())
      if (Instruction *CtxI = getCtxI())
        followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  ChangeStatus updateImpl(Attributor &A) override {
    auto VisitValueCB = [&](const IRPosition &IRP) -> bool {
      bool IsKnownNoUndef;
      return AA::hasAssumedIRAttr<Attribute::NoUndef>(
          A, this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
    };

    bool UsedAssumedInformation = false;
    Value *AssociatedValue = &getAssociatedValue();
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      /* ... */))
      // ...

    bool Stripped =
        Values.size() != 1 || Values.front().getValue() != AssociatedValue;
    if (!Stripped) {
      const IRPosition AVIRP = IRPosition::value(*AssociatedValue);
      if (AVIRP == getIRPosition() || !VisitValueCB(AVIRP))
        return indicatePessimisticFixpoint();
      return ChangeStatus::UNCHANGED;
    }

    for (const auto &VAC : Values)
      if (!VisitValueCB(IRPosition::value(*VAC.getValue())))
        return indicatePessimisticFixpoint();

    return ChangeStatus::UNCHANGED;
  }
  // ...
};

struct AANoUndefReturned final
    : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl> {
  AANoUndefReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANoUndef, AANoUndefImpl>(IRP, A) {}
  // ...
};

struct AANoUndefArgument final
    : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl> {
  AANoUndefArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANoUndef, AANoUndefImpl>(IRP, A) {}
  // ...
};

struct AANoUndefCallSiteArgument final : AANoUndefFloating {
  AANoUndefCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoUndefFloating(IRP, A) {}
  // ...
};

struct AANoUndefCallSiteReturned final
    : AACalleeToCallSite<AANoUndef, AANoUndefImpl> {
  AANoUndefCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoUndef, AANoUndefImpl>(IRP, A) {}
  // ...
};
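// Illustrative sketch (not part of the original source): asking whether a
// position `IRP` is assumed `noundef`, via the AA::hasAssumedIRAttr helper
// used in the code above. `A` and `*this` (the querying attribute) are assumed
// available.
//
//   bool IsKnownNoUndef = false;
//   bool IsAssumedNoUndef = AA::hasAssumedIRAttr<Attribute::NoUndef>(
//       A, this, IRP, DepClassTy::REQUIRED, IsKnownNoUndef);
//   if (IsAssumedNoUndef) {
//     // The value at IRP is assumed (and, if IsKnownNoUndef is set, known)
//     // to be neither undef nor poison.
//   }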
struct AANoFPClassImpl : AANoFPClass {
  AANoFPClassImpl(const IRPosition &IRP, Attributor &A) : AANoFPClass(IRP, A) {}

  void initialize(Attributor &A) override {
    const IRPosition &IRP = getIRPosition();
    // ...
      indicateOptimisticFixpoint();
    // ...

    SmallVector<Attribute> Attrs;
    A.getAttrs(getIRPosition(), {Attribute::NoFPClass}, Attrs, false);
    for (const auto &Attr : Attrs) {
      // ...
    }

    const DataLayout &DL = A.getDataLayout();
    InformationCache &InfoCache = A.getInfoCache();

    const DominatorTree *DT = nullptr;
    AssumptionCache *AC = nullptr;
    const TargetLibraryInfo *TLI = nullptr;
    Function *F = getAnchorScope();
    if (!F->isDeclaration()) {
      // ...
    }
    Instruction *CtxI = getCtxI();
    SimplifyQuery Q(DL, TLI, DT, AC, CtxI);
    // ...
    followUsesInMBEC(*this, A, getState(), *CtxI);
  }

  bool followUseInMBEC(Attributor &A, const Use *U, const Instruction *I,
                       AANoFPClass::StateType &State) {
    // ...
    if (auto *NoFPAA = A.getAAFor<AANoFPClass>(*this, IRP, DepClassTy::NONE))
      State.addKnownBits(NoFPAA->getState().getKnown());
    // ...
  }

  const std::string getAsStr(Attributor *A) const override {
    std::string Result = "nofpclass";
    raw_string_ostream OS(Result);
    OS << getKnownNoFPClass() << '/' << getAssumedNoFPClass();
    return Result;
  }

  void getDeducedAttributes(Attributor &A, LLVMContext &Ctx,
                            SmallVectorImpl<Attribute> &Attrs) const override {
    Attrs.emplace_back(Attribute::getWithNoFPClass(Ctx, getAssumedNoFPClass()));
  }
};

struct AANoFPClassFloating : public AANoFPClassImpl {
  AANoFPClassFloating(const IRPosition &IRP, Attributor &A)
      : AANoFPClassImpl(IRP, A) {}

  ChangeStatus updateImpl(Attributor &A) override {
    SmallVector<AA::ValueAndContext> Values;
    bool UsedAssumedInformation = false;
    if (!A.getAssumedSimplifiedValues(getIRPosition(), *this, Values,
                                      /* ... */))
      Values.push_back({getAssociatedValue(), getCtxI()});

    StateType T;
    auto VisitValueCB = [&](Value &V) -> bool {
      const auto *AA = A.getAAFor<AANoFPClass>(*this, IRPosition::value(V),
                                               DepClassTy::REQUIRED);
      if (!AA || this == AA) {
        T.indicatePessimisticFixpoint();
      } else {
        const AANoFPClass::StateType &S =
            static_cast<const AANoFPClass::StateType &>(AA->getState());
        T ^= S;
      }
      return T.isValidState();
    };

    for (const auto &VAC : Values)
      if (!VisitValueCB(*VAC.getValue()))
        return indicatePessimisticFixpoint();

    return clampStateAndIndicateChange(getState(), T);
  }

  void trackStatistics() const override {
    // ...
  }
};

struct AANoFPClassReturned final
    : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
                                   AANoFPClassImpl::StateType, false,
                                   Attribute::None, false> {
  AANoFPClassReturned(const IRPosition &IRP, Attributor &A)
      : AAReturnedFromReturnedValues<AANoFPClass, AANoFPClassImpl,
                                     AANoFPClassImpl::StateType, false,
                                     Attribute::None, false>(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoFPClassArgument final
    : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl> {
  AANoFPClassArgument(const IRPosition &IRP, Attributor &A)
      : AAArgumentFromCallSiteArguments<AANoFPClass, AANoFPClassImpl>(IRP, A) {}
  // ...
};

struct AANoFPClassCallSiteArgument final : AANoFPClassFloating {
  AANoFPClassCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoFPClassFloating(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoFPClassCallSiteReturned final
    : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl> {
  AANoFPClassCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AACalleeToCallSite<AANoFPClass, AANoFPClassImpl>(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};
struct AACallEdgesImpl : public AACallEdges {
  AACallEdgesImpl(const IRPosition &IRP, Attributor &A) : AACallEdges(IRP, A) {}

  const SetVector<Function *> &getOptimisticEdges() const override {
    return CalledFunctions;
  }

  bool hasUnknownCallee() const override { return HasUnknownCallee; }

  bool hasNonAsmUnknownCallee() const override {
    return HasUnknownCalleeNonAsm;
  }

  const std::string getAsStr(Attributor *A) const override {
    return "CallEdges[" + std::to_string(HasUnknownCallee) + "," +
           std::to_string(CalledFunctions.size()) + "]";
  }

  void trackStatistics() const override {}

  void addCalledFunction(Function *Fn, ChangeStatus &Change) {
    if (CalledFunctions.insert(Fn)) {
      Change = ChangeStatus::CHANGED;
      // ...
    }
  }

  void setHasUnknownCallee(bool NonAsm, ChangeStatus &Change) {
    if (!HasUnknownCallee)
      Change = ChangeStatus::CHANGED;
    if (NonAsm && !HasUnknownCalleeNonAsm)
      Change = ChangeStatus::CHANGED;
    HasUnknownCalleeNonAsm |= NonAsm;
    HasUnknownCallee = true;
  }

  /// Optimistic set of functions that might be called by this position.
  SetVector<Function *> CalledFunctions;

  /// Whether there is any call with an unknown callee.
  bool HasUnknownCallee = false;

  /// Whether there is any call with an unknown callee, excluding inline asm.
  bool HasUnknownCalleeNonAsm = false;
};

struct AACallEdgesCallSite : public AACallEdgesImpl {
  AACallEdgesCallSite(const IRPosition &IRP, Attributor &A)
      : AACallEdgesImpl(IRP, A) {}

  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Change = ChangeStatus::UNCHANGED;

    auto VisitValue = [&](Value &V, const Instruction *CtxI) {
      if (auto *Fn = dyn_cast<Function>(&V)) {
        addCalledFunction(Fn, Change);
      } else {
        LLVM_DEBUG(dbgs() << "[AACallEdges] Unrecognized value: " << V << "\n");
        setHasUnknownCallee(true, Change);
      }
      return true;
    };

    auto ProcessCalledOperand = [&](Value *V, Instruction *CtxI) {
      // ...
      VisitValue(*V, CtxI);
    };

    CallBase *CB = cast<CallBase>(getCtxI());
    bool UsedAssumedInformation = false;
    // ...
    for (auto &VAC : Values)
      // ...

    if (auto *IA = dyn_cast<InlineAsm>(CB->getCalledOperand())) {
      if (IA->hasSideEffects() && /* ... */)
        setHasUnknownCallee(false, Change);
      return Change;
    }

    if (auto *IndirectCallAA = A.getAAFor<AAIndirectCallInfo>(
            *this, getIRPosition(), DepClassTy::OPTIONAL))
      if (IndirectCallAA->foreachCallee(
              [&](Function *Fn) { return VisitValue(*Fn, CB); }))
        return Change;

    // ...
    for (const Use *U : CallbackUses)
      ProcessCalledOperand(U->get(), CB);

    return Change;
  }
};

struct AACallEdgesFunction : public AACallEdgesImpl {
  AACallEdgesFunction(const IRPosition &IRP, Attributor &A)
      : AACallEdgesImpl(IRP, A) {}

  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Change = ChangeStatus::UNCHANGED;

    auto ProcessCallInst = [&](Instruction &Inst) {
      CallBase &CB = cast<CallBase>(Inst);
      auto *CBEdges = A.getAAFor<AACallEdges>(
          *this, IRPosition::callsite_function(CB), DepClassTy::REQUIRED);
      // ...
      if (CBEdges->hasNonAsmUnknownCallee())
        setHasUnknownCallee(true, Change);
      if (CBEdges->hasUnknownCallee())
        setHasUnknownCallee(false, Change);

      for (Function *F : CBEdges->getOptimisticEdges())
        addCalledFunction(F, Change);

      return true;
    };

    // Visit all callable instructions.
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(ProcessCallInst, *this,
                                           UsedAssumedInformation,
                                           /* ... */))
      // If we have not looked at all call-like instructions, assume that
      // there are unknown callees.
      setHasUnknownCallee(true, Change);

    return Change;
  }
};
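// Illustrative sketch (not part of the original source): consuming the call
// edges deduced for a function `F` from another abstract attribute, using
// only the interfaces shown above. `A`, `*this`, and `F` are assumed given.
//
//   const auto *AAEdges = A.getAAFor<AACallEdges>(
//       *this, IRPosition::function(F), DepClassTy::OPTIONAL);
//   if (AAEdges && AAEdges->getState().isValidState() &&
//       !AAEdges->hasUnknownCallee())
//     for (Function *Callee : AAEdges->getOptimisticEdges())
//       ; // every function F may call is visited here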
struct AAInterFnReachabilityFunction
    : public CachedReachabilityAA<AAInterFnReachability, Function> {
  using Base = CachedReachabilityAA<AAInterFnReachability, Function>;
  AAInterFnReachabilityFunction(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  bool instructionCanReach(
      Attributor &A, const Instruction &From, const Function &To,
      const AA::InstExclusionSetTy *ExclusionSet) const override {
    auto *NonConstThis = const_cast<AAInterFnReachabilityFunction *>(this);

    RQITy StackRQI(A, From, To, ExclusionSet, false);
    RQITy::Reachable Result;
    if (!NonConstThis->checkQueryCache(A, StackRQI, Result))
      return NonConstThis->isReachableImpl(A, StackRQI,
                                           /* IsTemporaryRQI */ true);
    return Result == RQITy::Reachable::Yes;
  }

  bool isReachableImpl(Attributor &A, RQITy &RQI,
                       bool IsTemporaryRQI) override {
    const Instruction *EntryI =
        &RQI.From->getFunction()->getEntryBlock().front();
    if (EntryI != RQI.From &&
        !instructionCanReach(A, *EntryI, *RQI.To, nullptr))
      return rememberResult(A, RQITy::Reachable::No, RQI, false,
                            IsTemporaryRQI);

    auto CheckReachableCallBase = [&](CallBase *CB) {
      auto *CBEdges = A.getAAFor<AACallEdges>(
          *this, IRPosition::callsite_function(*CB), DepClassTy::OPTIONAL);
      if (!CBEdges || !CBEdges->getState().isValidState())
        return false;
      if (CBEdges->hasUnknownCallee())
        return false;

      for (Function *Fn : CBEdges->getOptimisticEdges()) {
        // ...
        if (Fn == getAnchorScope()) {
          if (EntryI == RQI.From)
            continue;
          // ...
        }

        const AAInterFnReachability *InterFnReachability =
            A.getAAFor<AAInterFnReachability>(*this, IRPosition::function(*Fn),
                                              DepClassTy::OPTIONAL);
        if (!InterFnReachability ||
            /* ... */)
          return false;
      }
      return true;
    };

    auto CheckCallBase = [&](Instruction &CBInst) {
      // ...
      const auto *IntraFnReachability = A.getAAFor<AAIntraFnReachability>(
          *this, IRPosition::function(*RQI.From->getFunction()),
          DepClassTy::OPTIONAL);
      // ...
      return IntraFnReachability && !IntraFnReachability->isAssumedReachable(
                                        A, *RQI.From, CBInst, RQI.ExclusionSet);
    };

    bool UsedExclusionSet = true;
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallLikeInstructions(CheckCallBase, *this,
                                           UsedAssumedInformation,
                                           /* ... */))
      return rememberResult(A, RQITy::Reachable::Yes, RQI, UsedExclusionSet,
                            IsTemporaryRQI);

    return rememberResult(A, RQITy::Reachable::No, RQI, UsedExclusionSet,
                          IsTemporaryRQI);
  }

  void trackStatistics() const override {}
};
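// Illustrative sketch (not part of the original source): asking whether an
// instruction `I` may transitively reach a function `ToFn`, mirroring the
// instructionCanReach interface above. `A` and `*this` are assumed available.
//
//   const auto *Reachability = A.getAAFor<AAInterFnReachability>(
//       *this, IRPosition::function(*I.getFunction()), DepClassTy::OPTIONAL);
//   bool MayReach =
//       !Reachability ||
//       Reachability->instructionCanReach(A, I, ToFn, /*ExclusionSet=*/nullptr);
//   // A missing AA is conservatively treated as "may reach".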
template <typename AAType>
static std::optional<Constant *>
askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA,
                      const IRPosition &IRP, Type &Ty) {
  if (!Ty.isIntegerTy())
    return nullptr;

  // ...
  std::optional<Constant *> COpt = AA->getAssumedConstant(A);

  if (!COpt.has_value()) {
    // ...
    return std::nullopt;
  }
  if (auto *C = *COpt) {
    // ...
  }
  return nullptr;
}

static Value *getSingleValue(Attributor &A, const AbstractAttribute &AA,
                             const IRPosition &IRP,
                             SmallVectorImpl<AA::ValueAndContext> &Values) {
  std::optional<Value *> V;
  for (auto &It : Values) {
    // ...
    if (V.has_value() && !*V)
      break;
    // ...
  }
  if (!V.has_value())
    // ...
  // ...
}

struct AAPotentialValuesImpl : AAPotentialValues {
  AAPotentialValuesImpl(const IRPosition &IRP, Attributor &A)
      : AAPotentialValues(IRP, A) {}

  void initialize(Attributor &A) override {
    if (A.hasSimplificationCallback(getIRPosition())) {
      indicatePessimisticFixpoint();
      return;
    }
    Value *Stripped = getAssociatedValue().stripPointerCasts();
    if (isa<Constant>(Stripped) && !isa<ConstantExpr>(Stripped)) {
      addValue(A, getState(), *Stripped, getCtxI(), AA::AnyScope,
               getAnchorScope());
      indicateOptimisticFixpoint();
      return;
    }
    AAPotentialValues::initialize(A);
  }

  const std::string getAsStr(Attributor *A) const override {
    std::string Str;
    llvm::raw_string_ostream OS(Str);
    OS << getState();
    return Str;
  }

  template <typename AAType>
  static std::optional<Value *> askOtherAA(Attributor &A,
                                           const AbstractAttribute &AA,
                                           const IRPosition &IRP, Type &Ty) {
    // ...
      return std::nullopt;
    // ...
  }

  virtual void addValue(Attributor &A, StateType &State, Value &V,
                        const Instruction *CtxI, AA::ValueScope S,
                        Function *AnchorScope) const {
    IRPosition ValIRP = IRPosition::value(V);
    if (auto *CB = dyn_cast_or_null<CallBase>(CtxI)) {
      for (const auto &U : CB->args()) {
        // ...
      }
      // ...
    }

    // ...
    Type &Ty = *getAssociatedType();
    std::optional<Value *> SimpleV =
        askOtherAA<AAValueConstantRange>(A, *this, ValIRP, Ty);
    if (SimpleV.has_value() && !*SimpleV) {
      auto *PotentialConstantsAA = A.getAAFor<AAPotentialConstantValues>(
          *this, ValIRP, DepClassTy::OPTIONAL);
      if (PotentialConstantsAA && PotentialConstantsAA->isValidState()) {
        for (const auto &It : PotentialConstantsAA->getAssumedSet())
          State.unionAssumed({{*ConstantInt::get(&Ty, It), nullptr}, S});
        if (PotentialConstantsAA->undefIsContained())
          // ...
        return;
      }
    }
    if (!SimpleV.has_value())
      return;

    Value *VPtr = &V;
    // ...
    State.unionAssumed({{*VPtr, CtxI}, S});
  }

  /// An entry of the traversal worklist: a value/context pair and its scope.
  struct ItemInfo {
    AA::ValueAndContext I;
    AA::ValueScope S;

    bool operator==(const ItemInfo &II) const {
      return II.I == I && II.S == S;
    }
    bool operator<(const ItemInfo &II) const {
      return std::tie(I, S) < std::tie(II.I, II.S);
    }
  };

  bool recurseForValue(Attributor &A, const IRPosition &IRP, AA::ValueScope S) {
    SmallMapVector<AA::ValueAndContext, int, 8> ValueScopeMap;
    for (auto CS : {AA::Intraprocedural, AA::Interprocedural}) {
      // ...
      bool UsedAssumedInformation = false;
      SmallVector<AA::ValueAndContext> Values;
      if (!A.getAssumedSimplifiedValues(IRP, this, Values, CS,
                                        UsedAssumedInformation))
        return false;

      for (auto &It : Values)
        ValueScopeMap[It] += CS;
    }
    for (auto &It : ValueScopeMap)
      addValue(A, getState(), *It.first.getValue(), It.first.getCtxI(),
               /* ... */);

    return true;
  }

  void giveUpOnIntraprocedural(Attributor &A) {
    auto NewS = StateType::getBestState(getState());
    for (const auto &It : getAssumedSet()) {
      // ...
      addValue(A, NewS, *It.first.getValue(), It.first.getCtxI(),
               /* ... */);
    }
    assert(!undefIsContained() && "Undef should be an explicit value!");
    // ...
  }

  ChangeStatus indicateOptimisticFixpoint() override {
    getState() = StateType::getBestState(getState());
    getState().unionAssumed({{getAssociatedValue(), getCtxI()}, AA::AnyScope});
    AAPotentialValues::indicateOptimisticFixpoint();
    return ChangeStatus::CHANGED;
  }

  ChangeStatus updateImpl(Attributor &A) override {
    return indicatePessimisticFixpoint();
  }

  ChangeStatus manifest(Attributor &A) override {
    // ...
    if (!getAssumedSimplifiedValues(A, Values, S))
      // ...
    Value &OldV = getAssociatedValue();
    // ...
    Value *NewV = getSingleValue(A, *this, getIRPosition(), Values);
    if (!NewV || NewV == &OldV)
      // ...
    if (A.changeAfterManifest(getIRPosition(), *NewV))
      return ChangeStatus::CHANGED;
    // ...
    return ChangeStatus::UNCHANGED;
  }

  bool getAssumedSimplifiedValues(
      Attributor &A, SmallVectorImpl<AA::ValueAndContext> &Values,
      AA::ValueScope S,
      bool RecurseForSelectAndPHI = false) const override {
    if (!isValidState())
      return false;
    bool UsedAssumedInformation = false;
    for (const auto &It : getAssumedSet())
      if (It.second & S) {
        if (RecurseForSelectAndPHI && (isa<PHINode>(It.first.getValue()) ||
                                       isa<SelectInst>(It.first.getValue()))) {
          if (A.getAssumedSimplifiedValues(
                  IRPosition::inst(*cast<Instruction>(It.first.getValue())),
                  this, Values, S, UsedAssumedInformation))
            continue;
        }
        Values.push_back(It.first);
      }
    assert(!undefIsContained() && "Undef should be an explicit value!");
    return true;
  }
};
struct AAPotentialValuesFloating : AAPotentialValuesImpl {
  AAPotentialValuesFloating(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  ChangeStatus updateImpl(Attributor &A) override {
    auto AssumedBefore = getAssumed();

    genericValueTraversal(A, &getAssociatedValue());

    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  }

  /// Helper struct to remember which AAIsDead instances we actually used.
  struct LivenessInfo {
    const AAIsDead *LivenessAA = nullptr;
    bool AnyDead = false;
  };

  bool handleCmp(Attributor &A, Value &Cmp, Value *LHS, Value *RHS,
                 CmpInst::Predicate Pred, ItemInfo II,
                 SmallVectorImpl<ItemInfo> &Worklist) {
    // ...
    bool UsedAssumedInformation = false;

    SmallVector<AA::ValueAndContext> LHSValues, RHSValues;
    auto GetSimplifiedValues = [&](Value &V,
                                   SmallVector<AA::ValueAndContext> &Values) {
      if (!A.getAssumedSimplifiedValues(
              IRPosition::value(V), this, Values, /* ... */))
        Values.push_back(AA::ValueAndContext{V, II.I.getCtxI()});
      return Values.empty();
    };
    if (GetSimplifiedValues(*LHS, LHSValues))
      return true;
    if (GetSimplifiedValues(*RHS, RHSValues))
      return true;

    LLVMContext &Ctx = LHS->getContext();

    InformationCache &InfoCache = A.getInfoCache();
    Instruction *CmpI = dyn_cast<Instruction>(&Cmp);
    Function *F = CmpI ? CmpI->getFunction() : nullptr;
    const auto *DT = /* ... */ nullptr;
    const auto *TLI =
        F ? A.getInfoCache().getTargetLibraryInfoForFunction(*F) : nullptr;
    auto *AC = /* ... */ nullptr;

    const DataLayout &DL = A.getDataLayout();
    SimplifyQuery Q(DL, TLI, DT, AC, CmpI);

    auto CheckPair = [&](Value &LHSV, Value &RHSV) {
      // ...
        addValue(A, getState(), /* ... */, nullptr, II.S, getAnchorScope());
      // ...

      // Handle the trivial case first where the operands are identical.
      if (&LHSV == &RHSV &&
          /* ... */) {
        Constant *NewV = ConstantInt::get(Type::getInt1Ty(Ctx),
                                          /* ... */);
        addValue(A, getState(), *NewV, nullptr, II.S,
                 getAnchorScope());
        return true;
      }

      // ...
      if (TypedLHS && TypedRHS) {
        Value *NewV = simplifyCmpInst(Pred, TypedLHS, TypedRHS, Q);
        if (NewV && NewV != &Cmp) {
          addValue(A, getState(), *NewV, nullptr, II.S,
                   getAnchorScope());
          return true;
        }
      }

      // ...
      if (!LHSIsNull && !RHSIsNull)
        return true;

      // From now on we only handle equality comparisons against null.
      assert((LHSIsNull || RHSIsNull) &&
             "Expected nullptr versus non-nullptr comparison at this point");

      unsigned PtrIdx = LHSIsNull;
      bool IsKnownNonNull;
      bool IsAssumedNonNull = AA::hasAssumedIRAttr<Attribute::NonNull>(
          /* ... */, DepClassTy::REQUIRED, IsKnownNonNull);
      if (!IsAssumedNonNull)
        return false;

      // ...
      addValue(A, getState(), *NewV, nullptr, II.S,
               getAnchorScope());
      return true;
    };

    for (auto &LHSValue : LHSValues)
      for (auto &RHSValue : RHSValues)
        if (!CheckPair(*LHSValue.getValue(), *RHSValue.getValue()))
          return false;
    return true;
  }

  bool handleSelectInst(Attributor &A, SelectInst &SI, ItemInfo II,
                        SmallVectorImpl<ItemInfo> &Worklist) {
    const Instruction *CtxI = II.I.getCtxI();
    bool UsedAssumedInformation = false;

    std::optional<Constant *> C =
        A.getAssumedConstant(*SI.getCondition(), *this, UsedAssumedInformation);
    bool NoValueYet = !C.has_value();
    if (NoValueYet /* ... */)
      return true;
    // ...
    } else if (&SI == &getAssociatedValue()) {
      // We could not simplify the condition, assume both values.
      // ...
    } else {
      std::optional<Value *> SimpleV = A.getAssumedSimplified(
          /* ... */);
      if (!SimpleV.has_value())
        return true;
      if (*SimpleV) {
        addValue(A, getState(), **SimpleV, CtxI, II.S, getAnchorScope());
        return true;
      }
      return false;
    }
    return true;
  }

  bool handleLoadInst(Attributor &A, LoadInst &LI, ItemInfo II,
                      SmallVectorImpl<ItemInfo> &Worklist) {
    SmallSetVector<Value *, 4> PotentialCopies;
    SmallSetVector<Instruction *, 4> PotentialValueOrigins;
    bool UsedAssumedInformation = false;
    if (!AA::getPotentiallyLoadedValues(A, LI, PotentialCopies,
                                        PotentialValueOrigins, *this,
                                        UsedAssumedInformation,
                                        /* ... */)) {
      LLVM_DEBUG(dbgs() << "[AAPotentialValues] Failed to get potentially "
                           "loaded values for load instruction "
                        << LI << "\n");
      return false;
    }

    // Do not simplify loads that are only used in llvm.assume if we cannot
    // also delete all the stores that may feed into the load.
    InformationCache &InfoCache = A.getInfoCache();
    if (InfoCache.isOnlyUsedByAssume(LI)) {
      if (!llvm::all_of(PotentialValueOrigins, [&](Instruction *I) {
            // ...
            if (auto *SI = dyn_cast<StoreInst>(I))
              return A.isAssumedDead(SI->getOperandUse(0), this,
                                     /* ... */
                                     UsedAssumedInformation,
                                     /* ... */);
            return A.isAssumedDead(*I, this, nullptr,
                                   UsedAssumedInformation,
                                   /* ... */);
          })) {
        LLVM_DEBUG(dbgs() << "[AAPotentialValues] Load is only used by assumes "
                             "and we cannot delete all the stores: "
                          << LI << "\n");
        return false;
      }
    }

    // Values have to be dynamically unique or we lose the fact that a single
    // llvm::Value might represent two runtime values (e.g., stack locations in
    // different recursive calls).
    const Instruction *CtxI = II.I.getCtxI();
    bool ScopeIsLocal = (II.S & AA::Intraprocedural);
    bool AllLocal = ScopeIsLocal;
    bool DynamicallyUnique = llvm::all_of(PotentialCopies, [&](Value *PC) {
      // ...
    });
    if (!DynamicallyUnique) {
      LLVM_DEBUG(dbgs() << "[AAPotentialValues] Not all potentially loaded "
                           "values are dynamically unique: "
                        << LI << "\n");
      return false;
    }

    for (auto *PotentialCopy : PotentialCopies) {
      // ...
      Worklist.push_back({{*PotentialCopy, CtxI}, II.S});
    }
    if (!AllLocal && ScopeIsLocal)
      addValue(A, getState(), LI, CtxI, AA::Intraprocedural, getAnchorScope());
    return true;
  }

  bool handlePHINode(
      Attributor &A, PHINode &PHI, ItemInfo II,
      SmallVectorImpl<ItemInfo> &Worklist,
      SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
    auto GetLivenessInfo = [&](const Function &F) -> LivenessInfo & {
      LivenessInfo &LI = LivenessAAs[&F];
      if (!LI.LivenessAA)
        LI.LivenessAA = A.getAAFor<AAIsDead>(*this, IRPosition::function(F),
                                             DepClassTy::NONE);
      return LI;
    };

    if (&PHI == &getAssociatedValue()) {
      LivenessInfo &LI = GetLivenessInfo(*PHI.getFunction());
      const auto *CI =
          A.getInfoCache().getAnalysisResultForFunction<CycleAnalysis>(
              *PHI.getFunction());
      // ...
      for (unsigned u = 0, e = PHI.getNumIncomingValues(); u < e; u++) {
        BasicBlock *IncomingBB = PHI.getIncomingBlock(u);
        if (LI.LivenessAA &&
            LI.LivenessAA->isEdgeDead(IncomingBB, PHI.getParent())) {
          LI.AnyDead = true;
          continue;
        }
        // ...
      }
      return true;
    }

    bool UsedAssumedInformation = false;
    std::optional<Value *> SimpleV = A.getAssumedSimplified(
        /* ... */);
    if (!SimpleV.has_value())
      return true;
    // ...
    addValue(A, getState(), **SimpleV, &PHI, II.S, getAnchorScope());
    return true;
  }

  /// Use the generic, non-optimistic InstSimplify functionality if we managed
  /// to simplify some operands of the instruction \p I.
  bool handleGenericInst(Attributor &A, Instruction &I, ItemInfo II,
                         SmallVectorImpl<ItemInfo> &Worklist) {
    bool SomeSimplified = false;
    bool UsedAssumedInformation = false;

    SmallVector<Value *, 8> NewOps(I.getNumOperands());
    int Idx = 0;
    for (Value *Op : I.operands()) {
      const auto &SimplifiedOp = A.getAssumedSimplified(
          /* ... */);
      // If we are not sure about any operand we are not sure about the entire
      // instruction, we'll wait.
      if (!SimplifiedOp.has_value())
        return true;

      if (*SimplifiedOp)
        NewOps[Idx] = *SimplifiedOp;
      else
        NewOps[Idx] = Op;

      SomeSimplified |= (NewOps[Idx] != Op);
      ++Idx;
    }

    // We won't bother with the InstSimplify interface if we didn't simplify
    // any operand ourselves.
    if (!SomeSimplified)
      return false;

    InformationCache &InfoCache = A.getInfoCache();
    Function *F = I.getFunction();
    // ...
    const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
    // ...
    const DataLayout &DL = I.getDataLayout();
    SimplifyQuery Q(DL, TLI, DT, AC, &I);
    Value *NewV = simplifyInstructionWithOperands(&I, NewOps, Q);
    if (!NewV || NewV == &I)
      return false;

    LLVM_DEBUG(dbgs() << "Generic inst " << I << " assumed simplified to "
                      << *NewV << "\n");
    Worklist.push_back({{*NewV, II.I.getCtxI()}, II.S});
    return true;
  }

  bool simplifyInstruction(
      Attributor &A, Instruction &I, ItemInfo II,
      SmallVectorImpl<ItemInfo> &Worklist,
      SmallMapVector<const Function *, LivenessInfo, 4> &LivenessAAs) {
    if (auto *CI = dyn_cast<CmpInst>(&I))
      return handleCmp(A, *CI, CI->getOperand(0), CI->getOperand(1),
                       CI->getPredicate(), II, Worklist);

    switch (I.getOpcode()) {
    case Instruction::Select:
      return handleSelectInst(A, cast<SelectInst>(I), II, Worklist);
    case Instruction::PHI:
      return handlePHINode(A, cast<PHINode>(I), II, Worklist, LivenessAAs);
    case Instruction::Load:
      return handleLoadInst(A, cast<LoadInst>(I), II, Worklist);
    default:
      return handleGenericInst(A, I, II, Worklist);
    }
  }

  void genericValueTraversal(Attributor &A, Value *InitialV) {
    SmallMapVector<const Function *, LivenessInfo, 4> LivenessAAs;

    SmallSet<ItemInfo, 16> Visited;
    SmallVector<ItemInfo, 16> Worklist;
    Worklist.push_back({{*InitialV, getCtxI()}, AA::AnyScope});

    int Iteration = 0;
    do {
      ItemInfo II = Worklist.pop_back_val();
      Value *V = II.I.getValue();
      const Instruction *CtxI = II.I.getCtxI();
      AA::ValueScope S = II.S;

      if (!Visited.insert(II).second)
        continue;

      // Limit the compile time for complex expressions.
      if (Iteration++ >= MaxPotentialValuesIterations) {
        LLVM_DEBUG(dbgs() << "Generic value traversal reached iteration limit: "
                          << Iteration << "!\n");
        addValue(A, getState(), *V, CtxI, S, getAnchorScope());
        continue;
      }

      // Explicitly look through calls with a "returned" attribute if we do
      // not have a pointer, as stripPointerCasts only works on pointers.
      Value *NewV = nullptr;
      if (V->getType()->isPointerTy()) {
        NewV = V->stripPointerCasts();
      } else {
        // ...
          for (Argument &Arg : Callee->args())
            // ...
      }
      if (NewV && NewV != V) {
        Worklist.push_back({{*NewV, CtxI}, S});
        continue;
      }

      // ...

      if (V == InitialV && CtxI == getCtxI()) {
        indicatePessimisticFixpoint();
        return;
      }

      addValue(A, getState(), *V, CtxI, S, getAnchorScope());
    } while (!Worklist.empty());

    // If we actually used liveness information, record a dependence.
    for (auto &It : LivenessAAs)
      if (It.second.AnyDead)
        A.recordDependence(*It.second.LivenessAA, *this, DepClassTy::OPTIONAL);
  }

  void trackStatistics() const override {
    // ...
  }
};
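// Illustrative sketch (not part of the original source): how a client asks the
// Attributor for the simplified values of a position, the same interface the
// traversal above relies on. `A`, `IRP`, and `*this` are assumed available.
//
//   SmallVector<AA::ValueAndContext> Values;
//   bool UsedAssumedInformation = false;
//   if (A.getAssumedSimplifiedValues(IRP, this, Values, AA::Intraprocedural,
//                                    UsedAssumedInformation)) {
//     for (const AA::ValueAndContext &VAC : Values)
//       ; // VAC.getValue() is one value the position may take,
//         // VAC.getCtxI() its (possibly null) context instruction
//   }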
struct AAPotentialValuesArgument final : AAPotentialValuesImpl {
  using Base = AAPotentialValuesImpl;
  AAPotentialValuesArgument(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  void initialize(Attributor &A) override {
    // ...
      indicatePessimisticFixpoint();
    // ...
  }

  ChangeStatus updateImpl(Attributor &A) override {
    auto AssumedBefore = getAssumed();

    unsigned ArgNo = getCalleeArgNo();

    bool UsedAssumedInformation = false;
    SmallVector<AA::ValueAndContext> Values;
    auto CallSitePred = [&](AbstractCallSite ACS) {
      const auto CSArgIRP = IRPosition::callsite_argument(ACS, ArgNo);
      if (CSArgIRP.getPositionKind() == IRP_INVALID)
        return false;

      if (!A.getAssumedSimplifiedValues(CSArgIRP, this, Values,
                                        /* ... */
                                        UsedAssumedInformation))
        return false;

      return isValidState();
    };

    if (!A.checkForAllCallSites(CallSitePred, *this,
                                /* ... */
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    Function *Fn = getAssociatedFunction();
    bool AnyNonLocal = false;
    for (auto &It : Values) {
      // ...
        addValue(A, getState(), *It.getValue(), It.getCtxI(), AA::AnyScope,
                 /* ... */);
      // ...
        return indicatePessimisticFixpoint();
      // ...
        addValue(A, getState(), *It.getValue(), It.getCtxI(), AA::AnyScope,
                 /* ... */);
      // ...
      AnyNonLocal = true;
    }
    assert(!undefIsContained() && "Undef should be an explicit value!");
    if (AnyNonLocal)
      giveUpOnIntraprocedural(A);

    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  }

  void trackStatistics() const override {
    // ...
  }
};
struct AAPotentialValuesReturned : public AAPotentialValuesFloating {
  using Base = AAPotentialValuesFloating;
  AAPotentialValuesReturned(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  void initialize(Attributor &A) override {
    Function *F = getAssociatedFunction();
    if (!F || F->isDeclaration() || F->getReturnType()->isVoidTy()) {
      indicatePessimisticFixpoint();
      return;
    }

    for (Argument &Arg : F->args())
      if (Arg.hasReturnedAttr()) {
        // ...
        ReturnedArg = &Arg;
        break;
      }
    if (!A.isFunctionIPOAmendable(*F) ||
        A.hasSimplificationCallback(getIRPosition())) {
      if (!ReturnedArg)
        indicatePessimisticFixpoint();
      else
        indicateOptimisticFixpoint();
    }
  }

  ChangeStatus updateImpl(Attributor &A) override {
    auto AssumedBefore = getAssumed();
    bool UsedAssumedInformation = false;

    SmallVector<AA::ValueAndContext> Values;
    Function *AnchorScope = getAnchorScope();
    auto HandleReturnedValue = [&](Value &V, Instruction *CtxI,
                                   bool AddValues) {
      if (!A.getAssumedSimplifiedValues(IRPosition::value(V), this, Values,
                                        /* ... */
                                        UsedAssumedInformation,
                                        /* ... */))
        return false;

      // ...
      bool AllInterAreIntra = false;
      // ...
          llvm::all_of(Values, [&](const AA::ValueAndContext &VAC) {
            // ...
          });
      for (const AA::ValueAndContext &VAC : Values) {
        addValue(A, getState(), *VAC.getValue(),
                 /* ... */);
      }
      if (AllInterAreIntra)
        // ...
      return true;
    };

    if (ReturnedArg) {
      HandleReturnedValue(*ReturnedArg, nullptr, true);
    } else {
      auto RetInstPred = [&](Instruction &RetI) {
        bool AddValues = true;
        // ...
          addValue(A, getState(), *RetI.getOperand(0), &RetI, AA::AnyScope,
                   /* ... */);
        // ...
        return HandleReturnedValue(*RetI.getOperand(0), &RetI, AddValues);
      };

      if (!A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                                     UsedAssumedInformation,
                                     /* ... */))
        return indicatePessimisticFixpoint();
    }

    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  }

  ChangeStatus manifest(Attributor &A) override {
    if (ReturnedArg)
      return ChangeStatus::UNCHANGED;
    SmallVector<AA::ValueAndContext> Values;
    if (!getAssumedSimplifiedValues(A, Values, AA::ValueScope::Intraprocedural,
                                    /* ... */))
      return ChangeStatus::UNCHANGED;
    Value *NewVal = getSingleValue(A, *this, getIRPosition(), Values);
    if (!NewVal)
      return ChangeStatus::UNCHANGED;

    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    if (auto *Arg = dyn_cast<Argument>(NewVal)) {
      STATS_DECLTRACK(UniqueReturnValue, FunctionReturn,
                      "Number of function with unique return");
      Changed |= A.manifestAttrs(
          IRPosition::argument(*Arg),
          {Attribute::get(Arg->getContext(), Attribute::Returned)});
    } else {
      auto RetInstPred = [&](Instruction &RetI) {
        Value *RetOp = RetI.getOperand(0);
        // ...
        if (A.changeUseAfterManifest(RetI.getOperandUse(0), *NewVal))
          Changed = ChangeStatus::CHANGED;
        return true;
      };
      bool UsedAssumedInformation = false;
      (void)A.checkForAllInstructions(RetInstPred, *this, {Instruction::Ret},
                                      UsedAssumedInformation,
                                      /* ... */);
    }
    return Changed;
  }

  ChangeStatus indicatePessimisticFixpoint() override {
    return AAPotentialValues::indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {
    // ...
  }

  /// The argument marked `returned`, if any.
  Argument *ReturnedArg = nullptr;
};
struct AAPotentialValuesFunction : AAPotentialValuesImpl {
  AAPotentialValuesFunction(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  // ...

  void trackStatistics() const override {
    // ...
  }
};

struct AAPotentialValuesCallSite : AAPotentialValuesFunction {
  AAPotentialValuesCallSite(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesFunction(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AAPotentialValuesCallSiteReturned : AAPotentialValuesImpl {
  AAPotentialValuesCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesImpl(IRP, A) {}

  ChangeStatus updateImpl(Attributor &A) override {
    auto AssumedBefore = getAssumed();

    Function *Callee = getAssociatedFunction();
    if (!Callee)
      return indicatePessimisticFixpoint();

    bool UsedAssumedInformation = false;
    auto *CB = cast<CallBase>(getCtxI());
    if (/* ... */
        UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    SmallVector<AA::ValueAndContext> Values;
    for (AA::ValueScope S : {AA::Interprocedural, AA::Intraprocedural}) {
      Values.clear();
      if (!A.getAssumedSimplifiedValues(IRPosition::returned(*Callee), this,
                                        Values, S, UsedAssumedInformation))
        // ...

      for (auto &It : Values) {
        Value *V = It.getValue();
        std::optional<Value *> CallerV = A.translateArgumentToCallSiteContent(
            V, *CB, *this, UsedAssumedInformation);
        if (!CallerV.has_value()) {
          // ...
          continue;
        }
        V = *CallerV ? *CallerV : V;
        // ...
          giveUpOnIntraprocedural(A);
        // ...
        addValue(A, getState(), *V, CB, S, getAnchorScope());
      }
      // ...
        return indicatePessimisticFixpoint();
    }
    // ...
      return indicatePessimisticFixpoint();
    return (AssumedBefore == getAssumed()) ? ChangeStatus::UNCHANGED
                                           : ChangeStatus::CHANGED;
  }

  ChangeStatus indicatePessimisticFixpoint() override {
    return AAPotentialValues::indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {
    // ...
  }
};

struct AAPotentialValuesCallSiteArgument : AAPotentialValuesFloating {
  AAPotentialValuesCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAPotentialValuesFloating(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};
struct AAAssumptionInfoImpl : public AAAssumptionInfo {
  AAAssumptionInfoImpl(const IRPosition &IRP, Attributor &A,
                       const DenseSet<StringRef> &Known)
      : AAAssumptionInfo(IRP, A, Known) {}

  ChangeStatus manifest(Attributor &A) override {
    // Don't manifest a universal set if it somehow made it here.
    if (getKnown().isUniversal())
      return ChangeStatus::UNCHANGED;

    const IRPosition &IRP = getIRPosition();
    SmallVector<StringRef, 0> Set(getAssumed().getSet().begin(),
                                  getAssumed().getSet().end());
    // ...
    return A.manifestAttrs(IRP,
                           /* ... */);
  }

  bool hasAssumption(const StringRef Assumption) const override {
    return isValidState() && setContains(Assumption);
  }

  const std::string getAsStr(Attributor *A) const override {
    const SetContents &Known = getKnown();
    const SetContents &Assumed = getAssumed();

    SmallVector<StringRef, 0> Set(Known.getSet().begin(), Known.getSet().end());
    // ...
    const std::string KnownStr = llvm::join(Set, ",");

    std::string AssumedStr = "Universal";
    if (!Assumed.isUniversal()) {
      Set.assign(Assumed.getSet().begin(), Assumed.getSet().end());
      AssumedStr = llvm::join(Set, ",");
    }
    return "Known [" + KnownStr + "]," + " Assumed [" + AssumedStr + "]";
  }
};

struct AAAssumptionInfoFunction final : AAAssumptionInfoImpl {
  AAAssumptionInfoFunction(const IRPosition &IRP, Attributor &A)
      : AAAssumptionInfoImpl(IRP, A,
                             getAssumptions(*IRP.getAssociatedFunction())) {}

  ChangeStatus updateImpl(Attributor &A) override {
    bool Changed = false;

    auto CallSitePred = [&](AbstractCallSite ACS) {
      const auto *AssumptionAA = A.getAAFor<AAAssumptionInfo>(
          *this, IRPosition::callsite_function(*ACS.getInstruction()),
          DepClassTy::REQUIRED);
      if (!AssumptionAA)
        return false;
      Changed |= getIntersection(AssumptionAA->getAssumed());
      return !getAssumed().empty() || !getKnown().empty();
    };

    bool UsedAssumedInformation = false;
    // Get the intersection of all assumptions held by this node's predecessors.
    // If we don't know all the call sites then this is either an entry into the
    // call graph or an empty node. This node is known to only contain its own
    // assumptions and can be propagated to its successors.
    if (!A.checkForAllCallSites(CallSitePred, *this, true,
                                UsedAssumedInformation))
      return indicatePessimisticFixpoint();

    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  void trackStatistics() const override {}
};

struct AAAssumptionInfoCallSite final : AAAssumptionInfoImpl {
  AAAssumptionInfoCallSite(const IRPosition &IRP, Attributor &A)
      : AAAssumptionInfoImpl(IRP, A, getInitialAssumptions(IRP)) {}

  void initialize(Attributor &A) override {
    const IRPosition &FnPos = IRPosition::function(*getAnchorScope());
    A.getAAFor<AAAssumptionInfo>(*this, FnPos, DepClassTy::REQUIRED);
  }

  ChangeStatus updateImpl(Attributor &A) override {
    const IRPosition &FnPos = IRPosition::function(*getAnchorScope());
    auto *AssumptionAA =
        A.getAAFor<AAAssumptionInfo>(*this, FnPos, DepClassTy::REQUIRED);
    if (!AssumptionAA)
      return indicatePessimisticFixpoint();
    bool Changed = getIntersection(AssumptionAA->getAssumed());
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  void trackStatistics() const override {}

private:
  /// Helper to initialize the known set with the assumptions of this call site
  /// and its callee.
  DenseSet<StringRef> getInitialAssumptions(const IRPosition &IRP) {
    // ...
    return Assumptions;
  }
};
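// Illustrative sketch (not part of the original source): checking whether a
// function `F` carries a given "assumption" string via the hasAssumption
// interface declared above. `A` and `*this` are assumed available; the string
// "omp_no_openmp" is only an example key.
//
//   const auto *AssumptionAA = A.getAAFor<AAAssumptionInfo>(
//       *this, IRPosition::function(F), DepClassTy::OPTIONAL);
//   if (AssumptionAA && AssumptionAA->hasAssumption("omp_no_openmp")) {
//     // F is assumed to satisfy this assumption string.
//   }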
struct AAUnderlyingObjectsImpl : AAUnderlyingObjects {
  AAUnderlyingObjectsImpl(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjects(IRP, A) {}

  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "<invalid>";
    std::string Str;
    llvm::raw_string_ostream OS(Str);
    OS << "underlying objects: inter " << InterAssumedUnderlyingObjects.size()
       << " objects, intra " << IntraAssumedUnderlyingObjects.size()
       << " objects.\n";
    if (!InterAssumedUnderlyingObjects.empty()) {
      OS << "inter objects:\n";
      for (auto *Obj : InterAssumedUnderlyingObjects)
        OS << *Obj << '\n';
    }
    if (!IntraAssumedUnderlyingObjects.empty()) {
      OS << "intra objects:\n";
      for (auto *Obj : IntraAssumedUnderlyingObjects)
        OS << *Obj << '\n';
    }
    return Str;
  }

  void trackStatistics() const override {}

  ChangeStatus updateImpl(Attributor &A) override {
    auto &Ptr = getAssociatedValue();

    bool UsedAssumedInformation = false;
    auto DoUpdate = [&](SmallSetVector<Value *, 8> &UnderlyingObjects,
                        AA::ValueScope Scope) {
      SmallPtrSet<Value *, 8> SeenObjects;
      SmallVector<AA::ValueAndContext> Values;
      if (!A.getAssumedSimplifiedValues(IRPosition::value(Ptr), *this, Values,
                                        Scope, UsedAssumedInformation))
        return UnderlyingObjects.insert(&Ptr);

      bool Changed = false;
      for (unsigned I = 0; I < Values.size(); ++I) {
        auto &VAC = Values[I];
        auto *Obj = VAC.getValue();
        Value *UO = getUnderlyingObject(Obj);
        if (!SeenObjects.insert(UO ? UO : Obj).second)
          continue;
        if (UO && UO != Obj) {
          // ...
          const auto *OtherAA = A.getAAFor<AAUnderlyingObjects>(
              *this, IRPosition::value(*UO), DepClassTy::OPTIONAL);
          auto Pred = [&](Value &V) {
            // ...
          };
          if (!OtherAA || !OtherAA->forallUnderlyingObjects(Pred, Scope))
            llvm_unreachable(
                "The forall call should not return false at this position");
          continue;
        }

        if (isa<SelectInst>(Obj)) {
          Changed |= handleIndirect(A, *Obj, UnderlyingObjects, Scope,
                                    UsedAssumedInformation);
          continue;
        }
        if (auto *PHI = dyn_cast<PHINode>(Obj)) {
          for (unsigned u = 0, e = PHI->getNumIncomingValues(); u < e; u++)
            Changed |=
                handleIndirect(A, *PHI->getIncomingValue(u), UnderlyingObjects,
                               Scope, UsedAssumedInformation);
          continue;
        }

        Changed |= UnderlyingObjects.insert(Obj);
      }

      return Changed;
    };

    bool Changed = false;
    Changed |= DoUpdate(IntraAssumedUnderlyingObjects, AA::Intraprocedural);
    Changed |= DoUpdate(InterAssumedUnderlyingObjects, AA::Interprocedural);
    if (!UsedAssumedInformation)
      indicateOptimisticFixpoint();
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  bool forallUnderlyingObjects(
      function_ref<bool(Value &)> Pred,
      AA::ValueScope Scope = AA::Interprocedural) const override {
    if (!isValidState())
      return Pred(getAssociatedValue());

    auto &AssumedUnderlyingObjects = Scope == AA::Intraprocedural
                                         ? IntraAssumedUnderlyingObjects
                                         : InterAssumedUnderlyingObjects;
    for (Value *Obj : AssumedUnderlyingObjects)
      if (!Pred(*Obj))
        return false;

    return true;
  }

private:
  /// Handle the case where the underlying value is a select or a PHI.
  bool handleIndirect(Attributor &A, Value &V,
                      SmallSetVector<Value *, 8> &UnderlyingObjects,
                      AA::ValueScope Scope, bool &UsedAssumedInformation) {
    bool Changed = false;
    const auto *AA = A.getAAFor<AAUnderlyingObjects>(
        *this, IRPosition::value(V), DepClassTy::OPTIONAL);
    auto Pred = [&](Value &V) {
      Changed |= UnderlyingObjects.insert(&V);
      return true;
    };
    if (!AA || !AA->forallUnderlyingObjects(Pred, Scope))
      llvm_unreachable(
          "The forall call should not return false at this position");
    return Changed;
  }

  /// All underlying objects collected so far in the intraprocedural scope.
  SmallSetVector<Value *, 8> IntraAssumedUnderlyingObjects;
  /// All underlying objects collected so far in the interprocedural scope.
  SmallSetVector<Value *, 8> InterAssumedUnderlyingObjects;
};

struct AAUnderlyingObjectsFloating final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsFloating(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsArgument final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsArgument(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSite final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSite(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSiteArgument final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsReturned final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsReturned(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsCallSiteReturned final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};

struct AAUnderlyingObjectsFunction final : AAUnderlyingObjectsImpl {
  AAUnderlyingObjectsFunction(const IRPosition &IRP, Attributor &A)
      : AAUnderlyingObjectsImpl(IRP, A) {}
};
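// Illustrative sketch (not part of the original source): enumerating the
// underlying objects deduced for a pointer value `PtrVal`, using only the
// forallUnderlyingObjects interface shown above. `A` and `*this` are assumed
// available.
//
//   const auto *AAUO = A.getOrCreateAAFor<AAUnderlyingObjects>(
//       IRPosition::value(PtrVal), this, DepClassTy::REQUIRED);
//   if (AAUO)
//     AAUO->forallUnderlyingObjects([&](Value &Obj) {
//       // `Obj` is one possible underlying object of PtrVal; return false to
//       // abort the traversal early.
//       return true;
//     });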
struct AAGlobalValueInfoFloating : public AAGlobalValueInfo {
  AAGlobalValueInfoFloating(const IRPosition &IRP, Attributor &A)
      : AAGlobalValueInfo(IRP, A) {}

  bool checkUse(Attributor &A, const Use &U, bool &Follow,
                SmallVectorImpl<const Value *> &Worklist) {
    Instruction *UInst = dyn_cast<Instruction>(U.getUser());
    if (!UInst) {
      Follow = true;
      return true;
    }

    LLVM_DEBUG(dbgs() << "[AAGlobalValueInfo] Check use: " << *U.get() << " in "
                      << *UInst << "\n");

    if (auto *Cmp = dyn_cast<ICmpInst>(UInst)) {
      int Idx = &Cmp->getOperandUse(0) == &U;
      // ...
    }

    // ...
      return U == &getAnchorValue();
    // ...

    auto CallSitePred = [&](AbstractCallSite ACS) {
      Worklist.push_back(ACS.getInstruction());
      return true;
    };
    bool UsedAssumedInformation = false;
    if (!A.checkForAllCallSites(CallSitePred, *UInst->getFunction(),
                                /* ... */
                                UsedAssumedInformation))
      return false;
    return true;
  }

  ChangeStatus updateImpl(Attributor &A) override {
    auto *Fn = dyn_cast<Function>(&getAnchorValue());
    if (!Fn || !A.isFunctionIPOAmendable(*Fn))
      return indicatePessimisticFixpoint();
    // ...

    unsigned NumUsesBefore = Uses.size();

    SmallPtrSet<const Value *, 8> Visited;
    SmallVector<const Value *> Worklist;
    Worklist.push_back(&getAnchorValue());

    auto UsePred = [&](const Use &U, bool &Follow) -> bool {
      Uses.insert(&U);
      // ...
      return checkUse(A, U, Follow, Worklist);
    };
    auto EquivalentUseCB = [&](const Use &OldU, const Use &NewU) {
      Uses.insert(&OldU);
      return true;
    };

    while (!Worklist.empty()) {
      const Value *V = Worklist.pop_back_val();
      if (!Visited.insert(V).second)
        continue;
      if (!A.checkForAllUses(UsePred, *this, *V,
                             /* ... */
                             DepClassTy::OPTIONAL,
                             /* ... */ true, EquivalentUseCB)) {
        return indicatePessimisticFixpoint();
      }
    }

    return Uses.size() == NumUsesBefore ? ChangeStatus::UNCHANGED
                                        : ChangeStatus::CHANGED;
  }

  bool isPotentialUse(const Use &U) const override {
    return !isValidState() || Uses.contains(&U);
  }

  ChangeStatus manifest(Attributor &A) override {
    return ChangeStatus::UNCHANGED;
  }

  const std::string getAsStr(Attributor *A) const override {
    return "[" + std::to_string(Uses.size()) + " uses]";
  }

  void trackStatistics() const override {
    // ...
  }

  /// Set of (transitive) uses of this global value.
  SmallPtrSet<const Use *, 8> Uses;
};
struct AAIndirectCallInfoCallSite : public AAIndirectCallInfo {
  AAIndirectCallInfoCallSite(const IRPosition &IRP, Attributor &A)
      : AAIndirectCallInfo(IRP, A) {}

  void initialize(Attributor &A) override {
    auto *MD = getCtxI()->getMetadata(LLVMContext::MD_callees);
    if (!MD && !A.isClosedWorldModule())
      return;

    if (MD) {
      for (const auto &Op : MD->operands())
        if (Function *Callee = mdconst::dyn_extract_or_null<Function>(Op))
          PotentialCallees.insert(Callee);
    } else if (A.isClosedWorldModule()) {
      ArrayRef<Function *> IndirectlyCallableFunctions =
          A.getInfoCache().getIndirectlyCallableFunctions(A);
      PotentialCallees.insert_range(IndirectlyCallableFunctions);
    }

    if (PotentialCallees.empty())
      indicateOptimisticFixpoint();
  }

  ChangeStatus updateImpl(Attributor &A) override {
    CallBase *CB = cast<CallBase>(getCtxI());
    const Use &CalleeUse = CB->getCalledOperandUse();
    Value *FP = CB->getCalledOperand();

    SmallSetVector<Function *, 4> AssumedCalleesNow;
    bool AllCalleesKnownNow = AllCalleesKnown;

    auto CheckPotentialCalleeUse = [&](Function &PotentialCallee,
                                       bool &UsedAssumedInformation) {
      const auto *GIAA = A.getAAFor<AAGlobalValueInfo>(
          *this, IRPosition::value(PotentialCallee), DepClassTy::OPTIONAL);
      if (!GIAA || GIAA->isPotentialUse(CalleeUse))
        return true;
      UsedAssumedInformation = !GIAA->isAtFixpoint();
      return false;
    };

    auto AddPotentialCallees = [&]() {
      for (auto *PotentialCallee : PotentialCallees) {
        bool UsedAssumedInformation = false;
        if (CheckPotentialCalleeUse(*PotentialCallee, UsedAssumedInformation))
          AssumedCalleesNow.insert(PotentialCallee);
      }
    };

    // Use simplification to find potential callees; if !callees was present,
    // fall back to that set when necessary.
    bool UsedAssumedInformation = false;
    SmallVector<AA::ValueAndContext> Values;
    if (!A.getAssumedSimplifiedValues(IRPosition::value(*FP), this, Values,
                                      AA::ValueScope::AnyScope,
                                      UsedAssumedInformation)) {
      if (PotentialCallees.empty())
        return indicatePessimisticFixpoint();
      AddPotentialCallees();
    }

    // Try to find a reason for \p Fn not to be a potential callee. If none is
    // found, add it to the assumed callees set.
    auto CheckPotentialCallee = [&](Function &Fn) {
      if (!PotentialCallees.empty() && !PotentialCallees.count(&Fn))
        return false;

      auto &CachedResult = FilterResults[&Fn];
      if (CachedResult.has_value())
        return CachedResult.value();

      bool UsedAssumedInformation = false;
      if (!CheckPotentialCalleeUse(Fn, UsedAssumedInformation)) {
        if (!UsedAssumedInformation)
          CachedResult = false;
        return false;
      }

      int NumFnArgs = Fn.arg_size();
      int NumCBArgs = CB->arg_size();

      // Check if any excess argument is known to be UB on undef.
      for (int I = NumCBArgs; I < NumFnArgs; ++I) {
        bool IsKnown = false;
        if (!AA::hasAssumedIRAttr</* ... */>(
                /* ... */
                DepClassTy::OPTIONAL, IsKnown)) {
          // ...
          CachedResult = false;
          return false;
        }
      }

      CachedResult = true;
      return true;
    };

    // Check the simplification result, prune known UB callees, and restrict
    // it to the !callees set, if present.
    for (auto &VAC : Values) {
      // ...
      Function *VACFn = dyn_cast<Function>(VAC.getValue());
      if (!VACFn)
        continue;
      if (CheckPotentialCallee(*VACFn))
        AssumedCalleesNow.insert(VACFn);
    }

    // ...
    if (!PotentialCallees.empty()) {
      AddPotentialCallees();
      // ...
    }
    // ...
    AllCalleesKnownNow = false;

    if (AssumedCalleesNow == AssumedCallees &&
        AllCalleesKnown == AllCalleesKnownNow)
      return ChangeStatus::UNCHANGED;

    std::swap(AssumedCallees, AssumedCalleesNow);
    AllCalleesKnown = AllCalleesKnownNow;
    return ChangeStatus::CHANGED;
  }

  ChangeStatus manifest(Attributor &A) override {
    // If we can't specialize at all, give up now.
    if (!AllCalleesKnown && AssumedCallees.empty())
      return ChangeStatus::UNCHANGED;

    CallBase *CB = cast<CallBase>(getCtxI());
    bool UsedAssumedInformation = false;
    if (A.isAssumedDead(*CB, this, nullptr,
                        UsedAssumedInformation))
      return ChangeStatus::UNCHANGED;

    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    Value *FP = CB->getCalledOperand();
    if (FP->getType()->getPointerAddressSpace())
      FP = new AddrSpaceCastInst(FP, PointerType::get(FP->getContext(), 0),
                                 /* ... */);

    // If we know all callees and there are none, the call site is (effectively)
    // dead (or UB).
    if (AssumedCallees.empty()) {
      assert(AllCalleesKnown &&
             "Expected all callees to be known if there are none.");
      A.changeToUnreachableAfterManifest(CB);
      return ChangeStatus::CHANGED;
    }

    // Special handling for the single-callee case.
    if (AllCalleesKnown && AssumedCallees.size() == 1) {
      auto *NewCallee = AssumedCallees.front();
      // ...
        NumIndirectCallsPromoted++;
        return ChangeStatus::CHANGED;
      // ...
        A.deleteAfterManifest(*CB);
        return ChangeStatus::CHANGED;
      // ...
    }

    // For each potential callee we emit a conditional:
    //   if (ptr == callee) callee(args); else ...
    bool SpecializedForAnyCallees = false;
    bool SpecializedForAllCallees = AllCalleesKnown;
    ICmpInst *LastCmp = nullptr;
    SmallVector<Function *, 8> SkippedAssumedCallees;
    SmallVector<std::pair<CallInst *, Instruction *>> NewCalls;
    for (Function *NewCallee : AssumedCallees) {
      if (!A.shouldSpecializeCallSiteForCallee(*this, *CB, *NewCallee,
                                               AssumedCallees.size())) {
        SkippedAssumedCallees.push_back(NewCallee);
        SpecializedForAllCallees = false;
        continue;
      }
      SpecializedForAnyCallees = true;

      // ...
      A.registerManifestAddedBasicBlock(*ThenTI->getParent());
      A.registerManifestAddedBasicBlock(*IP->getParent());
      // ...
      A.registerManifestAddedBasicBlock(*ElseBB);
      // ...
      SplitTI->replaceUsesOfWith(CBBB, ElseBB);
      // ...
      CastInst *RetBC = nullptr;
      CallInst *NewCall = nullptr;
      // ...
      NumIndirectCallsPromoted++;
      // ...
    }

    auto AttachCalleeMetadata = [&](CallBase &IndirectCB) {
      if (!AllCalleesKnown)
        return ChangeStatus::UNCHANGED;
      MDBuilder MDB(IndirectCB.getContext());
      MDNode *Callees = MDB.createCallees(SkippedAssumedCallees);
      IndirectCB.setMetadata(LLVMContext::MD_callees, Callees);
      return ChangeStatus::CHANGED;
    };

    if (!SpecializedForAnyCallees)
      return AttachCalleeMetadata(*CB);

    // Check if we still need the fallback indirect call.
    if (SpecializedForAllCallees) {
      // ...
      new UnreachableInst(IP->getContext(), IP);
      IP->eraseFromParent();
    } else {
      auto *CBClone = cast<CallBase>(CB->clone());
      CBClone->setName(CB->getName());
      CBClone->insertBefore(*IP->getParent(), IP);
      NewCalls.push_back({CBClone, nullptr});
      AttachCalleeMetadata(*CBClone);
    }

    // ...
        CB->getParent()->getFirstInsertionPt());
    for (auto &It : NewCalls) {
      CallBase *NewCall = It.first;
      Instruction *CallRet = It.second ? It.second : It.first;
      // ...
    }

    A.deleteAfterManifest(*CB);
    Changed = ChangeStatus::CHANGED;

    return Changed;
  }

  const std::string getAsStr(Attributor *A) const override {
    return std::string(AllCalleesKnown ? "eliminate" : "specialize") +
           " indirect call site with " + std::to_string(AssumedCallees.size()) +
           /* ... */;
  }

  void trackStatistics() const override {
    if (AllCalleesKnown) {
      STATS_DECLTRACK(
          Eliminated, CallSites,
          "Number of indirect call sites eliminated via specialization")
    } else {
      STATS_DECLTRACK(Specialized, CallSites,
                      "Number of indirect call sites specialized")
    }
  }

  bool foreachCallee(function_ref<bool(Function *)> CB) const override {
    return isValidState() && AllCalleesKnown && all_of(AssumedCallees, CB);
  }

private:
  /// Map to remember filter results.
  DenseMap<Function *, std::optional<bool>> FilterResults;

  /// If !callees metadata was present, this contains all potential callees.
  SmallSetVector<Function *, 4> PotentialCallees;

  /// The currently assumed callees, which might grow over time.
  SmallSetVector<Function *, 4> AssumedCallees;

  /// Whether all possible callees are in the AssumedCallees set.
  bool AllCalleesKnown = true;
};
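// Illustrative sketch (not part of the original source): enumerating the
// possible callees of an indirect call site `CB` through AAIndirectCallInfo,
// using only foreachCallee as declared above. The callback must return true
// to continue; foreachCallee itself returns false if the callee set is not
// known to be complete.
//
//   const auto *ICI = A.getAAFor<AAIndirectCallInfo>(
//       *this, IRPosition::callsite_function(CB), DepClassTy::OPTIONAL);
//   if (ICI && ICI->foreachCallee([&](Function *Callee) {
//         // `Callee` is one function this call site may reach.
//         return true;
//       })) {
//     // All possible callees were enumerated.
//   }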
struct AAInvariantLoadPointerImpl
    : public StateWrapper<BitIntegerState<uint8_t, 15>,
                          AAInvariantLoadPointer> {
  enum {
    // The pointer does not alias within the bounds of the function.
    IS_NOALIAS = 1 << 0,
    // The pointer is not involved in any effectful instructions within the
    // bounds of the function.
    IS_NOEFFECT = 1 << 1,
    // Loads are invariant within the bounds of the function.
    IS_LOCALLY_INVARIANT = 1 << 2,
    // The memory lifetime is constrained within the bounds of the function.
    IS_LOCALLY_CONSTRAINED = 1 << 3,

    IS_BEST_STATE = IS_NOALIAS | IS_NOEFFECT | IS_LOCALLY_INVARIANT |
                    IS_LOCALLY_CONSTRAINED,
  };
  static_assert(getBestState() == IS_BEST_STATE, "Unexpected best state");

  using Base =
      StateWrapper<BitIntegerState<uint8_t, 15>, AAInvariantLoadPointer>;

  AAInvariantLoadPointerImpl(const IRPosition &IRP, Attributor &A)
      : Base(IRP, A) {}

  bool isKnownInvariant() const final {
    return isKnownLocallyInvariant() && isKnown(IS_LOCALLY_CONSTRAINED);
  }

  bool isKnownLocallyInvariant() const final {
    if (isKnown(IS_LOCALLY_INVARIANT))
      return true;
    return isKnown(IS_NOALIAS | IS_NOEFFECT);
  }

  bool isAssumedInvariant() const final {
    return isAssumedLocallyInvariant() && isAssumed(IS_LOCALLY_CONSTRAINED);
  }

  bool isAssumedLocallyInvariant() const final {
    if (isAssumed(IS_LOCALLY_INVARIANT))
      return true;
    return isAssumed(IS_NOALIAS | IS_NOEFFECT);
  }

  ChangeStatus updateImpl(Attributor &A) override {
    ChangeStatus Changed = ChangeStatus::UNCHANGED;

    if (requiresNoAlias() && !isAssumed(IS_NOALIAS))
      return indicatePessimisticFixpoint();

    // ...
    Changed |= updateLocalInvariance(A);

    return Changed;
  }

  ChangeStatus manifest(Attributor &A) override {
    if (!isKnownInvariant())
      return ChangeStatus::UNCHANGED;

    ChangeStatus Changed = ChangeStatus::UNCHANGED;
    const Value *Ptr = &getAssociatedValue();
    const auto TagInvariantLoads = [&](const Use &U, bool &) {
      if (U.get() != Ptr)
        return true;
      auto *I = dyn_cast<Instruction>(U.getUser());
      if (!I)
        return true;

      // Only change uses within the current call-graph SCC.
      if (!A.isRunOn(I->getFunction()))
        return true;

      if (I->hasMetadata(LLVMContext::MD_invariant_load))
        return true;

      if (auto *LI = dyn_cast<LoadInst>(I)) {
        LI->setMetadata(LLVMContext::MD_invariant_load,
                        MDNode::get(LI->getContext(), {}));
        Changed = ChangeStatus::CHANGED;
      }
      return true;
    };

    (void)A.checkForAllUses(TagInvariantLoads, *this, *Ptr);
    return Changed;
  }

  const std::string getAsStr(Attributor *) const override {
    if (isKnownInvariant())
      return "load-invariant pointer";
    return "non-invariant pointer";
  }

  void trackStatistics() const override {}

private:
  /// Whether invariance deduction requires the pointer to be noalias.
  bool requiresNoAlias() const {
    switch (getPositionKind()) {
    // ...
    case IRP_CALL_SITE:
      // ...
    case IRP_CALL_SITE_RETURNED: {
      // ...
    }
    case IRP_ARGUMENT: {
      const Function *F = getAssociatedFunction();
      assert(F && "no associated function for argument");
      // ...
    }
    // ...
    }
  }

  bool isExternal() const {
    const Function *F = getAssociatedFunction();
    if (!F)
      return true;
    return /* ... */
           getPositionKind() != IRP_CALL_SITE_RETURNED;
  }

  ChangeStatus updateNoAlias(Attributor &A) {
    if (isKnown(IS_NOALIAS) || !isAssumed(IS_NOALIAS))
      return ChangeStatus::UNCHANGED;

    // Try to use AANoAlias.
    if (const auto *ANoAlias = A.getOrCreateAAFor<AANoAlias>(
            getIRPosition(), this, DepClassTy::REQUIRED)) {
      if (ANoAlias->isKnownNoAlias()) {
        addKnownBits(IS_NOALIAS);
        return ChangeStatus::CHANGED;
      }
      if (!ANoAlias->isAssumedNoAlias()) {
        removeAssumedBits(IS_NOALIAS);
        return ChangeStatus::CHANGED;
      }
      return ChangeStatus::UNCHANGED;
    }

    // Fall back to the argument attribute.
    if (const Argument *Arg = getAssociatedArgument()) {
      if (Arg->hasNoAliasAttr()) {
        addKnownBits(IS_NOALIAS);
        return ChangeStatus::UNCHANGED;
      }
      removeAssumedBits(IS_NOALIAS);
      return ChangeStatus::CHANGED;
    }

    return ChangeStatus::UNCHANGED;
  }

  ChangeStatus updateNoEffect(Attributor &A) {
    if (isKnown(IS_NOEFFECT) || !isAssumed(IS_NOEFFECT))
      return ChangeStatus::UNCHANGED;

    if (!getAssociatedFunction())
      return indicatePessimisticFixpoint();
    // ...
      return indicatePessimisticFixpoint();

    const auto HasNoEffectLoads = [&](const Use &U, bool &) {
      const auto *LI = dyn_cast<LoadInst>(U.getUser());
      return !LI || !LI->mayHaveSideEffects();
    };
    if (!A.checkForAllUses(HasNoEffectLoads, *this, getAssociatedValue()))
      return indicatePessimisticFixpoint();

    if (const auto *AMemoryBehavior = A.getOrCreateAAFor<AAMemoryBehavior>(
            getIRPosition(), this, DepClassTy::REQUIRED)) {
      if (!AMemoryBehavior->isAssumedReadOnly())
        return indicatePessimisticFixpoint();
      if (AMemoryBehavior->isKnownReadOnly()) {
        addKnownBits(IS_NOEFFECT);
        return ChangeStatus::UNCHANGED;
      }
      return ChangeStatus::UNCHANGED;
    }

    if (const Argument *Arg = getAssociatedArgument()) {
      if (Arg->onlyReadsMemory()) {
        addKnownBits(IS_NOEFFECT);
        return ChangeStatus::UNCHANGED;
      }
      return indicatePessimisticFixpoint();
    }

    return ChangeStatus::UNCHANGED;
  }

  ChangeStatus updateLocalInvariance(Attributor &A) {
    if (isKnown(IS_LOCALLY_INVARIANT) || !isAssumed(IS_LOCALLY_INVARIANT))
      return ChangeStatus::UNCHANGED;

    // Try to infer invariance from the underlying objects.
    const auto *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(
        getIRPosition(), this, DepClassTy::REQUIRED);
    if (!AUO)
      return ChangeStatus::UNCHANGED;

    bool UsedAssumedInformation = false;
    const auto IsLocallyInvariantLoadIfPointer = [&](const Value &V) {
      if (!V.getType()->isPointerTy())
        return true;
      const auto *IsInvariantLoadPointer =
          A.getOrCreateAAFor<AAInvariantLoadPointer>(IRPosition::value(V), this,
                                                     DepClassTy::REQUIRED);
      if (!IsInvariantLoadPointer)
        return false;
      if (IsInvariantLoadPointer->isKnownLocallyInvariant())
        return true;
      if (!IsInvariantLoadPointer->isAssumedLocallyInvariant())
        return false;

      UsedAssumedInformation = true;
      return true;
    };
    if (!AUO->forallUnderlyingObjects(IsLocallyInvariantLoadIfPointer))
      return indicatePessimisticFixpoint();

    // ...
      if (!IsLocallyInvariantLoadIfPointer(*Arg))
        return indicatePessimisticFixpoint();
    // ...

    if (!UsedAssumedInformation) {
      // ...
      addKnownBits(IS_LOCALLY_INVARIANT);
      return ChangeStatus::CHANGED;
    }

    return ChangeStatus::UNCHANGED;
  }
};

struct AAInvariantLoadPointerFloating final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerFloating(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}
};

struct AAInvariantLoadPointerReturned final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerReturned(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

  void initialize(Attributor &) override {
    removeAssumedBits(IS_LOCALLY_CONSTRAINED);
  }
};

struct AAInvariantLoadPointerCallSiteReturned final
    : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    const Function *F = getAssociatedFunction();
    assert(F && "no associated function for return from call");

    if (!F->isDeclaration() && !F->isIntrinsic())
      return AAInvariantLoadPointerImpl::initialize(A);

    // ...
      return AAInvariantLoadPointerImpl::initialize(A);

    if (F->onlyReadsMemory() && F->hasNoSync())
      return AAInvariantLoadPointerImpl::initialize(A);

    // ...
    indicatePessimisticFixpoint();
  }
};

struct AAInvariantLoadPointerArgument final : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerArgument(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}

  void initialize(Attributor &) override {
    const Function *F = getAssociatedFunction();
    assert(F && "no associated function for argument");

    if (/* ... */) {
      addKnownBits(IS_LOCALLY_CONSTRAINED);
      return;
    }

    if (!F->hasLocalLinkage())
      removeAssumedBits(IS_LOCALLY_CONSTRAINED);
  }
};

struct AAInvariantLoadPointerCallSiteArgument final
    : AAInvariantLoadPointerImpl {
  AAInvariantLoadPointerCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAInvariantLoadPointerImpl(IRP, A) {}
};
template <typename InstType>
static bool makeChange(Attributor &A, InstType *MemInst, const Use &U,
                       Value *OriginalValue, PointerType *NewPtrTy,
                       bool UseOriginalValue) {
  if (U.getOperandNo() != InstType::getPointerOperandIndex())
    return false;

  if (MemInst->isVolatile()) {
    auto *TTI = A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(
        *MemInst->getFunction());
    unsigned NewAS = NewPtrTy->getPointerAddressSpace();
    if (!TTI || !TTI->hasVolatileVariant(MemInst, NewAS))
      return false;
  }

  if (UseOriginalValue) {
    A.changeUseAfterManifest(const_cast<Use &>(U), *OriginalValue);
    return true;
  }

  Instruction *CastInst = new AddrSpaceCastInst(OriginalValue, NewPtrTy);
  CastInst->insertBefore(MemInst->getIterator());
  A.changeUseAfterManifest(const_cast<Use &>(U), *CastInst);
  return true;
}

struct AAAddressSpaceImpl : public AAAddressSpace {
  AAAddressSpaceImpl(const IRPosition &IRP, Attributor &A)
      : AAAddressSpace(IRP, A) {}

  uint32_t getAddressSpace() const override {
    assert(isValidState() && "the AA is invalid");
    return AssumedAddressSpace;
  }

  void initialize(Attributor &A) override {
    assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
           "Associated value is not a pointer");

    if (!A.getInfoCache().getFlatAddressSpace().has_value()) {
      indicatePessimisticFixpoint();
      return;
    }

    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != FlatAS) {
      [[maybe_unused]] bool R = takeAddressSpace(AS);
      assert(R && "The take should happen");
      indicateOptimisticFixpoint();
    }
  }

  ChangeStatus updateImpl(Attributor &A) override {
    uint32_t OldAddressSpace = AssumedAddressSpace;
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();

    auto CheckAddressSpace = [&](Value &Obj) {
      // ...
      unsigned ObjAS = Obj.getType()->getPointerAddressSpace();
      if (ObjAS != FlatAS)
        return takeAddressSpace(ObjAS);
      // ...
      auto *TTI =
          A.getInfoCache().getAnalysisResultForFunction<TargetIRAnalysis>(*F);
      // ...
      if (AssumedAS != ~0U)
        return takeAddressSpace(AssumedAS);
      // ...
      return takeAddressSpace(FlatAS);
    };

    auto *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(getIRPosition(), this,
                                                        DepClassTy::REQUIRED);
    if (!AUO->forallUnderlyingObjects(CheckAddressSpace))
      return indicatePessimisticFixpoint();

    return OldAddressSpace == AssumedAddressSpace ? ChangeStatus::UNCHANGED
                                                  : ChangeStatus::CHANGED;
  }

  ChangeStatus manifest(Attributor &A) override {
    unsigned NewAS = getAddressSpace();

    if (NewAS == InvalidAddressSpace ||
        NewAS == getAssociatedType()->getPointerAddressSpace())
      return ChangeStatus::UNCHANGED;

    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();

    Value *AssociatedValue = &getAssociatedValue();
    Value *OriginalValue = peelAddrspacecast(AssociatedValue, FlatAS);

    PointerType *NewPtrTy =
        PointerType::get(getAssociatedType()->getContext(), NewAS);
    bool UseOriginalValue =
        OriginalValue->getType()->getPointerAddressSpace() == NewAS;

    bool Changed = false;

    auto Pred = [&](const Use &U, bool &) {
      if (U.get() != AssociatedValue)
        return true;
      auto *Inst = dyn_cast<Instruction>(U.getUser());
      if (!Inst)
        return true;
      // ...
      if (auto *LI = dyn_cast<LoadInst>(Inst)) {
        Changed |=
            makeChange(A, LI, U, OriginalValue, NewPtrTy, UseOriginalValue);
      } else if (auto *SI = dyn_cast<StoreInst>(Inst)) {
        Changed |=
            makeChange(A, SI, U, OriginalValue, NewPtrTy, UseOriginalValue);
      } else if (auto *RMW = dyn_cast<AtomicRMWInst>(Inst)) {
        Changed |=
            makeChange(A, RMW, U, OriginalValue, NewPtrTy, UseOriginalValue);
      } else if (auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst)) {
        Changed |=
            makeChange(A, CmpX, U, OriginalValue, NewPtrTy, UseOriginalValue);
      }
      return true;
    };

    // It does not matter if we cannot check all uses; any we cannot visit is
    // simply ignored conservatively.
    (void)A.checkForAllUses(Pred, *this, getAssociatedValue(),
                            /* ... */);

    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "addrspace(<invalid>)";
    return "addrspace(" +
           (AssumedAddressSpace == InvalidAddressSpace
                ? std::string("none")
                : std::to_string(AssumedAddressSpace)) +
           ")";
  }

private:
  uint32_t AssumedAddressSpace = InvalidAddressSpace;

  bool takeAddressSpace(uint32_t AS) {
    if (AssumedAddressSpace == InvalidAddressSpace) {
      AssumedAddressSpace = AS;
      return true;
    }
    return AssumedAddressSpace == AS;
  }

  static Value *peelAddrspacecast(Value *V, unsigned FlatAS) {
    if (auto *I = dyn_cast<AddrSpaceCastInst>(V)) {
      assert(I->getSrcAddressSpace() != FlatAS &&
             "there should not be flat AS -> non-flat AS");
      return I->getPointerOperand();
    }
    if (auto *C = dyn_cast<ConstantExpr>(V))
      if (C->getOpcode() == Instruction::AddrSpaceCast) {
        assert(C->getOperand(0)->getType()->getPointerAddressSpace() !=
                   FlatAS &&
               "there should not be flat AS -> non-flat AS X");
        return C->getOperand(0);
      }
    return V;
  }
};

struct AAAddressSpaceFloating final : AAAddressSpaceImpl {
  AAAddressSpaceFloating(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AAAddressSpaceReturned final : AAAddressSpaceImpl {
  AAAddressSpaceReturned(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    // ...
    (void)indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {
    // ...
  }
};

struct AAAddressSpaceCallSiteReturned final : AAAddressSpaceImpl {
  AAAddressSpaceCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AAAddressSpaceArgument final : AAAddressSpaceImpl {
  AAAddressSpaceArgument(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}
  // ...
};

struct AAAddressSpaceCallSiteArgument final : AAAddressSpaceImpl {
  AAAddressSpaceCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AAAddressSpaceImpl(IRP, A) {}

  void initialize(Attributor &A) override {
    // ...
    (void)indicatePessimisticFixpoint();
  }

  void trackStatistics() const override {
    // ...
  }
};
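// Illustrative sketch (not part of the original source): how a deduced address
// space could be consumed, mirroring the makeChange/manifest logic above.
// `Ptr` is assumed to be a flat-address-space pointer value; the getter name
// getAddressSpace() is an assumption based on the accessor shown above.
//
//   const auto *AAAS = A.getOrCreateAAFor<AAAddressSpace>(
//       IRPosition::value(Ptr), this, DepClassTy::REQUIRED);
//   if (AAAS && AAAS->getState().isValidState() &&
//       AAAS->getAddressSpace() != AAAddressSpace::InvalidAddressSpace) {
//     // Memory accesses through Ptr can be rewritten to use an addrspacecast
//     // into the deduced address space, as manifest() does for loads/stores.
//   }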
struct AANoAliasAddrSpaceImpl : public AANoAliasAddrSpace {
  AANoAliasAddrSpaceImpl(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpace(IRP, A) {}

  void initialize(Attributor &A) override {
    assert(getAssociatedType()->isPtrOrPtrVectorTy() &&
           "Associated value is not a pointer");
    // ...

    std::optional<unsigned> FlatAS = A.getInfoCache().getFlatAddressSpace();
    if (!FlatAS.has_value()) {
      indicatePessimisticFixpoint();
      return;
    }
    // ...

    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != *FlatAS) {
      // ...
      indicateOptimisticFixpoint();
    }
  }

  ChangeStatus updateImpl(Attributor &A) override {
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();
    uint32_t OldAssumed = getAssumed();

    auto CheckAddressSpace = [&](Value &Obj) {
      // ...
      unsigned AS = Obj.getType()->getPointerAddressSpace();
      // ...
      removeAS(Obj.getType()->getPointerAddressSpace());
      return true;
    };

    const AAUnderlyingObjects *AUO = A.getOrCreateAAFor<AAUnderlyingObjects>(
        getIRPosition(), this, DepClassTy::REQUIRED);
    if (!AUO || !AUO->forallUnderlyingObjects(CheckAddressSpace))
      return indicatePessimisticFixpoint();

    return OldAssumed == getAssumed() ? ChangeStatus::UNCHANGED
                                      : ChangeStatus::CHANGED;
  }

  ChangeStatus manifest(Attributor &A) override {
    unsigned FlatAS = A.getInfoCache().getFlatAddressSpace().value();

    unsigned AS = getAssociatedType()->getPointerAddressSpace();
    if (AS != FlatAS || Map.empty())
      return ChangeStatus::UNCHANGED;

    LLVMContext &Ctx = getAssociatedValue().getContext();
    MDNode *NoAliasASNode = nullptr;
    MDBuilder MDB(Ctx);
    // Build the !noalias.addrspace metadata from the remaining ranges.
    for (RangeMap::const_iterator I = Map.begin(); I != Map.end(); I++) {
      // ...
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      if (!NoAliasASNode) {
        NoAliasASNode = MDB.createRange(APInt(32, Lower), APInt(32, Upper + 1));
        continue;
      }
      MDNode *ASRange = MDB.createRange(APInt(32, Lower), APInt(32, Upper + 1));
      // ...
    }

    Value *AssociatedValue = &getAssociatedValue();
    bool Changed = false;

    auto AddNoAliasAttr = [&](const Use &U, bool &) {
      if (U.get() != AssociatedValue)
        return true;
      Instruction *Inst = dyn_cast<Instruction>(U.getUser());
      if (!Inst || Inst->hasMetadata(LLVMContext::MD_noalias_addrspace))
        return true;
      // ...
      Inst->setMetadata(LLVMContext::MD_noalias_addrspace, NoAliasASNode);
      Changed = true;
      return true;
    };
    (void)A.checkForAllUses(AddNoAliasAttr, *this, *AssociatedValue,
                            /* ... */);
    return Changed ? ChangeStatus::CHANGED : ChangeStatus::UNCHANGED;
  }

  const std::string getAsStr(Attributor *A) const override {
    if (!isValidState())
      return "<invalid>";
    std::string Str;
    raw_string_ostream OS(Str);
    OS << "CanNotBeAddrSpace(";
    for (RangeMap::const_iterator I = Map.begin(); I != Map.end(); I++) {
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      OS << ' ' << '[' << Upper << ',' << Lower + 1 << ')';
    }
    // ...
    return Str;
  }

protected:
  void removeAS(unsigned AS) {
    RangeMap::iterator I = Map.find(AS);

    if (I != Map.end()) {
      unsigned Upper = I.stop();
      unsigned Lower = I.start();
      // Split the range that contains AS into the pieces around it.
      I.erase();
      if (AS != ~((unsigned)0) && AS + 1 <= Upper)
        Map.insert(AS + 1, Upper, true);
      if (AS != 0 && Lower <= AS - 1)
        Map.insert(Lower, AS - 1, true);
    }
  }

  void resetASRanges(Attributor &A) {
    Map.clear();
    Map.insert(0, A.getInfoCache().getMaxAddrSpace(), true);
  }
};

struct AANoAliasAddrSpaceFloating final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceFloating(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoAliasAddrSpaceReturned final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoAliasAddrSpaceCallSiteReturned final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceCallSiteReturned(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoAliasAddrSpaceArgument final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};

struct AANoAliasAddrSpaceCallSiteArgument final : AANoAliasAddrSpaceImpl {
  AANoAliasAddrSpaceCallSiteArgument(const IRPosition &IRP, Attributor &A)
      : AANoAliasAddrSpaceImpl(IRP, A) {}

  void trackStatistics() const override {
    // ...
  }
};
13442 struct AAAllocationInfoImpl : public AAAllocationInfo {
13443   AAAllocationInfoImpl(const IRPosition &IRP, Attributor &A)
13444       : AAAllocationInfo(IRP, A) {}
13446   std::optional<TypeSize> getAllocatedSize() const override {
13447     assert(isValidState() && "the AA is invalid");
13448     return AssumedAllocatedSize;
13451   std::optional<TypeSize> findInitialAllocationSize(Instruction *I,
13452                                                     const DataLayout &DL) {
13455     switch (I->getOpcode()) {
13456     case Instruction::Alloca: {
13461     return std::nullopt;
13467     const IRPosition &IRP = getIRPosition();
13472       return indicatePessimisticFixpoint();
13474     bool IsKnownNoCapture;
13476             A, this, IRP, DepClassTy::OPTIONAL, IsKnownNoCapture))
13477       return indicatePessimisticFixpoint();
13479     const AAPointerInfo *PI =
13480         A.getOrCreateAAFor<AAPointerInfo>(IRP, *this, DepClassTy::REQUIRED);
13483       return indicatePessimisticFixpoint();
13486       return indicatePessimisticFixpoint();
13488     const DataLayout &DL = A.getDataLayout();
13489     const auto AllocationSize = findInitialAllocationSize(I, DL);
13492     if (!AllocationSize)
13493       return indicatePessimisticFixpoint();
13497     if (*AllocationSize == 0)
13498       return indicatePessimisticFixpoint();
13504       return indicatePessimisticFixpoint();
13506     if (BinSize == 0) {
13507       auto NewAllocationSize = std::make_optional<TypeSize>(0, false);
13508       if (!changeAllocationSize(NewAllocationSize))
13509         return ChangeStatus::UNCHANGED;
13510       return ChangeStatus::CHANGED;
13514     const auto &It = PI->begin();
13517     if (It->first.Offset != 0)
13518       return indicatePessimisticFixpoint();
13520     uint64_t SizeOfBin = It->first.Offset + It->first.Size;
13522     if (SizeOfBin >= *AllocationSize)
13523       return indicatePessimisticFixpoint();
13525     auto NewAllocationSize = std::make_optional<TypeSize>(SizeOfBin * 8, false);
13527     if (!changeAllocationSize(NewAllocationSize))
13528       return ChangeStatus::UNCHANGED;
13530     return ChangeStatus::CHANGED;
13536     assert(isValidState() &&
13537            "Manifest should only be called if the state is valid.");
13541     auto FixedAllocatedSizeInBits = getAllocatedSize()->getFixedValue();
13543     unsigned long NumBytesToAllocate = (FixedAllocatedSizeInBits + 7) / 8;
13545     switch (I->getOpcode()) {
13547     case Instruction::Alloca: {
13551       Type *CharType = Type::getInt8Ty(I->getContext());
13553       auto *NumBytesToValue =
13554           ConstantInt::get(I->getContext(), APInt(32, NumBytesToAllocate));
13557       insertPt = std::next(insertPt);
13558       AllocaInst *NewAllocaInst =
13563       return ChangeStatus::CHANGED;
13571     return ChangeStatus::UNCHANGED;
13575   const std::string getAsStr(Attributor *A) const override {
13576     if (!isValidState())
13577       return "allocationinfo(<invalid>)";
13578     return "allocationinfo(" +
13579            (AssumedAllocatedSize == HasNoAllocationSize
13581                 : std::to_string(AssumedAllocatedSize->getFixedValue())) +
13586   std::optional<TypeSize> AssumedAllocatedSize = HasNoAllocationSize;
13590   bool changeAllocationSize(std::optional<TypeSize> Size) {
13591     if (AssumedAllocatedSize == HasNoAllocationSize ||
13592         AssumedAllocatedSize != Size) {
13593       AssumedAllocatedSize = Size;
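// Illustrative sketch (not part of the listing): the manifest step above
// (lines 13541-13563) reallocates an over-sized alloca as an i8 array covering
// just the bytes the access bins touch. A minimal version of that rewrite,
// assuming NumBytesToAllocate has already been computed; the helper name and
// the use of IRBuilder are illustrative, the listing constructs the AllocaInst
// directly and lets the Attributor handle replacement of the old instruction.
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

static AllocaInst *shrinkAlloca(AllocaInst *OldAlloca,
                                uint64_t NumBytesToAllocate) {
  IRBuilder<> IRB(OldAlloca); // place the replacement right before the old one
  Type *I8Ty = Type::getInt8Ty(OldAlloca->getContext());
  Value *NumBytes = IRB.getInt32(static_cast<uint32_t>(NumBytesToAllocate));
  AllocaInst *NewAlloca =
      IRB.CreateAlloca(I8Ty, NumBytes, OldAlloca->getName() + ".shrunk");
  NewAlloca->setAlignment(OldAlloca->getAlign());
  OldAlloca->replaceAllUsesWith(NewAlloca);
  return NewAlloca;
}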
13600 struct AAAllocationInfoFloating : AAAllocationInfoImpl {
13601   AAAllocationInfoFloating(const IRPosition &IRP, Attributor &A)
13602       : AAAllocationInfoImpl(IRP, A) {}
13604   void trackStatistics() const override {
13609 struct AAAllocationInfoReturned : AAAllocationInfoImpl {
13610   AAAllocationInfoReturned(const IRPosition &IRP, Attributor &A)
13611       : AAAllocationInfoImpl(IRP, A) {}
13617     (void)indicatePessimisticFixpoint();
13620   void trackStatistics() const override {
13625 struct AAAllocationInfoCallSiteReturned : AAAllocationInfoImpl {
13626   AAAllocationInfoCallSiteReturned(const IRPosition &IRP, Attributor &A)
13627       : AAAllocationInfoImpl(IRP, A) {}
13629   void trackStatistics() const override {
13634 struct AAAllocationInfoArgument : AAAllocationInfoImpl {
13635   AAAllocationInfoArgument(const IRPosition &IRP, Attributor &A)
13636       : AAAllocationInfoImpl(IRP, A) {}
13638   void trackStatistics() const override {
13643 struct AAAllocationInfoCallSiteArgument : AAAllocationInfoImpl {
13644   AAAllocationInfoCallSiteArgument(const IRPosition &IRP, Attributor &A)
13645       : AAAllocationInfoImpl(IRP, A) {}
13650     (void)indicatePessimisticFixpoint();
13653   void trackStatistics() const override {
13702#define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
13703 case IRPosition::PK: \
13704 llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
13706#define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
13707 case IRPosition::PK: \
13708 AA = new (A.Allocator) CLASS##SUFFIX(IRP, A); \
13712#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13713 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13714 CLASS *AA = nullptr; \
13715 switch (IRP.getPositionKind()) { \
13716 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13717 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13718 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13719 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13720 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13721 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13722 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13723 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13728#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13729 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13730 CLASS *AA = nullptr; \
13731 switch (IRP.getPositionKind()) { \
13732 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13733 SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
13734 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13735 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13736 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13737 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13738 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13739 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13744#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS) \
13745 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13746 CLASS *AA = nullptr; \
13747 switch (IRP.getPositionKind()) { \
13748 SWITCH_PK_CREATE(CLASS, IRP, POS, SUFFIX) \
13750 llvm_unreachable("Cannot create " #CLASS " for position other than " #POS \
13756#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13757 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13758 CLASS *AA = nullptr; \
13759 switch (IRP.getPositionKind()) { \
13760 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13761 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13762 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13763 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13764 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13765 SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
13766 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13767 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13772#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13773 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13774 CLASS *AA = nullptr; \
13775 switch (IRP.getPositionKind()) { \
13776 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13777 SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
13778 SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
13779 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13780 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
13781 SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
13782 SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
13783 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13788#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
13789 CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
13790 CLASS *AA = nullptr; \
13791 switch (IRP.getPositionKind()) { \
13792 SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
13793 SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
13794 SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
13795 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
13796 SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
13797 SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
13798 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
13799 SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
13851#undef CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION
13852#undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
13853#undef CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION
13854#undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
13855#undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
13856#undef CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION
13857#undef SWITCH_PK_CREATE
13858#undef SWITCH_PK_INV
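// Illustrative expansion (not part of the listing): each X-macro above stamps
// out one createForPosition factory. For a hypothetical attribute "AAExample"
// with only Function and CallSite specializations, the function-position
// variant expands to roughly the following (most invalid-position cases and
// statistics bookkeeping abbreviated; the closing lines are elided in the
// listing as well):
AAExample &AAExample::createForPosition(const IRPosition &IRP, Attributor &A) {
  AAExample *AA = nullptr;
  switch (IRP.getPositionKind()) {
  case IRPosition::IRP_FLOAT:
    llvm_unreachable("Cannot create AAExample for a floating position!");
  case IRPosition::IRP_FUNCTION:
    AA = new (A.Allocator) AAExampleFunction(IRP, A);
    break;
  case IRPosition::IRP_CALL_SITE:
    AA = new (A.Allocator) AAExampleCallSite(IRP, A);
    break;
  default:
    llvm_unreachable("Unhandled position kind!");
  }
  return *AA;
}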
This file implements a class to represent arbitrary precision integral constant values and operations...
This file contains the simple types necessary to represent the attributes associated with functions a...
#define STATS_DECLTRACK(NAME, TYPE, MSG)
static std::optional< Constant * > askForAssumedConstant(Attributor &A, const AbstractAttribute &QueryingAA, const IRPosition &IRP, Type &Ty)
static cl::opt< unsigned, true > MaxPotentialValues("attributor-max-potential-values", cl::Hidden, cl::desc("Maximum number of potential values to be " "tracked for each position."), cl::location(llvm::PotentialConstantIntValuesState::MaxPotentialValues), cl::init(7))
static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA, StateType &S, const IRPosition::CallBaseContext *CBContext=nullptr)
Clamp the information known for all returned values of a function (identified by QueryingAA) into S.
#define STATS_DECLTRACK_FN_ATTR(NAME)
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxPotentialValuesIterations("attributor-max-potential-values-iterations", cl::Hidden, cl::desc("Maximum number of iterations we keep dismantling potential values."), cl::init(64))
#define STATS_DECLTRACK_CS_ATTR(NAME)
#define PIPE_OPERATOR(CLASS)
static bool mayBeInCycle(const CycleInfo *CI, const Instruction *I, bool HeaderOnly, Cycle **CPtr=nullptr)
#define STATS_DECLTRACK_ARG_ATTR(NAME)
static const Value * stripAndAccumulateOffsets(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Val, const DataLayout &DL, APInt &Offset, bool GetMinOffset, bool AllowNonInbounds, bool UseAssumed=false)
#define STATS_DECLTRACK_CSRET_ATTR(NAME)
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
static Value * constructPointer(Value *Ptr, int64_t Offset, IRBuilder< NoFolder > &IRB)
Helper function to create a pointer based on Ptr, and advanced by Offset bytes.
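A rough sketch of what such an offset-advance helper reduces to with opaque pointers; the helper name here is illustrative and simply mirrors the CreatePtrAdd/getInt64 builder calls also listed below.
    #include "llvm/IR/IRBuilder.h"
    #include "llvm/IR/NoFolder.h"
    using namespace llvm;

    // Advance Ptr by Offset bytes; a zero offset returns the pointer unchanged.
    static Value *advancePointerByBytes(Value *Ptr, int64_t Offset,
                                        IRBuilder<NoFolder> &IRB) {
      if (Offset == 0)
        return Ptr;
      return IRB.CreatePtrAdd(Ptr, IRB.getInt64(Offset), Ptr->getName() + ".adv");
    }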
#define CREATE_NON_RET_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define BUILD_STAT_NAME(NAME, TYPE)
static bool isDenselyPacked(Type *Ty, const DataLayout &DL)
Checks if a type could have padding bytes.
#define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static const Value * getMinimalBaseOfPointer(Attributor &A, const AbstractAttribute &QueryingAA, const Value *Ptr, int64_t &BytesOffset, const DataLayout &DL, bool AllowNonInbounds=false)
#define STATS_DECLTRACK_FNRET_ATTR(NAME)
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
#define CREATE_ABSTRACT_ATTRIBUTE_FOR_ONE_POSITION(POS, SUFFIX, CLASS)
#define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
#define STATS_DECLTRACK_FLOATING_ATTR(NAME)
#define STATS_DECL(NAME, TYPE, MSG)
static bool isReachableImpl(SmallVectorImpl< BasicBlock * > &Worklist, const StopSetT &StopSet, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet, const DominatorTree *DT, const LoopInfo *LI)
This file contains the declarations for the subclasses of Constant, which represent the different fla...
This file declares an analysis pass that computes CycleInfo for LLVM IR, specialized from GenericCycl...
DXIL Forward Handle Accesses
This file defines DenseMapInfo traits for DenseMap.
Machine Check Debug Module
This file implements a map that provides insertion order iteration.
static unsigned getAddressSpace(const Value *V, unsigned MaxLookup)
ConstantRange Range(APInt(BitWidth, Low), APInt(BitWidth, High))
uint64_t IntrinsicInst * II
static StringRef getName(Value *V)
This builds on the llvm/ADT/GraphTraits.h file to find the strongly connected components (SCCs) of a ...
std::pair< BasicBlock *, BasicBlock * > Edge
BaseType
A given derived pointer can have multiple base pointers through phi/selects.
This file defines generic set operations that may be used on set's of different types,...
This file implements a set that has insertion order iteration characteristics.
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
static SymbolRef::Type getType(const Symbol *Sym)
static void initialize(TargetLibraryInfoImpl &TLI, const Triple &T, const llvm::StringTable &StandardNames, VectorLibrary VecLib)
Initialize the set of available library functions based on the specified target triple.
static unsigned getBitWidth(Type *Ty, const DataLayout &DL)
Returns the bitwidth of the given scalar or pointer type.
static unsigned getSize(unsigned Kind)
LLVM_ABI AACallGraphNode * operator*() const
bool isNoAlias(const MemoryLocation &LocA, const MemoryLocation &LocB)
A trivial helper function to check to see if the specified pointers are no-alias.
Class for arbitrary precision integers.
int64_t getSExtValue() const
Get sign extended value.
CallBase * getInstruction() const
Return the underlying instruction.
bool isCallbackCall() const
Return true if this ACS represents a callback call.
bool isDirectCall() const
Return true if this ACS represents a direct call.
static LLVM_ABI void getCallbackUses(const CallBase &CB, SmallVectorImpl< const Use * > &CallbackUses)
Add operand uses of CB that represent callback uses into CallbackUses.
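A short usage sketch, assuming CB is a CallBase with a callback encoding (for example a broker call); the loop body is a placeholder.
    SmallVector<const Use *, 4> CallbackUses;
    AbstractCallSite::getCallbackUses(CB, CallbackUses);
    for (const Use *U : CallbackUses) {
      // Each U is an operand of CB that is invoked as a callback callee or
      // forwarded to one.
    }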
int getCallArgOperandNo(Argument &Arg) const
Return the operand index of the underlying instruction associated with Arg.
Align getAlign() const
Return the alignment of the memory that is being allocated by the instruction.
unsigned getAddressSpace() const
Return the address space for the allocation.
LLVM_ABI std::optional< TypeSize > getAllocationSize(const DataLayout &DL) const
Get allocation size in bytes.
This class represents an incoming formal argument to a Function.
LLVM_ABI bool hasNoAliasAttr() const
Return true if this argument has the noalias attribute.
LLVM_ABI bool onlyReadsMemory() const
Return true if this argument has the readonly or readnone attribute.
LLVM_ABI bool hasPointeeInMemoryValueAttr() const
Return true if this argument has the byval, sret, inalloca, preallocated, or byref attribute.
LLVM_ABI bool hasReturnedAttr() const
Return true if this argument has the returned attribute.
LLVM_ABI bool hasByValAttr() const
Return true if this argument has the byval attribute.
const Function * getParent() const
unsigned getArgNo() const
Return the index of this formal argument in its containing function.
A function analysis which provides an AssumptionCache.
A cache of @llvm.assume calls within a function.
Functions, function parameters, and return types can have attributes to indicate how they should be t...
static LLVM_ABI Attribute get(LLVMContext &Context, AttrKind Kind, uint64_t Val=0)
Return a uniquified Attribute object.
LLVM_ABI FPClassTest getNoFPClass() const
Return the FPClassTest for nofpclass.
LLVM_ABI Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
LLVM_ABI MemoryEffects getMemoryEffects() const
Returns memory effects.
AttrKind
This enumeration lists the attributes that can be associated with parameters, function results,...
static LLVM_ABI Attribute getWithCaptureInfo(LLVMContext &Context, CaptureInfo CI)
static bool isEnumAttrKind(AttrKind Kind)
LLVM_ABI CaptureInfo getCaptureInfo() const
Returns information from captures attribute.
LLVM Basic Block Representation.
LLVM_ABI const_iterator getFirstInsertionPt() const
Returns an iterator to the first instruction in this block that is suitable for inserting a non-PHI i...
const Function * getParent() const
Return the enclosing method, or null if none.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
const Instruction & front() const
InstListType::iterator iterator
Instruction iterators...
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
BinaryOps getOpcode() const
Conditional or Unconditional Branch instruction.
unsigned getNumSuccessors() const
static BranchInst * Create(BasicBlock *IfTrue, InsertPosition InsertBefore=nullptr)
BasicBlock * getSuccessor(unsigned i) const
Value * getCondition() const
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
LLVM_ABI bool isMustTailCall() const
Tests if this call site must be tail call optimized.
LLVM_ABI bool isIndirectCall() const
Return true if the callsite is an indirect call.
bool isCallee(Value::const_user_iterator UI) const
Determine whether the passed iterator points to the callee operand's Use.
Value * getCalledOperand() const
const Use & getCalledOperandUse() const
const Use & getArgOperandUse(unsigned i) const
Wrappers for getting the Use of a call argument.
LLVM_ABI std::optional< ConstantRange > getRange() const
If this return value has a range attribute, return the value range of the argument.
Value * getArgOperand(unsigned i) const
bool isBundleOperand(unsigned Idx) const
Return true if the operand at index Idx is a bundle operand.
bool isConvergent() const
Determine if the invoke is convergent.
FunctionType * getFunctionType() const
LLVM_ABI Intrinsic::ID getIntrinsicID() const
Returns the intrinsic ID of the intrinsic called or Intrinsic::not_intrinsic if the called function i...
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned getArgOperandNo(const Use *U) const
Given a use for a arg operand, get the arg operand number that corresponds to it.
unsigned arg_size() const
bool isArgOperand(const Use *U) const
LLVM_ABI Function * getCaller()
Helper to get the caller (the parent function).
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
static CaptureInfo none()
Create CaptureInfo that does not capture any components of the pointer.
Instruction::CastOps getOpcode() const
Return the opcode of this CastInst.
LLVM_ABI bool isIntegerCast() const
There are several places where we need to know if a cast instruction only deals with integer source a...
Type * getDestTy() const
Return the destination type, as a convenience.
bool isEquality() const
Determine if this is an equals/not equals predicate.
bool isFalseWhenEqual() const
This is just a convenience.
Predicate
This enumeration lists the possible predicates for CmpInst subclasses.
bool isTrueWhenEqual() const
This is just a convenience.
Predicate getPredicate() const
Return the predicate for this instruction.
static LLVM_ABI Constant * getExtractElement(Constant *Vec, Constant *Idx, Type *OnlyIfReducedTy=nullptr)
static LLVM_ABI ConstantInt * getTrue(LLVMContext &Context)
This class represents a range of values.
const APInt & getLower() const
Return the lower value for this range.
LLVM_ABI bool isFullSet() const
Return true if this set contains all of the elements possible for this data-type.
LLVM_ABI bool isEmptySet() const
Return true if this set contains no members.
bool isSingleElement() const
Return true if this set contains exactly one member.
static LLVM_ABI ConstantRange makeAllowedICmpRegion(CmpInst::Predicate Pred, const ConstantRange &Other)
Produce the smallest range such that all values that may satisfy the given predicate with any value c...
const APInt & getUpper() const
Return the upper value for this range.
A parsed version of the target data layout string in and methods for querying it.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
size_type count(const_arg_type_t< KeyT > Val) const
Return 1 if the specified key is in the map, 0 otherwise.
bool contains(const_arg_type_t< KeyT > Val) const
Return true if the specified key is in the map, false otherwise.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Analysis pass which computes a DominatorTree.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
LLVM_ABI bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
const BasicBlock & getEntryBlock() const
iterator_range< arg_iterator > args()
const Function & getFunction() const
Argument * getArg(unsigned i) const
bool hasFnAttribute(Attribute::AttrKind Kind) const
Return true if the function has the attribute.
CycleT * getCycle(const BlockT *Block) const
Find the innermost cycle containing a given block.
LLVM_ABI bool isDeclaration() const
Return true if the primary definition of this global value is outside of the current translation unit...
bool hasLocalLinkage() const
static LLVM_ABI bool compare(const APInt &LHS, const APInt &RHS, ICmpInst::Predicate Pred)
Return result of LHS Pred RHS comparison.
Value * CreatePtrAdd(Value *Ptr, Value *Offset, const Twine &Name="", GEPNoWrapFlags NW=GEPNoWrapFlags::none())
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
LLVM_ABI Instruction * clone() const
Create a copy of 'this' instruction that is identical in all ways except the following:
LLVM_ABI bool isLifetimeStartOrEnd() const LLVM_READONLY
Return true if the instruction is a llvm.lifetime.start or llvm.lifetime.end marker.
bool mayReadOrWriteMemory() const
Return true if this instruction may read or write memory.
LLVM_ABI bool mayWriteToMemory() const LLVM_READONLY
Return true if this instruction may modify memory.
bool hasMetadata() const
Return true if this instruction has any metadata attached to it.
LLVM_ABI void insertBefore(InstListType::iterator InsertPos)
Insert an unlinked instruction into a basic block immediately before the specified position.
LLVM_ABI InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
LLVM_ABI const Function * getFunction() const
Return the function this instruction belongs to.
LLVM_ABI BasicBlock * getSuccessor(unsigned Idx) const LLVM_READONLY
Return the specified successor. This instruction must be a terminator.
LLVM_ABI bool mayHaveSideEffects() const LLVM_READONLY
Return true if the instruction may have side effects.
bool isTerminator() const
LLVM_ABI bool mayReadFromMemory() const LLVM_READONLY
Return true if this instruction may read memory.
LLVM_ABI void setMetadata(unsigned KindID, MDNode *Node)
Set the metadata of the specified kind to the specified node.
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
void setDebugLoc(DebugLoc Loc)
Set the debug location information for this instruction.
LLVM_ABI const DataLayout & getDataLayout() const
Get the data layout of the module this instruction belongs to.
This is an important class for using LLVM in a threaded context.
ConstantRange getConstantRange(Value *V, Instruction *CxtI, bool UndefAllowed)
Return the ConstantRange constraint that is known to hold for the specified value at the specified in...
LoopT * getLoopFor(const BlockT *BB) const
Return the inner most loop that BB lives in.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static LLVM_ABI MDNode * getMostGenericRange(MDNode *A, MDNode *B)
static MemoryEffectsBase readOnly()
bool doesNotAccessMemory() const
Whether this function accesses no memory.
static MemoryEffectsBase argMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase inaccessibleMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
bool onlyAccessesInaccessibleMem() const
Whether this function only (at most) accesses inaccessible memory.
ModRefInfo getModRef(Location Loc) const
Get ModRefInfo for the given Location.
bool onlyAccessesArgPointees() const
Whether this function only (at most) accesses argument memory.
bool onlyReadsMemory() const
Whether this function only (at most) reads memory.
static MemoryEffectsBase writeOnly()
static MemoryEffectsBase inaccessibleOrArgMemOnly(ModRefInfo MR=ModRefInfo::ModRef)
static MemoryEffectsBase none()
bool onlyAccessesInaccessibleOrArgMem() const
Whether this function only (at most) accesses argument and inaccessible memory.
static MemoryEffectsBase unknown()
static LLVM_ABI std::optional< MemoryLocation > getOrNone(const Instruction *Inst)
static SizeOffsetValue unknown()
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr="", InsertPosition InsertBefore=nullptr)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will h...
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
Value * getReturnValue() const
Convenience accessor. Returns null if there is no return value.
LLVM_ABI const SCEV * getSCEVAtScope(const SCEV *S, const Loop *L)
Return a SCEV expression for the specified value at the specified scope in the program.
LLVM_ABI const SCEV * getSCEV(Value *V)
Return a SCEV expression for the full generality of the specified expression.
LLVM_ABI unsigned getSmallConstantMaxTripCount(const Loop *L, SmallVectorImpl< const SCEVPredicate * > *Predicates=nullptr)
Returns the upper bound of the loop trip count as a normal unsigned value.
ConstantRange getUnsignedRange(const SCEV *S)
Determine the unsigned range for a particular SCEV.
A vector that has set insertion semantics.
size_type size() const
Determine the number of elements in the SetVector.
bool insert(const value_type &X)
Insert a new element into the SetVector.
bool erase(PtrType Ptr)
Remove pointer from the set.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
std::pair< const_iterator, bool > insert(const T &V)
insert - Insert an element into the set if it isn't already there.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void append(ItTy in_start, ItTy in_end)
Add the specified range to the end of the SmallVector.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout s...
TypeSize getElementOffset(unsigned Idx) const
TypeSize getElementOffsetInBits(unsigned Idx) const
Class to represent struct types.
unsigned getNumElements() const
Random access to the elements.
Type * getElementType(unsigned N) const
Twine - A lightweight data structure for efficiently representing the concatenation of temporary valu...
The instances of the Type class are immutable: once they are created, they are never changed.
LLVM_ABI unsigned getIntegerBitWidth() const
bool isPointerTy() const
True if this is an instance of PointerType.
LLVM_ABI unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
bool isPtrOrPtrVectorTy() const
Return true if this is a pointer type or a vector of pointer types.
bool isIntegerTy() const
True if this is an instance of IntegerType.
bool isVoidTy() const
Return true if this is 'void'.
static LLVM_ABI UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
User * getUser() const
Returns the User that contains this Use.
const Use & getOperandUse(unsigned i) const
LLVM_ABI bool isDroppable() const
A droppable user is a user for which uses can be dropped without affecting correctness and should be ...
LLVM_ABI bool replaceUsesOfWith(Value *From, Value *To)
Replace uses of one Value with another.
Value * getOperand(unsigned i) const
unsigned getNumOperands() const
ValueT lookup(const KeyT &Val) const
lookup - Return the entry for the specified key, or a default constructed value if no such entry exis...
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
static constexpr uint64_t MaximumAlignment
LLVM_ABI void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
LLVM_ABI const Value * stripAndAccumulateConstantOffsets(const DataLayout &DL, APInt &Offset, bool AllowNonInbounds, bool AllowInvariantGroup=false, function_ref< bool(Value &Value, APInt &Offset)> ExternalAnalysis=nullptr, bool LookThroughIntToPtr=false) const
Accumulate the constant offset this value has compared to a base pointer.
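A small usage sketch, assuming a pointer Ptr and the module DataLayout DL are in scope; the offset APInt must be sized to the pointer's index width.
    APInt Offset(DL.getIndexTypeSizeInBits(Ptr->getType()), 0);
    const Value *Base =
        Ptr->stripAndAccumulateConstantOffsets(DL, Offset, /*AllowNonInbounds=*/true);
    // Base plus Offset.getSExtValue() bytes now describes the original Ptr.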
LLVM_ABI LLVMContext & getContext() const
All values hold a context through their type.
static constexpr unsigned MaxAlignmentExponent
The maximum alignment for instructions.
iterator_range< use_iterator > uses()
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
constexpr ScalarTy getFixedValue() const
constexpr bool isScalable() const
Returns whether the quantity is scaled by a runtime quantity (vscale).
const ParentTy * getParent() const
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
SetVector< Function * >::iterator I
This class implements an extremely fast bulk output stream that can only output to a stream.
A raw_ostream that writes to an std::string.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
Abstract Attribute helper functions.
LLVM_ABI bool isAssumedReadNone(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readnone.
LLVM_ABI bool isAssumedReadOnly(Attributor &A, const IRPosition &IRP, const AbstractAttribute &QueryingAA, bool &IsKnown)
Return true if IRP is readonly.
raw_ostream & operator<<(raw_ostream &OS, const RangeTy &R)
LLVM_ABI std::optional< Value * > combineOptionalValuesInAAValueLatice(const std::optional< Value * > &A, const std::optional< Value * > &B, Type *Ty)
Return the combination of A and B such that the result is a possible value of both.
LLVM_ABI bool isValidAtPosition(const ValueAndContext &VAC, InformationCache &InfoCache)
Return true if the value of VAC is valid at the position of VAC, that is a constant,...
LLVM_ABI bool isAssumedThreadLocalObject(Attributor &A, Value &Obj, const AbstractAttribute &QueryingAA)
Return true if Obj is assumed to be a thread local object.
LLVM_ABI bool isDynamicallyUnique(Attributor &A, const AbstractAttribute &QueryingAA, const Value &V, bool ForAnalysisOnly=true)
Return true if V is dynamically unique, that is, there are no two "instances" of V at runtime with di...
LLVM_ABI bool getPotentialCopiesOfStoredValue(Attributor &A, StoreInst &SI, SmallSetVector< Value *, 4 > &PotentialCopies, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values of the one stored by SI into PotentialCopies.
SmallPtrSet< Instruction *, 4 > InstExclusionSetTy
LLVM_ABI bool isGPU(const Module &M)
Return true iff M target a GPU (and we can use GPU AS reasoning).
ValueScope
Flags to distinguish intra-procedural queries from potentially inter-procedural queries.
LLVM_ABI bool isValidInScope(const Value &V, const Function *Scope)
Return true if V is a valid value in Scope, that is a constant or an instruction/argument of Scope.
LLVM_ABI bool isPotentiallyReachable(Attributor &A, const Instruction &FromI, const Instruction &ToI, const AbstractAttribute &QueryingAA, const AA::InstExclusionSetTy *ExclusionSet=nullptr, std::function< bool(const Function &F)> GoBackwardsCB=nullptr)
Return true if ToI is potentially reachable from FromI without running into any instruction in Exclus...
LLVM_ABI bool isNoSyncInst(Attributor &A, const Instruction &I, const AbstractAttribute &QueryingAA)
Return true if I is a nosync instruction.
bool hasAssumedIRAttr(Attributor &A, const AbstractAttribute *QueryingAA, const IRPosition &IRP, DepClassTy DepClass, bool &IsKnown, bool IgnoreSubsumingPositions=false, const AAType **AAPtr=nullptr)
Helper to avoid creating an AA for IR Attributes that might already be set.
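The allocation-info update in the listing (line 13476) uses exactly this pattern; inside an updateImpl it typically looks like the following sketch (AANoCapture chosen to match the listing; the surrounding attribute class is implied).
    ChangeStatus updateImpl(Attributor &A) override {
      bool IsKnownNoCapture;
      if (!AA::hasAssumedIRAttr<AANoCapture>(A, this, getIRPosition(),
                                             DepClassTy::OPTIONAL, IsKnownNoCapture))
        return indicatePessimisticFixpoint();
      // ... continue deducing only for pointers assumed not to be captured ...
      return ChangeStatus::UNCHANGED;
    }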
LLVM_ABI bool getPotentiallyLoadedValues(Attributor &A, LoadInst &LI, SmallSetVector< Value *, 4 > &PotentialValues, SmallSetVector< Instruction *, 4 > &PotentialValueOrigins, const AbstractAttribute &QueryingAA, bool &UsedAssumedInformation, bool OnlyExact=false)
Collect all potential values LI could read into PotentialValues.
LLVM_ABI Value * getWithType(Value &V, Type &Ty)
Try to convert V to type Ty without introducing new instructions.
constexpr char Align[]
Key for Kernel::Arg::Metadata::mAlign.
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
@ C
The default llvm calling convention, compatible with C.
@ BasicBlock
Various leaf nodes.
@ Unsupported
This operation is completely unsupported on the target.
@ Undef
Value of the register doesn't matter.
@ SingleThread
Synchronized with respect to signal handlers executing in the same thread.
@ CE
Windows NT (Windows on ARM)
@ Valid
The data is already valid.
initializer< Ty > init(const Ty &Val)
LocationClass< Ty > location(Ty &L)
unsigned combineHashValue(unsigned a, unsigned b)
Simplistic combination of 32-bit hash values into 32-bit hash values.
ElementType
The element type of an SRV or UAV resource.
Scope
Defines the scope in which this symbol should be visible: Default – Visible in the public interface o...
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > dyn_extract_or_null(Y &&MD)
Extract a Value from Metadata, if any, allowing null.
std::enable_if_t< detail::IsValidPointer< X, Y >::value, X * > extract(Y &&MD)
Extract a Value from Metadata.
@ User
could "use" a pointer
NodeAddr< UseNode * > Use
Context & getContext() const
friend class Instruction
Iterator for Instructions in a BasicBlock.
LLVM_ABI iterator begin() const
This is an optimization pass for GlobalISel generic memory operations.
bool operator<(int64_t V1, const APSInt &V2)
LLVM_ATTRIBUTE_ALWAYS_INLINE DynamicAPInt gcd(const DynamicAPInt &A, const DynamicAPInt &B)
LLVM_ABI KnownFPClass computeKnownFPClass(const Value *V, const APInt &DemandedElts, FPClassTest InterestedClasses, const SimplifyQuery &SQ, unsigned Depth=0)
Determine which floating-point classes are valid for V, and return them in KnownFPClass bit sets.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
LLVM_ABI bool isLegalToPromote(const CallBase &CB, Function *Callee, const char **FailureReason=nullptr)
Return true if the given indirect call site can be made to call Callee.
LLVM_ABI Constant * getInitialValueOfAllocation(const Value *V, const TargetLibraryInfo *TLI, Type *Ty)
If this is a call to an allocation function that initializes memory to a fixed value,...
auto size(R &&Range, std::enable_if_t< std::is_base_of< std::random_access_iterator_tag, typename std::iterator_traits< decltype(Range.begin())>::iterator_category >::value, void > *=nullptr)
Get the size of a range.
auto pred_end(const MachineBasicBlock *BB)
unsigned getPointerAddressSpace(const Type *T)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
auto successors(const MachineBasicBlock *BB)
LLVM_ABI bool isRemovableAlloc(const CallBase *V, const TargetLibraryInfo *TLI)
Return true if this is a call to an allocation function that does not have side effects that we are r...
APFloat abs(APFloat X)
Returns the absolute value of the argument.
LLVM_ABI raw_fd_ostream & outs()
This returns a reference to a raw_fd_ostream for standard output.
LLVM_ABI Value * getAllocAlignment(const CallBase *V, const TargetLibraryInfo *TLI)
Gets the alignment argument for an aligned_alloc-like function, using either built-in knowledge based...
auto dyn_cast_if_present(const Y &Val)
dyn_cast_if_present<X> - Functionally identical to dyn_cast, except that a null (or none in the case ...
LLVM_ABI Value * simplifyInstructionWithOperands(Instruction *I, ArrayRef< Value * > NewOps, const SimplifyQuery &Q)
Like simplifyInstruction but the operands of I are replaced with NewOps.
Value * GetPointerBaseWithConstantOffset(Value *Ptr, int64_t &Offset, const DataLayout &DL, bool AllowNonInbounds=true)
Analyze the specified pointer to see if it can be expressed as a base pointer plus a constant offset.
scc_iterator< T > scc_begin(const T &G)
Construct the begin iterator for a deduced graph type T.
LLVM_ABI bool isNoAliasCall(const Value *V)
Return true if this pointer is returned by a noalias function.
MemoryEffectsBase< IRMemLocation > MemoryEffects
Summary of how a function affects memory in the program.
raw_ostream & WriteGraph(raw_ostream &O, const GraphType &G, bool ShortNames=false, const Twine &Title="")
LLVM_ABI bool isSafeToSpeculativelyExecute(const Instruction *I, const Instruction *CtxI=nullptr, AssumptionCache *AC=nullptr, const DominatorTree *DT=nullptr, const TargetLibraryInfo *TLI=nullptr, bool UseVariableInfo=true, bool IgnoreUBImplyingAttrs=true)
Return true if the instruction does not have any effects besides calculating the result and does not ...
bool isa_and_nonnull(const Y &Val)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
LLVM_ABI ConstantRange getConstantRangeFromMetadata(const MDNode &RangeMD)
Parse out a conservative ConstantRange from !range metadata.
auto map_range(ContainerTy &&C, FuncTy F)
const Value * getPointerOperand(const Value *V)
A helper function that returns the pointer operand of a load, store or GEP instruction.
LLVM_ABI Value * simplifyInstruction(Instruction *I, const SimplifyQuery &Q)
See if we can compute a simplified version of this instruction.
auto dyn_cast_or_null(const Y &Val)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
unsigned Log2_32(uint32_t Value)
Return the floor log base 2 of the specified value, -1 if the value is zero.
LLVM_ABI bool isIntrinsicReturningPointerAliasingArgumentWithoutCapturing(const CallBase *Call, bool MustPreserveNullness)
{launder,strip}.invariant.group returns pointer that aliases its argument, and it only captures point...
constexpr bool isPowerOf2_32(uint32_t Value)
Return true if the argument is a power of two > 0.
PotentialValuesState< std::pair< AA::ValueAndContext, AA::ValueScope > > PotentialLLVMValuesState
void sort(IteratorTy Start, IteratorTy End)
LLVM_ABI bool NullPointerIsDefined(const Function *F, unsigned AS=0)
Check whether null pointer dereferencing is considered undefined behavior for a given function or an ...
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isPointerTy(const Type *T)
LLVM_ABI bool wouldInstructionBeTriviallyDead(const Instruction *I, const TargetLibraryInfo *TLI=nullptr)
Return true if the result produced by the instruction would have no side effects if it was not used.
bool set_union(S1Ty &S1, const S2Ty &S2)
set_union(A, B) - Compute A := A u B, return whether A changed.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
LLVM_ABI CallBase & promoteCall(CallBase &CB, Function *Callee, CastInst **RetBitCast=nullptr)
Promote the given indirect call site to unconditionally call Callee.
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ABI bool hasAssumption(const Function &F, const KnownAssumptionString &AssumptionStr)
Return true if F has the assumption AssumptionStr attached.
LLVM_ABI RetainedKnowledge getKnowledgeFromUse(const Use *U, ArrayRef< Attribute::AttrKind > AttrKinds)
Return a valid Knowledge associated to the Use U if its Attribute kind is in AttrKinds.
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI bool isKnownNonZero(const Value *V, const SimplifyQuery &Q, unsigned Depth=0)
Return true if the given value is known to be non-zero when defined.
AtomicOrdering
Atomic ordering for LLVM's memory model.
PotentialValuesState< APInt > PotentialConstantIntValuesState
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
IRBuilder(LLVMContext &, FolderTy, InserterTy, MDNode *, ArrayRef< OperandBundleDef >) -> IRBuilder< FolderTy, InserterTy >
InterleavedRange< Range > interleaved_array(const Range &R, StringRef Separator=", ")
Output range R as an array of interleaved elements.
ChangeStatus clampStateAndIndicateChange< DerefState >(DerefState &S, const DerefState &R)
void RemapInstruction(Instruction *I, ValueToValueMapTy &VM, RemapFlags Flags=RF_None, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr, const MetadataPredicate *IdentityMD=nullptr)
Convert the instruction operands from referencing the current values into those specified by VM.
DWARFExpression::Operation Op
LLVM_ABI bool isGuaranteedNotToBeUndefOrPoison(const Value *V, AssumptionCache *AC=nullptr, const Instruction *CtxI=nullptr, const DominatorTree *DT=nullptr, unsigned Depth=0)
Return true if this function can prove that V does not have undef bits and is never poison.
ArrayRef(const T &OneElt) -> ArrayRef< T >
LLVM_ABI Value * getFreedOperand(const CallBase *CB, const TargetLibraryInfo *TLI)
If this if a call to a free function, return the freed operand.
ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R)
Helper function to clamp a state S of type StateType with the information in R and indicate/return if...
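A common use is in the updateImpl of a call-site attribute, which clamps its own state with the state deduced for the callee's function position; AAFoo is a placeholder attribute name for this sketch, and its state type is assumed to match.
    ChangeStatus updateImpl(Attributor &A) override {
      const IRPosition &FnPos = IRPosition::function(*getAssociatedFunction());
      const auto *FnAA = A.getAAFor<AAFoo>(*this, FnPos, DepClassTy::REQUIRED);
      if (!FnAA)
        return indicatePessimisticFixpoint();
      return clampStateAndIndicateChange(getState(), FnAA->getState());
    }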
constexpr unsigned BitWidth
ValueMap< const Value *, WeakTrackingVH > ValueToValueMapTy
auto pred_begin(const MachineBasicBlock *BB)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
iterator_range< pointer_iterator< WrappedIteratorT > > make_pointer_range(RangeT &&Range)
LLVM_ABI std::optional< APInt > getAllocSize(const CallBase *CB, const TargetLibraryInfo *TLI, function_ref< const Value *(const Value *)> Mapper=[](const Value *V) { return V;})
Return the size of the requested allocation.
LLVM_ABI DenseSet< StringRef > getAssumptions(const Function &F)
Return the set of all assumptions for the function F.
Align assumeAligned(uint64_t Value)
Treats the value 0 as a 1, so Align is always at least 1.
LLVM_ABI Instruction * SplitBlockAndInsertIfThen(Value *Cond, BasicBlock::iterator SplitBefore, bool Unreachable, MDNode *BranchWeights=nullptr, DomTreeUpdater *DTU=nullptr, LoopInfo *LI=nullptr, BasicBlock *ThenBlock=nullptr)
Split the containing block at the specified instruction - everything before SplitBefore stays in the ...
@ OPTIONAL
The target may be valid if the source is not.
@ NONE
Do not track a dependence between source and target.
@ REQUIRED
The target cannot be valid if the source is not.
LLVM_ABI UseCaptureInfo DetermineUseCaptureKind(const Use &U, const Value *Base)
Determine what kind of capture behaviour U may exhibit.
LLVM_ABI Value * simplifyCmpInst(CmpPredicate Predicate, Value *LHS, Value *RHS, const SimplifyQuery &Q)
Given operands for a CmpInst, fold the result or return null.
LLVM_ABI bool mayContainIrreducibleControl(const Function &F, const LoopInfo *LI)
BumpPtrAllocatorImpl<> BumpPtrAllocator
The standard BumpPtrAllocator which just uses the default template parameters.
T bit_floor(T Value)
Returns the largest integral power of two no greater than Value if Value is nonzero.
LLVM_ABI const Value * getUnderlyingObject(const Value *V, unsigned MaxLookup=MaxLookupSearchDepth)
This method strips off any GEP address adjustments, pointer casts or llvm.threadlocal....
bool capturesNothing(CaptureComponents CC)
LLVM_ABI bool isIdentifiedObject(const Value *V)
Return true if this pointer refers to a distinct and identifiable object.
constexpr StringRef AssumptionAttrKey
The key we use for assumption attributes.
constexpr bool isCallableCC(CallingConv::ID CC)
GenericCycleInfo< SSAContext > CycleInfo
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
A type to track pointer/struct usage and accesses for AAPointerInfo.
bool forallInterferingAccesses(AA::RangeTy Range, F CB) const
See AAPointerInfo::forallInterferingAccesses.
AAPointerInfo::const_bin_iterator end() const
ChangeStatus addAccess(Attributor &A, const AAPointerInfo::RangeList &Ranges, Instruction &I, std::optional< Value * > Content, AAPointerInfo::AccessKind Kind, Type *Ty, Instruction *RemoteI=nullptr)
Add a new Access to the state at offset Offset and with size Size.
DenseMap< const Instruction *, SmallVector< unsigned > > RemoteIMap
AAPointerInfo::const_bin_iterator begin() const
AAPointerInfo::OffsetInfo ReturnedOffsets
Flag to determine if the underlying pointer is reaching a return statement in the associated function...
State & operator=(State &&R)
State(State &&SIS)=default
const AAPointerInfo::Access & getAccess(unsigned Index) const
SmallVector< AAPointerInfo::Access > AccessList
bool isAtFixpoint() const override
See AbstractState::isAtFixpoint().
bool forallInterferingAccesses(Instruction &I, F CB, AA::RangeTy &Range) const
See AAPointerInfo::forallInterferingAccesses.
static State getWorstState(const State &SIS)
Return the worst possible representable state.
int64_t numOffsetBins() const
AAPointerInfo::OffsetBinsTy OffsetBins
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint().
State & operator=(const State &R)
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint().
const State & getAssumed() const
static State getBestState(const State &SIS)
Return the best possible representable state.
bool isValidState() const override
See AbstractState::isValidState().
AAIntraFnReachability Attribute
ReachabilityQueryInfo(const ReachabilityQueryInfo &RQI)
unsigned Hash
Precomputed hash for this RQI.
const Instruction * From
Start here,.
Reachable Result
and remember if it worked:
ReachabilityQueryInfo(const Instruction *From, const ToTy *To)
ReachabilityQueryInfo(Attributor &A, const Instruction &From, const ToTy &To, const AA::InstExclusionSetTy *ES, bool MakeUnique)
Constructor replacement to ensure unique and stable sets are used for the cache.
const ToTy * To
reach this place,
const AA::InstExclusionSetTy * ExclusionSet
without going through any of these instructions,
unsigned computeHashValue() const
An abstract interface for address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all align attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Align getKnownAlign() const
Return known alignment.
static LLVM_ABI const char ID
An abstract attribute for getting assumption information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract state for querying live call edges.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for specializing "dynamic" components of "denormal-fp-math" and "denormal-fp-ma...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all dereferenceable attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for llvm::GlobalValue information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for indirect call information interference.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface to track if a value leaves its defining function instance.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for computing reachability between functions.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool canReach(Attributor &A, const Function &Fn) const
If the function represented by this position can reach Fn.
virtual bool instructionCanReach(Attributor &A, const Instruction &Inst, const Function &Fn, const AA::InstExclusionSetTy *ExclusionSet=nullptr) const =0
Can Inst reach Fn.
An abstract interface to determine reachability of point A to B.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for identifying pointers from which loads can be marked invariant.
static LLVM_ABI const char ID
Unique ID (due to the unique address).
An abstract interface for liveness abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for memory access kind related attributes (readnone/readonly/writeonly).
bool isAssumedReadOnly() const
Return true if we assume that the underlying value is not accessed (=written) in its respective scope...
bool isKnownReadNone() const
Return true if we know that the underlying value is not read or accessed in its respective scope.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedReadNone() const
Return true if we assume that the underlying value is not read or accessed in its respective scope.
An abstract interface for all memory location attributes (readnone/argmemonly/inaccessiblememonly/ina...
static LLVM_ABI std::string getMemoryLocationsAsStr(MemoryLocationsKind MLK)
Return the locations encoded by MLK as a readable string.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
StateType::base_t MemoryLocationsKind
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for potential address space information.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all noalias attributes.
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nocapture attributes.
@ NO_CAPTURE_MAYBE_RETURNED
If we do not capture the value in memory or through integers we can only communicate it back as a der...
@ NO_CAPTURE
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
static LLVM_ABI const char ID
Unique ID (due to the unique address)
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we all...
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI void determineFunctionCaptureCapabilities(const IRPosition &IRP, const Function &F, BitIntegerState &State)
Update State according to the capture capabilities of F for position IRP.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for nofree.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for norecurse.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An AbstractAttribute for noreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isAlignedBarrier(const CallBase &CB, bool ExecutedAligned)
Helper function to determine if CB is an aligned (GPU) barrier.
static LLVM_ABI bool isNonRelaxedAtomic(const Instruction *I)
Helper function used to determine whether an instruction is non-relaxed atomic.
static LLVM_ABI bool isNoSyncIntrinsic(const Instruction *I)
Helper function specific for intrinsics which are potentially volatile.
An abstract interface for all noundef attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See IRAttribute::isImpliedByIR.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract Attribute for determining the necessity of the convergent attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for all nonnull attributes.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI bool isImpliedByIR(Attributor &A, const IRPosition &IRP, Attribute::AttrKind ImpliedAttributeKind, bool IgnoreSubsumingPositions=false)
See AbstractAttribute::isImpliedByIR(...).
A helper containing a list of offsets computed for a Use.
A container for a list of ranges.
static void set_difference(const RangeList &L, const RangeList &R, RangeList &D)
Copy ranges from L that are not in R, into D.
An abstract interface for struct information.
virtual bool reachesReturn() const =0
OffsetBinsTy::const_iterator const_bin_iterator
virtual const_bin_iterator begin() const =0
DenseMap< AA::RangeTy, SmallSet< unsigned, 4 > > OffsetBinsTy
static LLVM_ABI const char ID
Unique ID (due to the unique address)
virtual int64_t numOffsetBins() const =0
An abstract interface for potential values analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI const char ID
Unique ID (due to the unique address)
static LLVM_ABI Value * getSingleValue(Attributor &A, const AbstractAttribute &AA, const IRPosition &IRP, SmallVectorImpl< AA::ValueAndContext > &Values)
Extract the single value in Values if any.
An abstract interface for privatizability.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for undefined behavior.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for getting all assumption underlying objects.
virtual bool forallUnderlyingObjects(function_ref< bool(Value &)> Pred, AA::ValueScope Scope=AA::Interprocedural) const =0
Check Pred on all underlying objects in Scope collected so far.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for range value analysis.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract interface for value simplify abstract attribute.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
An abstract attribute for willreturn.
static LLVM_ABI const char ID
Unique ID (due to the unique address)
Helper to represent an access offset and size, with logic to deal with uncertainty and check for over...
static constexpr int64_t Unknown
static RangeTy getUnknown()
const Instruction * getCtxI() const
Base struct for all "concrete attribute" deductions.
void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
virtual StateType & getState()=0
Return the internal abstract state for inspection.
An interface to query the internal state of an abstract attribute.
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it cannot change itself.
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Helper for AA::PointerInfo::Access DenseMap/Set usage ignoring everything but the instruction.
static unsigned getHashValue(const Access &A)
AAPointerInfo::Access Access
static Access getTombstoneKey()
DenseMapInfo< Instruction * > Base
static bool isEqual(const Access &LHS, const Access &RHS)
static Access getEmptyKey()
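The helper entries above follow LLVM's DenseMapInfo protocol: a specialization supplies an empty key, a tombstone key, a hash, and an equality predicate so the type can be used in DenseMap and DenseSet. A minimal sketch of that protocol for a made-up key type (MyKey is purely illustrative):

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/DenseMapInfo.h"

// Hypothetical key type used only to illustrate the DenseMapInfo protocol.
struct MyKey {
  int ID;
};

namespace llvm {
template <> struct DenseMapInfo<MyKey> {
  // Reserved sentinel values; they must never be inserted by users.
  static inline MyKey getEmptyKey() { return MyKey{-1}; }
  static inline MyKey getTombstoneKey() { return MyKey{-2}; }
  static unsigned getHashValue(const MyKey &K) {
    return DenseMapInfo<int>::getHashValue(K.ID);
  }
  static bool isEqual(const MyKey &LHS, const MyKey &RHS) {
    return LHS.ID == RHS.ID;
  }
};
} // namespace llvm

// Usage: llvm::DenseMap<MyKey, unsigned> Counts; Counts[MyKey{42}] = 1;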
constexpr uint64_t value() const
This is a hole in the type system and should not be abused.
std::function< void( const ArgumentReplacementInfo &, Function &, Function::arg_iterator)> CalleeRepairCBTy
Callee repair callback type.
const Argument & getReplacedArg() const
std::function< void(const ArgumentReplacementInfo &, AbstractCallSite, SmallVectorImpl< Value * > &)> ACSRepairCBTy
Abstract call site (ACS) repair callback type.
The fixpoint analysis framework that orchestrates the attribute deduction.
std::function< std::optional< Value * >( const IRPosition &, const AbstractAttribute *, bool &)> SimplifictionCallbackTy
Register CB as a simplification callback.
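A hedged sketch of a callback matching the signature above; registration through Attributor::registerSimplificationCallback is assumed to be the intended entry point for this callback type, and the fixed replacement value is purely illustrative.

#include "llvm/Transforms/IPO/Attributor.h"

using namespace llvm;

// Illustrative only: always offer KnownReplacement as the simplified value
// for IRP. Returning std::nullopt instead would leave the decision to the
// default simplification logic.
static void registerExampleSimplification(Attributor &A, const IRPosition &IRP,
                                          Value *KnownReplacement) {
  auto SimplifyCB =
      [KnownReplacement](const IRPosition &, const AbstractAttribute *,
                         bool &UsedAssumedInformation) -> std::optional<Value *> {
    UsedAssumedInformation = false; // this example uses no assumed facts
    return KnownReplacement;
  };
  A.registerSimplificationCallback(IRP, SimplifyCB);
}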
Specialization of the integer state for a bit-wise encoding.
BitIntegerState & addKnownBits(base_t Bits)
Add the bits in BitsEncoding to the "known bits".
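The bit-wise integer state keeps two masks with the invariant that known bits are always a subset of assumed bits: known information only grows, while assumptions only shrink toward what is known. A standalone sketch of that lattice (not the LLVM class itself):

#include <cstdint>

// Minimal model of a bit-wise known/assumed lattice.
struct MiniBitState {
  using base_t = uint32_t;
  base_t Known = 0;            // definitely established bits
  base_t Assumed = ~base_t(0); // optimistically assumed bits

  MiniBitState &addKnownBits(base_t Bits) {
    Known |= Bits;
    Assumed |= Bits; // whatever is known is also assumed
    return *this;
  }
  MiniBitState &intersectAssumedBits(base_t Bits) {
    Assumed = (Assumed & Bits) | Known; // never drop below the known bits
    return *this;
  }
  bool isKnown(base_t Bits) const { return (Known & Bits) == Bits; }
  bool isAssumed(base_t Bits) const { return (Assumed & Bits) == Bits; }
};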
Simple wrapper for a single bit (boolean) state.
static constexpr DenormalMode getDefault()
Return the assumed default mode for a function without denormal-fp-math.
static constexpr DenormalMode getInvalid()
static Access getTombstoneKey()
static unsigned getHashValue(const Access &A)
static Access getEmptyKey()
AAPointerInfo::Access Access
static bool isEqual(const Access &LHS, const Access &RHS)
static bool isEqual(const AA::RangeTy &A, const AA::RangeTy B)
static AA::RangeTy getTombstoneKey()
static unsigned getHashValue(const AA::RangeTy &Range)
static AA::RangeTy getEmptyKey()
static ReachabilityQueryInfo< ToTy > EmptyKey
static ReachabilityQueryInfo< ToTy > TombstoneKey
static ReachabilityQueryInfo< ToTy > * getEmptyKey()
DenseMapInfo< std::pair< const Instruction *, const ToTy * > > PairDMI
static ReachabilityQueryInfo< ToTy > * getTombstoneKey()
static bool isEqual(const ReachabilityQueryInfo< ToTy > *LHS, const ReachabilityQueryInfo< ToTy > *RHS)
DenseMapInfo< const AA::InstExclusionSetTy * > InstSetDMI
static unsigned getHashValue(const ReachabilityQueryInfo< ToTy > *RQI)
An information struct used to provide DenseMap with the various necessary components for a given value type T.
State for dereferenceable attribute.
IncIntegerState DerefBytesState
State representing for dereferenceable bytes.
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Helper to describe and deal with positions in the LLVM-IR.
Function * getAssociatedFunction() const
Return the associated function, if any.
static const IRPosition callsite_returned(const CallBase &CB)
Create a position describing the returned value of CB.
static const IRPosition returned(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the returned value of F.
LLVM_ABI Argument * getAssociatedArgument() const
Return the associated argument, if any.
static const IRPosition value(const Value &V, const CallBaseContext *CBContext=nullptr)
Create a position describing the value of V.
int getCalleeArgNo() const
Return the callee argument number of the associated value if it is an argument or call site argument, otherwise a negative value.
static const IRPosition inst(const Instruction &I, const CallBaseContext *CBContext=nullptr)
Create a position describing the instruction I.
static const IRPosition callsite_argument(const CallBase &CB, unsigned ArgNo)
Create a position describing the argument of CB at position ArgNo.
@ IRP_ARGUMENT
An attribute for a function argument.
@ IRP_RETURNED
An attribute for the function return value.
@ IRP_CALL_SITE
An attribute for a call site (function scope).
@ IRP_CALL_SITE_RETURNED
An attribute for a call site return value.
@ IRP_FUNCTION
An attribute for a function (scope).
@ IRP_CALL_SITE_ARGUMENT
An attribute for a call site argument.
@ IRP_INVALID
An invalid position.
Instruction * getCtxI() const
Return the context instruction, if any.
static const IRPosition argument(const Argument &Arg, const CallBaseContext *CBContext=nullptr)
Create a position describing the argument Arg.
Type * getAssociatedType() const
Return the type this abstract attribute is associated with.
static const IRPosition function(const Function &F, const CallBaseContext *CBContext=nullptr)
Create a position describing the function scope of F.
const CallBaseContext * getCallBaseContext() const
Get the call base context from the position.
Value & getAssociatedValue() const
Return the value this abstract attribute is associated with.
Value & getAnchorValue() const
Return the value this abstract attribute is anchored with.
int getCallSiteArgNo() const
Return the call site argument number of the associated value if it is an argument or call site argument, otherwise a negative value.
static const IRPosition function_scope(const IRPosition &IRP, const CallBaseContext *CBContext=nullptr)
Create a position with function scope matching the "context" of IRP.
Kind getPositionKind() const
Return the associated position kind.
bool isArgumentPosition() const
Return true if the position is an argument or call site argument.
static const IRPosition callsite_function(const CallBase &CB)
Create a position describing the function scope of CB.
Function * getAnchorScope() const
Return the Function surrounding the anchor value.
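The factory helpers listed above are how positions are typically spelled out in Attributor code. A small sketch that collects a few positions around a call site, assuming the caller already has the CallBase at hand; which positions a deduction actually needs depends on the attribute in question.

#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/InstrTypes.h"
#include "llvm/Transforms/IPO/Attributor.h"

using namespace llvm;

// Gather IRPositions describing a call site, its arguments, and (if known)
// its callee. Purely illustrative.
static SmallVector<IRPosition, 8> collectPositionsForCall(CallBase &CB) {
  SmallVector<IRPosition, 8> Positions;
  Positions.push_back(IRPosition::callsite_function(CB));
  Positions.push_back(IRPosition::callsite_returned(CB));
  for (unsigned ArgNo = 0, E = CB.arg_size(); ArgNo != E; ++ArgNo)
    Positions.push_back(IRPosition::callsite_argument(CB, ArgNo));
  if (Function *Callee = CB.getCalledFunction()) {
    Positions.push_back(IRPosition::function(*Callee));
    Positions.push_back(IRPosition::returned(*Callee));
  }
  return Positions;
}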
ConstantRange getKnown() const
Return the known state encoding.
ConstantRange getAssumed() const
Return the assumed state encoding.
base_t getAssumed() const
Return the assumed state encoding.
static constexpr base_t getWorstState()
Helper that allows inserting a new assumption string into the known assumption set by creating a (static) object.
FPClassTest KnownFPClasses
Floating-point classes the value could be one of.
A "must be executed context" for a given program point PP is the set of instructions,...
iterator & end()
Return a universal end iterator.
bool findInContextOf(const Instruction *I, const Instruction *PP)
Helper to look for I in the context of PP.
iterator & begin(const Instruction *PP)
Return an iterator to explore the context around PP.
bool checkForAllContext(const Instruction *PP, function_ref< bool(const Instruction *)> Pred)
Check Pred on all instructions in the context.
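A hedged sketch of querying an already-constructed explorer through the two entry points listed above; constructing the explorer itself requires several analysis getters and is omitted here.

#include "llvm/Analysis/MustExecute.h"
#include "llvm/IR/Instruction.h"

using namespace llvm;

// Is OtherI guaranteed to execute whenever PP is reached?
static bool executesWith(MustBeExecutedContextExplorer &Explorer,
                         const Instruction *OtherI, const Instruction *PP) {
  return Explorer.findInContextOf(OtherI, PP);
}

// Does every instruction that must execute around PP leave memory untouched?
static bool contextIsReadOnly(MustBeExecutedContextExplorer &Explorer,
                              const Instruction *PP) {
  return Explorer.checkForAllContext(
      PP, [](const Instruction *I) { return !I->mayWriteToMemory(); });
}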
static unsigned MaxPotentialValues
Helper to tie an abstract state implementation to an abstract attribute.
StateType & getState() override
See AbstractAttribute::getState(...).
bool isPassthrough() const
LLVM_ABI bool unionAssumed(std::optional< Value * > Other)
Merge Other into the currently assumed simplified value.
std::optional< Value * > SimplifiedAssociatedValue
An assumed simplified value.
Type * Ty
The type of the original value.
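The simplification state above tracks an optional simplified value, and merging new candidates must distinguish "nothing recorded yet" from "two conflicting candidates". A conceptual sketch of such a merge, not the LLVM implementation, where std::nullopt means no candidate yet and nullptr marks the pessimistic give-up state:

#include <optional>

namespace llvm { class Value; }

// Illustrative merge of two assumed-simplified-value candidates.
static std::optional<llvm::Value *>
mergeSimplified(std::optional<llvm::Value *> Current,
                std::optional<llvm::Value *> Other) {
  if (!Other)
    return Current;   // nothing new was learned
  if (!Current)
    return Other;     // first candidate wins for now
  if (*Current == *Other)
    return Current;   // agreement, keep it
  return nullptr;     // conflicting candidates: give up
}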