#define DEBUG_TYPE "basicaa"
STATISTIC(SearchLimitReached, "Number of times the limit to "
                              "decompose GEPs is reached");
STATISTIC(SearchTimes, "Number of times a GEP is decomposed");
/// Returns the size of the object specified by V, or std::nullopt if unknown.
static std::optional<TypeSize> getObjectSize(const Value *V, const DataLayout &DL,
                                             const TargetLibraryInfo &TLI,
                                             bool NullIsValidLoc,
                                             bool RoundToAlign = false) {

/// Returns true if we can prove that the object specified by V is smaller than Size.
static bool isObjectSmallerThan(const Value *V, TypeSize Size, const DataLayout &DL,
                                const TargetLibraryInfo &TLI, bool NullIsValidLoc) {

  std::optional<TypeSize> ObjectSize =
      getObjectSize(V, DL, TLI, NullIsValidLoc, /*RoundToAlign*/ true);

/// Return the minimal extent from V to the end of the underlying object.
static TypeSize getMinimalExtentFrom(const Value &V, const LocationSize &LocSize,
                                     const DataLayout &DL, bool NullIsValidLoc) {
  bool CanBeNull, CanBeFreed;
  uint64_t DerefBytes =
      V.getPointerDereferenceableBytes(DL, CanBeNull, CanBeFreed);
  DerefBytes = (CanBeNull && NullIsValidLoc) ? 0 : DerefBytes;

/// Returns true if we can prove that the object specified by V has size Size.
static bool isObjectSize(const Value *V, TypeSize Size, const DataLayout &DL,
                         const TargetLibraryInfo &TLI, bool NullIsValidLoc) {
  std::optional<TypeSize> ObjectSize =
      getObjectSize(V, DL, TLI, NullIsValidLoc);
  return ObjectSize && *ObjectSize == Size;
}
  return Succs.empty() ||

bool EarliestEscapeInfo::isNotCapturedBefore(const Value *Object,
                                             const Instruction *I, bool OrAt) {
  auto Iter = EarliestEscapes.insert({Object, nullptr});

    if (EarliestCapture) {
      auto Ins = Inst2Obj.insert({EarliestCapture, {}});
      Ins.first->second.push_back(Object);
    }
    Iter.first->second = EarliestCapture;

  if (!Iter.first->second)
    return true;

  if (I == Iter.first->second) {
void EarliestEscapeInfo::removeInstruction(Instruction *I) {
  auto Iter = Inst2Obj.find(I);
  if (Iter != Inst2Obj.end()) {
    for (const Value *Obj : Iter->second)
      EarliestEscapes.erase(Obj);
    Inst2Obj.erase(I);
  }
}
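// CastedValue (its fields and helpers follow): it describes the value V as
// seen through a chain of casts, first truncated by TruncBits, then
// sign-extended by SExtBits, then zero-extended by ZExtBits, the same order
// evaluateWith() applies further down. IsNonNegative records when the value
// is known non-negative, which makes sign- and zero-extension interchangeable.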
  unsigned ZExtBits = 0;
  unsigned SExtBits = 0;
  unsigned TruncBits = 0;
  bool IsNonNegative = false;

  explicit CastedValue(const Value *V) : V(V) {}
  explicit CastedValue(const Value *V, unsigned ZExtBits, unsigned SExtBits,
                       unsigned TruncBits, bool IsNonNegative)
      : V(V), ZExtBits(ZExtBits), SExtBits(SExtBits), TruncBits(TruncBits),
        IsNonNegative(IsNonNegative) {}
  unsigned getBitWidth() const {
    return V->getType()->getPrimitiveSizeInBits() - TruncBits + ZExtBits +
           SExtBits;
  }
  CastedValue withValue(const Value *NewV, bool PreserveNonNeg) const {
    return CastedValue(NewV, ZExtBits, SExtBits, TruncBits,
                       IsNonNegative && PreserveNonNeg);
  }
  /// Replace V with zext(NewV).
  CastedValue withZExtOfValue(const Value *NewV, bool ZExtNonNegative) const {
    unsigned ExtendBy = V->getType()->getPrimitiveSizeInBits() -
                        NewV->getType()->getPrimitiveSizeInBits();
    if (ExtendBy <= TruncBits)
      // The new zext only affects bits that were truncated away anyway.
      return CastedValue(NewV, ZExtBits, SExtBits, TruncBits - ExtendBy,
                         IsNonNegative);

    ExtendBy -= TruncBits;
    return CastedValue(NewV, ZExtBits + SExtBits + ExtendBy, 0, 0,
                       ZExtNonNegative);
  }

  /// Replace V with sext(NewV).
  CastedValue withSExtOfValue(const Value *NewV) const {
    unsigned ExtendBy = V->getType()->getPrimitiveSizeInBits() -
                        NewV->getType()->getPrimitiveSizeInBits();
    if (ExtendBy <= TruncBits)
      return CastedValue(NewV, ZExtBits, SExtBits, TruncBits - ExtendBy,
                         IsNonNegative);

    ExtendBy -= TruncBits;
    return CastedValue(NewV, ZExtBits, SExtBits + ExtendBy, 0, IsNonNegative);
  }
  APInt evaluateWith(APInt N) const {
    assert(N.getBitWidth() == V->getType()->getPrimitiveSizeInBits() &&
           "Incompatible bit width");
    if (TruncBits) N = N.trunc(N.getBitWidth() - TruncBits);
    if (SExtBits) N = N.sext(N.getBitWidth() + SExtBits);
    if (ZExtBits) N = N.zext(N.getBitWidth() + ZExtBits);
    return N;
  }
  ConstantRange evaluateWith(ConstantRange N) const {
    assert(N.getBitWidth() == V->getType()->getPrimitiveSizeInBits() &&
           "Incompatible bit width");
    if (TruncBits) N = N.truncate(N.getBitWidth() - TruncBits);
    if (IsNonNegative && !N.isAllNonNegative())
      N = N.intersectWith(
          ConstantRange(APInt::getZero(N.getBitWidth()),
                        APInt::getSignedMinValue(N.getBitWidth())));
    if (SExtBits) N = N.signExtend(N.getBitWidth() + SExtBits);
    if (ZExtBits) N = N.zeroExtend(N.getBitWidth() + ZExtBits);
    return N;
  }
  bool canDistributeOver(bool NUW, bool NSW) const {
    return (!ZExtBits || NUW) && (!SExtBits || NSW);
  }
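  // Example: zext(a + b) may only be rewritten as zext(a) + zext(b) when the
  // add is nuw, and sext(a + b) as sext(a) + sext(b) when it is nsw; that is
  // exactly what canDistributeOver() above enforces before the casts are
  // distributed over a binary operator.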
  bool hasSameCastsAs(const CastedValue &Other) const {
    if (ZExtBits == Other.ZExtBits && SExtBits == Other.SExtBits &&
        TruncBits == Other.TruncBits)
      return true;
    // With a non-negative value the sext and zext bits are interchangeable.
    if (IsNonNegative || Other.IsNonNegative)
      return (ZExtBits + SExtBits == Other.ZExtBits + Other.SExtBits &&
              TruncBits == Other.TruncBits);
    return false;
  }
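// LinearExpression represents an affine view of a CastedValue, i.e.
// Scale * Val + Offset, with IsNSW recording whether the arithmetic that
// produced it is known not to wrap in the signed sense.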
struct LinearExpression {
  LinearExpression(const CastedValue &Val, const APInt &Scale,
                   const APInt &Offset, bool IsNSW)
      : Val(Val), Scale(Scale), Offset(Offset), IsNSW(IsNSW) {}

  LinearExpression(const CastedValue &Val) : Val(Val), IsNSW(true) {
    unsigned BitWidth = Val.getBitWidth();
    Scale = APInt(BitWidth, 1);
    Offset = APInt(BitWidth, 0);
  }
  LinearExpression mul(const APInt &Other, bool MulIsNSW) const {
    bool NSW = IsNSW && (Other.isOne() || (MulIsNSW && Offset.isZero()));
    return LinearExpression(Val, Scale * Other, Offset * Other, NSW);
  }
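  // NSW is only kept when multiplying by one, or when the multiply itself is
  // nsw and the offset is zero: in general (X +nsw Y) *nsw Z does not imply
  // (X *nsw Z) +nsw (Y *nsw Z), so the flag has to be dropped otherwise.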
/// Analyzes the specified value as a linear expression: "A*V + B", where A and
/// B are constant integers.
static LinearExpression GetLinearExpression(const CastedValue &Val,
                                            const DataLayout &DL,
                                            unsigned Depth, AssumptionCache *AC,
                                            DominatorTree *DT) {

  if (const ConstantInt *Const = dyn_cast<ConstantInt>(Val.V))
    return LinearExpression(Val, APInt(Val.getBitWidth(), 0),
                            Val.evaluateWith(Const->getValue()), true);
  if (const BinaryOperator *BOp = dyn_cast<BinaryOperator>(Val.V)) {
    if (ConstantInt *RHSC = dyn_cast<ConstantInt>(BOp->getOperand(1))) {
      APInt RHS = Val.evaluateWith(RHSC->getValue());

      bool NUW = true, NSW = true;
      if (isa<OverflowingBinaryOperator>(BOp)) {
        NUW &= BOp->hasNoUnsignedWrap();
        NSW &= BOp->hasNoSignedWrap();
      }
      if (!Val.canDistributeOver(NUW, NSW))
        return Val;
      LinearExpression E(Val);
      switch (BOp->getOpcode()) {
      case Instruction::Or:
        // X|C == X+C only when the or is disjoint.
        if (!cast<PossiblyDisjointInst>(BOp)->isDisjoint())
          return Val;
        [[fallthrough]];
      case Instruction::Add: {

      case Instruction::Sub: {

      case Instruction::Mul:

      case Instruction::Shl:
        if (RHS.getLimitedValue() > Val.getBitWidth())
          return Val;

        E.Offset <<= RHS.getLimitedValue();
        E.Scale <<= RHS.getLimitedValue();
  if (const auto *ZExt = dyn_cast<ZExtInst>(Val.V))
    return GetLinearExpression(
        Val.withZExtOfValue(ZExt->getOperand(0), ZExt->hasNonNeg()), DL,
        Depth + 1, AC, DT);

  if (isa<SExtInst>(Val.V))
    return GetLinearExpression(
        Val.withSExtOfValue(cast<CastInst>(Val.V)->getOperand(0)),
        DL, Depth + 1, AC, DT);

  return Val;
}
/// To ensure a pointer offset fits in an integer of size IndexSize (in bits)
/// when that size is smaller than the maximum index size.
static void adjustToIndexSize(APInt &Offset, unsigned IndexSize) {
  assert(IndexSize <= Offset.getBitWidth() && "Invalid IndexSize!");
  unsigned ShiftBits = Offset.getBitWidth() - IndexSize;
  if (ShiftBits != 0) {
    Offset <<= ShiftBits;
    Offset.ashrInPlace(ShiftBits);
  }
}
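// Worked example: with a 64-bit Offset and IndexSize == 32, ShiftBits is 32,
// so an offset of 0x00000000FFFFFFFF becomes 0xFFFFFFFF00000000 after the
// shift left and 0xFFFFFFFFFFFFFFFF (-1) after the arithmetic shift right,
// i.e. bit 31 is sign-extended through the upper half, matching the two's
// complement wrap-around of a narrow pointer index.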
struct VariableGEPIndex {

  bool hasNegatedScaleOf(const VariableGEPIndex &Other) const {
    if (IsNegated == Other.IsNegated)
      return Scale == -Other.Scale;
    return Scale == Other.Scale;
  }
  void print(raw_ostream &OS) const {
    OS << "(V=" << Val.V->getName()
       << ", zextbits=" << Val.ZExtBits
       << ", sextbits=" << Val.SExtBits
       << ", truncbits=" << Val.TruncBits
       << ", scale=" << Scale
       << ", negated=" << IsNegated << ")";
  }
DecomposedGEP
BasicAAResult::DecomposeGEPExpression(const Value *V, const DataLayout &DL,
                                      AssumptionCache *AC, DominatorTree *DT) {
  const Instruction *CxtI = dyn_cast<Instruction>(V);

  unsigned MaxIndexSize = DL.getMaxIndexSizeInBits();
  DecomposedGEP Decomposed;
  Decomposed.Offset = APInt(MaxIndexSize, 0);
  do {
    if (const GlobalAlias *GA = dyn_cast<GlobalAlias>(V)) {
      if (!GA->isInterposable()) {
        V = GA->getAliasee();
        continue;
      }
    }

    if (Op->getOpcode() == Instruction::BitCast ||
        Op->getOpcode() == Instruction::AddrSpaceCast) {
      V = Op->getOperand(0);
      continue;
    }

    if (const auto *PHI = dyn_cast<PHINode>(V)) {
      if (PHI->getNumIncomingValues() == 1) {
        V = PHI->getIncomingValue(0);
        continue;
      }
    } else if (const auto *Call = dyn_cast<CallBase>(V)) {
    if (Decomposed.InBounds == std::nullopt)
      Decomposed.InBounds = GEPOp->isInBounds();
    else if (!GEPOp->isInBounds())
      Decomposed.InBounds = false;
    unsigned IndexSize = DL.getIndexSizeInBits(AS);
    bool GepHasConstantOffset = true;
    for (User::const_op_iterator I = GEPOp->op_begin() + 1, E = GEPOp->op_end();
         I != E; ++I, ++GTI) {
      unsigned FieldNo = cast<ConstantInt>(Index)->getZExtValue();

      Decomposed.Offset += DL.getStructLayout(STy)->getElementOffset(FieldNo);

          CIdx->getValue().sextOrTrunc(MaxIndexSize);

      GepHasConstantOffset = false;
      unsigned Width = Index->getType()->getIntegerBitWidth();
      unsigned SExtBits = IndexSize > Width ? IndexSize - Width : 0;
      unsigned TruncBits = IndexSize < Width ? Width - IndexSize : 0;
      LinearExpression LE = GetLinearExpression(
          CastedValue(Index, 0, SExtBits, TruncBits, NonNeg), DL, 0, AC, DT);
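      // For example, an i32 GEP index on a target with 64-bit pointer indices
      // gets SExtBits == 32 and TruncBits == 0, while an i128 index would get
      // TruncBits == 64; GetLinearExpression then analyzes the index through
      // that implicit cast.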
      Decomposed.Offset += LE.Offset.sext(MaxIndexSize);
      APInt Scale = LE.Scale.sext(MaxIndexSize);
      for (unsigned i = 0, e = Decomposed.VarIndices.size(); i != e; ++i) {
        if ((Decomposed.VarIndices[i].Val.V == LE.Val.V ||
             areBothVScale(Decomposed.VarIndices[i].Val.V, LE.Val.V)) &&
            Decomposed.VarIndices[i].Val.hasSameCastsAs(LE.Val)) {
          Scale += Decomposed.VarIndices[i].Scale;
          Decomposed.VarIndices.erase(Decomposed.VarIndices.begin() + i);
          break;
        }
      }
        VariableGEPIndex Entry = {LE.Val, Scale, CxtI, LE.IsNSW,
                                  /* IsNegated */ false};
        Decomposed.VarIndices.push_back(Entry);
    if (GepHasConstantOffset)
      adjustToIndexSize(Decomposed.Offset, IndexSize);

  } while (--MaxLookup);

  SearchLimitReached++;
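// At this point the decomposition denotes, in effect,
//   Base + Offset + sum over i of (Scale_i * VarIndices[i].Val)
// and SearchLimitReached counts the cases where the MaxLookup budget above
// ran out before reaching a non-GEP base.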
  assert(Visited.empty() && "Visited must be cleared after use!");

  unsigned MaxLookup = 8;
    if (!Visited.insert(V).second)

    if (IgnoreLocals && isa<AllocaInst>(V))

    if (const Argument *Arg = dyn_cast<Argument>(V)) {
      if (Arg->hasNoAliasAttr() && Arg->onlyReadsMemory()) {

      if (!GV->isConstant())

    if (const SelectInst *SI = dyn_cast<SelectInst>(V)) {

    if (const PHINode *PN = dyn_cast<PHINode>(V)) {
      if (PN->getNumIncomingValues() > MaxLookup)

  } while (!Worklist.empty() && --MaxLookup);

  if (!Worklist.empty())
static bool isIntrinsicCall(const CallBase *Call, Intrinsic::ID IID) {
  const IntrinsicInst *II = dyn_cast<IntrinsicInst>(Call);
  return II && II->getIntrinsicID() == IID;
}
  MemoryEffects Min = Call->getAttributes().getMemoryEffects();

  if (const Function *F = dyn_cast<Function>(Call->getCalledOperand())) {

    if (Call->hasReadingOperandBundles())

    if (Call->hasClobberingOperandBundles())

  switch (F->getIntrinsicID()) {
  case Intrinsic::experimental_guard:
  case Intrinsic::experimental_deoptimize:

  return F->getMemoryEffects();
  if (Call->paramHasAttr(ArgIdx, Attribute::WriteOnly))
    return ModRefInfo::Mod;

  if (Call->paramHasAttr(ArgIdx, Attribute::ReadOnly))
    return ModRefInfo::Ref;

  if (Call->paramHasAttr(ArgIdx, Attribute::ReadNone))
    return ModRefInfo::NoModRef;
static const Function *getParent(const Value *V) {
  if (const Instruction *inst = dyn_cast<Instruction>(V)) {
    if (!inst->getParent())
      return nullptr;
    return inst->getParent()->getParent();
  }

  if (const Argument *arg = dyn_cast<Argument>(V))
    return arg->getParent();

  return nullptr;
}

static bool notDifferentParent(const Value *O1, const Value *O2) {
  const Function *F1 = getParent(O1);
  const Function *F2 = getParent(O2);
  return !F1 || !F2 || F1 == F2;
}
905 "BasicAliasAnalysis doesn't support interprocedural queries.");
906 return aliasCheck(LocA.
Ptr, LocA.
Size, LocB.
Ptr, LocB.
Size, AAQI, CtxI);
919 "AliasAnalysis query involving multiple functions!");
928 if (isa<AllocaInst>(Object))
929 if (
const CallInst *CI = dyn_cast<CallInst>(Call))
930 if (CI->isTailCall() &&
931 !CI->getAttributes().hasAttrSomewhere(Attribute::ByVal))
936 if (
auto *AI = dyn_cast<AllocaInst>(Object))
937 if (!AI->isStaticAlloca() &&
isIntrinsicCall(Call, Intrinsic::stackrestore))
945 if (!isa<Constant>(Object) && Call != Object &&
    unsigned OperandNo = 0;
    for (auto CI = Call->data_operands_begin(), CE = Call->data_operands_end();
         CI != CE; ++CI, ++OperandNo) {
      if (!(*CI)->getType()->isPointerTy())

      if (Call->doesNotAccessMemory(OperandNo))

      if (Call->onlyReadsMemory(OperandNo)) {

      if (Call->onlyWritesMemory(OperandNo)) {
/// Return true if we know V to be the base address of the corresponding
/// memory object.
static bool isBaseOfObject(const Value *V) {
  return (isa<AllocaInst>(V) || isa<GlobalVariable>(V));
}
  if (!isa<GEPOperator>(V2))

  DecomposedGEP DecompGEP1 = DecomposeGEPExpression(GEP1, DL, &AC, DT);
  DecomposedGEP DecompGEP2 = DecomposeGEPExpression(V2, DL, &AC, DT);

  if (DecompGEP1.Base == GEP1 && DecompGEP2.Base == V2)

  subtractDecomposedGEPs(DecompGEP1, DecompGEP2, AAQI);

  if (*DecompGEP1.InBounds && DecompGEP1.VarIndices.empty() &&
      DecompGEP1.Offset.sge(V2Size.getValue()) &&
  if (isa<GEPOperator>(V2)) {
    if (*DecompGEP2.InBounds && DecompGEP1.VarIndices.empty() &&
        DecompGEP1.Offset.sle(-V1Size.getValue()) &&

  if (DecompGEP1.Offset == 0 && DecompGEP1.VarIndices.empty())
  if (DecompGEP1.VarIndices.empty()) {

    const Value *RightPtr = GEP1;

    const bool Swapped = Off.isNegative();

      if (Off.ult(LSize)) {

          Off.ule(INT32_MAX) && (Off + VRightSize.getValue()).ule(LSize)) {

    if (!Overflow && Off.uge(UpperRange))
  if (DecompGEP1.VarIndices.size() == 1 &&
      DecompGEP1.VarIndices[0].Val.TruncBits == 0 &&
      DecompGEP1.Offset.isZero() &&

    const VariableGEPIndex &ScalableVar = DecompGEP1.VarIndices[0];
    APInt Scale =
        ScalableVar.IsNegated ? -ScalableVar.Scale : ScalableVar.Scale;

    bool Overflows = !DecompGEP1.VarIndices[0].IsNSW;
  for (unsigned i = 0, e = DecompGEP1.VarIndices.size(); i != e; ++i) {
    const VariableGEPIndex &Index = DecompGEP1.VarIndices[i];

    APInt ScaleForGCD = Scale;

    GCD = ScaleForGCD.abs();

        true, &AC, Index.CxtI);

           "Bit widths are normalized to MaxIndexSize");

    if (Index.IsNegated)
      OffsetRange = OffsetRange.sub(CR);
    else
      OffsetRange = OffsetRange.add(CR);
  }

  APInt ModOffset = DecompGEP1.Offset.srem(GCD);

      (GCD - ModOffset).uge(V1Size.getValue()))
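  // The reasoning here: every variable scale contributes a multiple of GCD,
  // so the distance between the two pointers is congruent to the constant
  // offset modulo GCD. When ModOffset >= V2Size and GCD - ModOffset >= V1Size,
  // the two accesses fall into disjoint slots of the GCD-sized stride and
  // cannot overlap.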
  std::optional<APInt> MinAbsVarIndex;
  if (DecompGEP1.VarIndices.size() == 1) {
    const VariableGEPIndex &Var = DecompGEP1.VarIndices[0];
    if (Var.Val.TruncBits == 0 &&

      auto MultiplyByScaleNoWrap = [](const VariableGEPIndex &Var) {

        int ValOrigBW = Var.Val.V->getType()->getPrimitiveSizeInBits();

        int MaxScaleValueBW = Var.Val.getBitWidth() - ValOrigBW;
        if (MaxScaleValueBW <= 0)
          return false;
        return Var.Scale.ule(

      if (MultiplyByScaleNoWrap(Var)) {
        MinAbsVarIndex = Var.Scale.abs();

  } else if (DecompGEP1.VarIndices.size() == 2) {

    const VariableGEPIndex &Var0 = DecompGEP1.VarIndices[0];
    const VariableGEPIndex &Var1 = DecompGEP1.VarIndices[1];
    if (Var0.hasNegatedScaleOf(Var1) && Var0.Val.TruncBits == 0 &&

      MinAbsVarIndex = Var0.Scale.abs();

  if (MinAbsVarIndex) {
    APInt OffsetLo = DecompGEP1.Offset - *MinAbsVarIndex;
    APInt OffsetHi = DecompGEP1.Offset + *MinAbsVarIndex;
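    // Since the variable part is known non-zero at this point, the true
    // distance is either at most OffsetLo or at least OffsetHi; if both of
    // those are far enough from zero relative to the access sizes, the two
    // locations cannot overlap.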
  if (constantOffsetHeuristic(DecompGEP1, V1Size, V2Size, &AC, DT, AAQI))
  if (const SelectInst *SI2 = dyn_cast<SelectInst>(V2))
    if (isValueEqualInPotentialCycles(SI->getCondition(), SI2->getCondition(),
  if (const PHINode *PN2 = dyn_cast<PHINode>(V2))
    if (PN2->getParent() == PN->getParent()) {
      std::optional<AliasResult> Alias;
  bool isRecursive = false;
  auto CheckForRecPhi = [&](Value *PV) {

  Value *OnePhi = nullptr;

    if (isa<PHINode>(PV1)) {
      if (OnePhi && OnePhi != PV1) {

    if (CheckForRecPhi(PV1))

    if (UniqueSrc.insert(PV1).second)

  if (OnePhi && UniqueSrc.size() > 1)
  for (unsigned i = 1, e = V1Srcs.size(); i != e; ++i) {
  V2 = V2->stripPointerCastsForAliasAnalysis();

  if (isa<UndefValue>(V1) || isa<UndefValue>(V2))

  if (isValueEqualInPotentialCycles(V1, V2, AAQI))
          O2, dyn_cast<Instruction>(O1), true))

          O1, dyn_cast<Instruction>(O2), true))

                               TLI, NullIsValidLocation)) ||

                               TLI, NullIsValidLocation)))
      if (OBU.getTagName() == "separate_storage") {
        auto ValidAssumeForPtrContext = [&](const Value *Ptr) {

          if (const Argument *PtrA = dyn_cast<Argument>(Ptr)) {

                &*PtrA->getParent()->getEntryBlock().begin();

        if ((O1 == HintO1 && O2 == HintO2) || (O1 == HintO2 && O2 == HintO1)) {

            ValidAssumeForPtrContext(V1) || ValidAssumeForPtrContext(V2)) {
  if (AAQI.Depth >= 512)

  const bool Swapped = V1 > V2;

    auto &Entry = Pair.first->second;
    if (!Entry.isDefinitive()) {

      if (Entry.isAssumption())
        ++Entry.NumAssumptionUses;
      aliasCheckRecursive(V1, V1Size, V2, V2Size, AAQI, O1, O2);

  auto &Entry = It->second;

  bool AssumptionDisproven =

  if (AssumptionDisproven)

  Entry.Result.swap(Swapped);

  if (AssumptionDisproven)

  if (AAQI.Depth == 1) {
  if (const GEPOperator *GV1 = dyn_cast<GEPOperator>(V1)) {

  } else if (const GEPOperator *GV2 = dyn_cast<GEPOperator>(V2)) {

  if (const PHINode *PN = dyn_cast<PHINode>(V1)) {

  } else if (const PHINode *PN = dyn_cast<PHINode>(V2)) {

  } else if (const SelectInst *S2 = dyn_cast<SelectInst>(V2)) {
bool BasicAAResult::isValueEqualInPotentialCycles(const Value *V,
                                                  const Value *V2,
                                                  const AAQueryInfo &AAQI) {

  const Instruction *Inst = dyn_cast<Instruction>(V);
  if (!Inst || Inst->getParent()->isEntryBlock())
    return true;
void BasicAAResult::subtractDecomposedGEPs(DecomposedGEP &DestGEP,
                                           const DecomposedGEP &SrcGEP,
                                           const AAQueryInfo &AAQI) {
  DestGEP.Offset -= SrcGEP.Offset;
  for (const VariableGEPIndex &Src : SrcGEP.VarIndices) {
    // Find a matching variable index in DestGEP and cancel it against Src.
    bool Found = false;
    for (auto I : enumerate(DestGEP.VarIndices)) {
      VariableGEPIndex &Dest = I.value();
      if ((!isValueEqualInPotentialCycles(Dest.Val.V, Src.Val.V, AAQI) &&
           !areBothVScale(Dest.Val.V, Src.Val.V)) ||
          !Dest.Val.hasSameCastsAs(Src.Val))
        continue;

      if (Dest.IsNegated) {
        Dest.Scale = -Dest.Scale;
        Dest.IsNegated = false;
      }

      if (Dest.Scale != Src.Scale) {
        Dest.Scale -= Src.Scale;
      } else {
        DestGEP.VarIndices.erase(DestGEP.VarIndices.begin() + I.index());
      }
      Found = true;
      break;
    }

    // An index that occurs only in SrcGEP is appended to DestGEP with its
    // scale negated.
    if (!Found) {
      VariableGEPIndex Entry = {Src.Val, Src.Scale, Src.CxtI, Src.IsNSW,
                                /* IsNegated */ true};
      DestGEP.VarIndices.push_back(Entry);
    }
  }
}
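// After this runs, DestGEP holds the difference of the two decompositions, so
// aliasGEP can reason about GEP1 - GEP2 as a single constant offset plus the
// remaining, un-cancelled variable indices.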
bool BasicAAResult::constantOffsetHeuristic(const DecomposedGEP &GEP,
                                            LocationSize MaybeV1Size,
                                            LocationSize MaybeV2Size,
                                            AssumptionCache *AC,
                                            DominatorTree *DT,
                                            const AAQueryInfo &AAQI) {
  if (GEP.VarIndices.size() != 2 || !MaybeV1Size.hasValue() ||
      !MaybeV2Size.hasValue())
    return false;

  const VariableGEPIndex &Var0 = GEP.VarIndices[0], &Var1 = GEP.VarIndices[1];

  if (Var0.Val.TruncBits != 0 || !Var0.Val.hasSameCastsAs(Var1.Val) ||
      !Var0.hasNegatedScaleOf(Var1) ||
      Var0.Val.V->getType() != Var1.Val.V->getType())
    return false;
  LinearExpression E0 =
      GetLinearExpression(CastedValue(Var0.Val.V), DL, 0, AC, DT);
  LinearExpression E1 =
      GetLinearExpression(CastedValue(Var1.Val.V), DL, 0, AC, DT);
  if (E0.Scale != E1.Scale || !E0.Val.hasSameCastsAs(E1.Val) ||
      !isValueEqualInPotentialCycles(E0.Val.V, E1.Val.V, AAQI))
    return false;

  APInt MinDiff = E0.Offset - E1.Offset, Wrapped = -MinDiff;
  MinDiff = APIntOps::umin(MinDiff, Wrapped);

  APInt MinDiffBytes =
      MinDiff.zextOrTrunc(Var0.Scale.getBitWidth()) * Var0.Scale.abs();

  return MinDiffBytes.uge(V1Size + GEP.Offset.abs()) &&
         MinDiffBytes.uge(V2Size + GEP.Offset.abs());
}
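// Illustrative case: for two GEPs indexed by %i and %i + 5 with the same
// element type, the variable indices cancel down to E0.Offset - E1.Offset == 5,
// MinDiffBytes becomes 5 * |Scale|, and the accesses are proven disjoint
// whenever both access sizes (plus the residual constant offset) fit inside
// that gap.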
void BasicAAWrapperPass::anchor() {}

                      "Basic Alias Analysis (stateless AA impl)", true, true)
  auto &ACT = getAnalysis<AssumptionCacheTracker>();
  auto &TLIWP = getAnalysis<TargetLibraryInfoWrapperPass>();
  auto &DTWP = getAnalysis<DominatorTreeWrapperPass>();

  Result.reset(new BasicAAResult(F.getParent()->getDataLayout(), F,
                                 TLIWP.getTLI(F), ACT.getAssumptionCache(F),
                                 &DTWP.getDomTree()));