29#include <unordered_map>
32#define DEBUG_TYPE "debug-ata"
34STATISTIC(NumDefsScanned,
"Number of dbg locs that get scanned for removal");
35STATISTIC(NumDefsRemoved,
"Number of dbg locs removed");
36STATISTIC(NumWedgesScanned,
"Number of dbg wedges scanned");
37STATISTIC(NumWedgesChanged,
"Number of dbg wedges changed");
41 cl::desc(
"Maximum num basic blocks before debug info dropped"),
56 return static_cast<VariableID>(Wrapped::getEmptyKey());
59 return static_cast<VariableID>(Wrapped::getTombstoneKey());
62 return Wrapped::getHashValue(
static_cast<unsigned>(Val));
77 std::unordered_map<const Instruction *, SmallVector<VarLocInfo>>
92 return Variables[
static_cast<unsigned>(
ID)];
98 auto R = VarLocsBeforeInst.find(Before);
99 if (R == VarLocsBeforeInst.end())
106 VarLocsBeforeInst[Before] = std::move(Wedge);
128 VarLocsBeforeInst[Before].emplace_back(VarLoc);
135 unsigned Counter = -1;
136 OS <<
"=== Variables ===\n";
143 OS <<
"[" << Counter <<
"] " << V.getVariable()->getName();
144 if (
auto F = V.getFragment())
145 OS <<
" bits [" <<
F->OffsetInBits <<
", "
146 <<
F->OffsetInBits +
F->SizeInBits <<
")";
147 if (
const auto *IA = V.getInlinedAt())
148 OS <<
" inlined-at " << *IA;
153 OS <<
"DEF Var=[" << (
unsigned)Loc.VariableID <<
"]"
154 <<
" Expr=" << *Loc.Expr <<
" Values=(";
155 for (
auto *Op : Loc.Values.location_ops()) {
156 errs() << Op->getName() <<
" ";
162 OS <<
"=== Single location vars ===\n";
169 OS <<
"=== In-line variable defs ===";
171 OS <<
"\n" << BB.getName() <<
":\n";
183 for (
const auto &VarLoc :
Builder.SingleLocVars)
186 SingleVarLocEnd = VarLocRecords.
size();
190 for (
auto &
P :
Builder.VarLocsBeforeInst) {
191 unsigned BlockStart = VarLocRecords.
size();
194 unsigned BlockEnd = VarLocRecords.
size();
196 if (BlockEnd != BlockStart)
197 VarLocsBeforeInst[
P.first] = {BlockStart, BlockEnd};
201 assert(Variables.empty() &&
"Expect clear before init");
204 Variables.reserve(
Builder.Variables.size() + 1);
205 Variables.push_back(
DebugVariable(
nullptr, std::nullopt,
nullptr));
206 Variables.append(
Builder.Variables.begin(),
Builder.Variables.end());
211 VarLocRecords.
clear();
212 VarLocsBeforeInst.clear();
220static std::pair<Value *, DIExpression *>
223 APInt OffsetInBytes(
DL.getTypeSizeInBits(Start->getType()),
false);
225 Start->stripAndAccumulateInBoundsConstantOffsets(
DL, OffsetInBytes);
228 Ops = {dwarf::DW_OP_plus_uconst, OffsetInBytes.
getZExtValue()};
239static std::optional<int64_t>
244 unsigned ExpectedDerefIdx = 0;
246 if (NumElements > 2 && Elements[0] == dwarf::DW_OP_plus_uconst) {
248 ExpectedDerefIdx = 2;
249 }
else if (NumElements > 3 && Elements[0] == dwarf::DW_OP_constu) {
250 ExpectedDerefIdx = 3;
251 if (Elements[2] == dwarf::DW_OP_plus)
253 else if (Elements[2] == dwarf::DW_OP_minus)
260 if (ExpectedDerefIdx >= NumElements)
265 if (Elements[ExpectedDerefIdx] != dwarf::DW_OP_deref)
269 if (NumElements == ExpectedDerefIdx + 1)
271 unsigned ExpectedFragFirstIdx = ExpectedDerefIdx + 1;
272 unsigned ExpectedFragFinalIdx = ExpectedFragFirstIdx + 2;
273 if (NumElements == ExpectedFragFinalIdx + 1 &&
309class MemLocFragmentFill {
319 OffsetInBitsTy, BaseAddress,
322 FragsInMemMap::Allocator IntervalMapAlloc;
335 unsigned OffsetInBits;
348 static bool intervalMapsAreEqual(
const FragsInMemMap &
A,
349 const FragsInMemMap &
B) {
350 auto AIt =
A.begin(), AEnd =
A.end();
351 auto BIt =
B.begin(), BEnd =
B.end();
352 for (; AIt != AEnd; ++AIt, ++BIt) {
355 if (AIt.start() != BIt.start() || AIt.stop() != BIt.stop())
364 static bool varFragMapsAreEqual(
const VarFragMap &
A,
const VarFragMap &
B) {
365 if (
A.size() !=
B.size())
367 for (
const auto &APair :
A) {
368 auto BIt =
B.find(APair.first);
371 if (!intervalMapsAreEqual(APair.second, BIt->second))
378 std::string
toString(
unsigned BaseID) {
380 return Bases[BaseID].getVariableLocationOp(0)->getName().str();
386 std::string
toString(FragsInMemMap::const_iterator It,
bool Newline =
true) {
388 std::stringstream S(
String);
390 S <<
"[" << It.start() <<
", " << It.stop()
393 S <<
"invalid iterator (end)";
400 FragsInMemMap meetFragments(
const FragsInMemMap &
A,
const FragsInMemMap &
B) {
401 FragsInMemMap
Result(IntervalMapAlloc);
402 for (
auto AIt =
A.begin(), AEnd =
A.end(); AIt != AEnd; ++AIt) {
409 if (!
B.overlaps(AIt.start(), AIt.stop()))
413 auto FirstOverlap =
B.find(AIt.start());
414 assert(FirstOverlap !=
B.end());
415 bool IntersectStart = FirstOverlap.start() < AIt.start();
417 <<
", IntersectStart: " << IntersectStart <<
"\n");
420 auto LastOverlap =
B.find(AIt.stop());
422 LastOverlap !=
B.end() && LastOverlap.start() < AIt.stop();
424 <<
", IntersectEnd: " << IntersectEnd <<
"\n");
427 if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
435 if (*AIt && *AIt == *FirstOverlap)
436 Result.insert(AIt.start(), AIt.stop(), *AIt);
444 auto Next = FirstOverlap;
445 if (IntersectStart) {
448 if (*AIt && *AIt == *FirstOverlap)
449 Result.insert(AIt.start(), FirstOverlap.stop(), *AIt);
459 if (*AIt && *AIt == *LastOverlap)
460 Result.insert(LastOverlap.start(), AIt.stop(), *AIt);
469 while (Next !=
B.end() && Next.start() < AIt.stop() &&
470 Next.stop() <= AIt.stop()) {
472 <<
"- insert intersection of a and " <<
toString(Next));
473 if (*AIt && *AIt == *Next)
474 Result.insert(Next.start(), Next.stop(), *Next);
483 void meetVars(VarFragMap &
A,
const VarFragMap &
B) {
487 for (
auto It =
A.begin(), End =
A.end(); It != End; ++It) {
488 unsigned AVar = It->first;
489 FragsInMemMap &AFrags = It->second;
490 auto BIt =
B.find(AVar);
491 if (BIt ==
B.end()) {
496 <<
Aggregates[AVar].first->getName() <<
"\n");
497 AFrags = meetFragments(AFrags, BIt->second);
507 bool FirstMeet =
true;
515 if (!Visited.
count(Pred))
518 auto PredLiveOut = LiveOut.
find(Pred);
523 BBLiveIn = PredLiveOut->second;
528 meetVars(BBLiveIn, PredLiveOut->second);
534 if (BBLiveIn.size() == 0)
538 auto CurrentLiveInEntry = LiveIn.
find(&BB);
540 if (CurrentLiveInEntry == LiveIn.
end()) {
543 LiveIn[&BB] = std::move(BBLiveIn);
549 if (!varFragMapsAreEqual(BBLiveIn, CurrentLiveInEntry->second)) {
551 CurrentLiveInEntry->second = std::move(BBLiveIn);
560 unsigned StartBit,
unsigned EndBit,
unsigned Base,
562 assert(StartBit < EndBit &&
"Cannot create fragment of size <= 0");
567 Loc.OffsetInBits = StartBit;
568 Loc.SizeInBits = EndBit - StartBit;
569 assert(
Base &&
"Expected a non-zero ID for Base address");
572 BBInsertBeforeMap[&BB][&Before].push_back(Loc);
574 <<
" bits [" << StartBit <<
", " << EndBit <<
")\n");
578 VarFragMap &LiveSet) {
595 StartBit = Frag->OffsetInBits;
596 EndBit = StartBit + Frag->SizeInBits;
611 const unsigned Base =
612 DerefOffsetInBytes && *DerefOffsetInBytes * 8 == StartBit
616 << StartBit <<
", " << EndBit <<
"): " <<
toString(
Base)
623 auto FragIt = LiveSet.find(Var);
626 if (FragIt == LiveSet.end()) {
628 auto P = LiveSet.try_emplace(Var, FragsInMemMap(IntervalMapAlloc));
629 assert(
P.second &&
"Var already in map?");
631 P.first->second.insert(StartBit, EndBit,
Base);
636 FragsInMemMap &FragMap = FragIt->second;
639 if (!FragMap.overlaps(StartBit, EndBit)) {
641 FragMap.insert(StartBit, EndBit,
Base);
647 auto FirstOverlap = FragMap.find(StartBit);
648 assert(FirstOverlap != FragMap.end());
649 bool IntersectStart = FirstOverlap.start() < StartBit;
652 auto LastOverlap = FragMap.find(EndBit);
653 bool IntersectEnd = LastOverlap.valid() && LastOverlap.start() < EndBit;
656 if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
657 LLVM_DEBUG(
dbgs() <<
"- Intersect single interval @ both ends\n");
665 auto EndBitOfOverlap = FirstOverlap.stop();
666 unsigned OverlapValue = FirstOverlap.value();
669 FirstOverlap.setStop(StartBit);
670 insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
671 OverlapValue, VarLoc.
DL);
674 FragMap.insert(EndBit, EndBitOfOverlap, OverlapValue);
675 insertMemLoc(BB, Before, Var, EndBit, EndBitOfOverlap, OverlapValue,
679 FragMap.insert(StartBit, EndBit,
Base);
689 if (IntersectStart) {
692 FirstOverlap.setStop(StartBit);
693 insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
694 *FirstOverlap, VarLoc.
DL);
703 LastOverlap.setStart(EndBit);
704 insertMemLoc(BB, Before, Var, EndBit, LastOverlap.stop(), *LastOverlap,
720 auto It = FirstOverlap;
723 while (It.valid() && It.start() >= StartBit && It.stop() <= EndBit) {
728 assert(!FragMap.overlaps(StartBit, EndBit));
730 FragMap.insert(StartBit, EndBit,
Base);
736 void process(
BasicBlock &BB, VarFragMap &LiveSet) {
737 BBInsertBeforeMap[&BB].
clear();
739 if (
const auto *Locs = FnVarLocs->
getWedge(&
I)) {
741 addDef(Loc,
I, *
I.getParent(), LiveSet);
750 : Fn(Fn), VarsWithStackSlot(VarsWithStackSlot) {}
776 this->FnVarLocs = FnVarLocs;
781 std::priority_queue<unsigned int, std::vector<unsigned int>,
782 std::greater<unsigned int>>
784 std::priority_queue<unsigned int, std::vector<unsigned int>,
785 std::greater<unsigned int>>
790 unsigned int RPONumber = 0;
791 for (
auto RI = RPOT.begin(), RE = RPOT.end(); RI != RE; ++RI) {
792 OrderToBB[RPONumber] = *RI;
793 BBToOrder[*RI] = RPONumber;
794 Worklist.push(RPONumber);
797 LiveIn.
init(RPONumber);
798 LiveOut.
init(RPONumber);
811 while (!Worklist.empty() || !Pending.empty()) {
817 while (!Worklist.empty()) {
821 bool InChanged = meet(*BB, Visited);
823 InChanged |= Visited.
insert(BB).second;
826 << BB->
getName() <<
" has new InLocs, process it\n");
830 VarFragMap LiveSet = LiveIn[BB];
833 process(*BB, LiveSet);
836 if (!varFragMapsAreEqual(LiveOut[BB], LiveSet)) {
838 <<
" has new OutLocs, add succs to worklist: [ ");
839 LiveOut[BB] = std::move(LiveSet);
841 if (OnPending.
insert(*I).second) {
843 Pending.push(BBToOrder[*
I]);
850 Worklist.swap(Pending);
853 assert(Pending.empty() &&
"Pending should be empty");
857 for (
auto &Pair : BBInsertBeforeMap) {
858 InsertMap &
Map = Pair.second;
859 for (
auto &Pair : Map) {
861 assert(InsertBefore &&
"should never be null");
862 auto FragMemLocs = Pair.second;
865 for (
auto &FragMemLoc : FragMemLocs) {
866 DIExpression *Expr = DIExpression::get(Ctx, std::nullopt);
868 Expr, FragMemLoc.OffsetInBits, FragMemLoc.SizeInBits);
870 FragMemLoc.OffsetInBits / 8);
872 FragMemLoc.DL.getInlinedAt());
873 FnVarLocs->
addVarLoc(InsertBefore, Var, Expr, FragMemLoc.DL,
874 Bases[FragMemLoc.Base]);
884class AssignmentTrackingLowering {
909 enum class LocKind { Mem, Val,
None };
926 enum S { Known, NoneOrPhi }
Status;
933 bool isSameSourceAssignment(
const Assignment &
Other)
const {
939 static const char *LUT[] = {
"Known",
"NoneOrPhi"};
954 return Assignment(Known,
ID, Source);
957 return Assignment(Known,
ID,
nullptr);
959 static Assignment makeNoneOrPhi() {
960 return Assignment(NoneOrPhi,
nullptr,
nullptr);
976 using UntaggedStoreAssignmentMap =
983 unsigned TrackedVariablesVectorSize = 0;
988 UntaggedStoreAssignmentMap UntaggedStoreVars;
992 InsertMap InsertBeforeMap;
999 static bool mapsAreEqual(
const BitVector &Mask,
const AssignmentMap &
A,
1000 const AssignmentMap &
B) {
1002 return A[VarID].isSameSourceAssignment(B[VarID]);
1014 AssignmentMap StackHomeValue;
1016 AssignmentMap DebugValue;
1031 const AssignmentMap &getAssignmentMap(AssignmentKind Kind)
const {
1034 return StackHomeValue;
1040 AssignmentMap &getAssignmentMap(AssignmentKind Kind) {
1041 return const_cast<AssignmentMap &
>(
1042 const_cast<const BlockInfo *
>(
this)->getAssignmentMap(Kind));
1045 bool isVariableTracked(
VariableID Var)
const {
1046 return VariableIDsInBlock[
static_cast<unsigned>(Var)];
1049 const Assignment &getAssignment(AssignmentKind Kind,
VariableID Var)
const {
1050 assert(isVariableTracked(Var) &&
"Var not tracked in block");
1051 return getAssignmentMap(Kind)[
static_cast<unsigned>(Var)];
1055 assert(isVariableTracked(Var) &&
"Var not tracked in block");
1056 return LiveLoc[
static_cast<unsigned>(Var)];
1062 VariableIDsInBlock.
set(
static_cast<unsigned>(Var));
1063 LiveLoc[
static_cast<unsigned>(Var)] = K;
1069 void setAssignment(AssignmentKind Kind,
VariableID Var,
1070 const Assignment &AV) {
1071 VariableIDsInBlock.
set(
static_cast<unsigned>(Var));
1072 getAssignmentMap(Kind)[
static_cast<unsigned>(Var)] = AV;
1078 bool hasAssignment(AssignmentKind Kind,
VariableID Var,
1079 const Assignment &AV)
const {
1080 if (!isVariableTracked(Var))
1082 return AV.isSameSourceAssignment(getAssignment(Kind, Var));
1088 return VariableIDsInBlock ==
Other.VariableIDsInBlock &&
1089 LiveLoc ==
Other.LiveLoc &&
1090 mapsAreEqual(VariableIDsInBlock, StackHomeValue,
1091 Other.StackHomeValue) &&
1092 mapsAreEqual(VariableIDsInBlock, DebugValue,
Other.DebugValue);
1096 return LiveLoc.size() == DebugValue.size() &&
1097 LiveLoc.size() == StackHomeValue.size();
1101 void init(
int NumVars) {
1102 StackHomeValue.clear();
1105 VariableIDsInBlock =
BitVector(NumVars);
1106 StackHomeValue.insert(StackHomeValue.begin(), NumVars,
1107 Assignment::makeNoneOrPhi());
1108 DebugValue.insert(DebugValue.begin(), NumVars,
1109 Assignment::makeNoneOrPhi());
1110 LiveLoc.
insert(LiveLoc.
begin(), NumVars, LocKind::None);
1114 template <
typename ElmtType,
typename FnInputType>
1118 ElmtType (*Fn)(FnInputType, FnInputType)) {
1123 static BlockInfo join(
const BlockInfo &
A,
const BlockInfo &
B,
int NumVars) {
1143 Intersect &=
B.VariableIDsInBlock;
1145 for (
auto VarID : Intersect.
set_bits()) {
1146 joinElmt(VarID, Join.LiveLoc,
A.LiveLoc,
B.LiveLoc, joinKind);
1147 joinElmt(VarID, Join.DebugValue,
A.DebugValue,
B.DebugValue,
1149 joinElmt(VarID, Join.StackHomeValue,
A.StackHomeValue,
B.StackHomeValue,
1153 Join.VariableIDsInBlock =
A.VariableIDsInBlock;
1154 Join.VariableIDsInBlock |=
B.VariableIDsInBlock;
1201 static LocKind joinKind(LocKind
A, LocKind
B);
1202 static Assignment joinAssignment(
const Assignment &
A,
const Assignment &
B);
1203 BlockInfo joinBlockInfo(
const BlockInfo &
A,
const BlockInfo &
B);
1209 void process(
BasicBlock &BB, BlockInfo *LiveSet);
1214 void processNonDbgInstruction(
Instruction &
I, BlockInfo *LiveSet);
1215 void processDbgInstruction(
Instruction &
I, BlockInfo *LiveSet);
1218 void processTaggedInstruction(
Instruction &
I, BlockInfo *LiveSet);
1221 void processUntaggedInstruction(
Instruction &
I, BlockInfo *LiveSet);
1223 void processDbgValue(
DbgValueInst &DVI, BlockInfo *LiveSet);
1225 void addMemDef(BlockInfo *LiveSet,
VariableID Var,
const Assignment &AV);
1227 void addDbgDef(BlockInfo *LiveSet,
VariableID Var,
const Assignment &AV);
1231 void setLocKind(BlockInfo *LiveSet,
VariableID Var, LocKind K);
1234 LocKind getLocKind(BlockInfo *LiveSet,
VariableID Var);
1236 bool hasVarWithAssignment(BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
1252 : Fn(Fn), Layout(Layout), VarsWithStackSlot(VarsWithStackSlot) {}
1260AssignmentTrackingLowering::getContainedFragments(
VariableID Var)
const {
1261 auto R = VarContains.
find(Var);
1262 if (R == VarContains.
end())
1263 return std::nullopt;
1267void AssignmentTrackingLowering::touchFragment(
VariableID Var) {
1268 VarsTouchedThisFrame.insert(Var);
1271void AssignmentTrackingLowering::setLocKind(BlockInfo *LiveSet,
VariableID Var,
1273 auto SetKind = [
this](BlockInfo *LiveSet,
VariableID Var, LocKind
K) {
1274 LiveSet->setLocKind(Var, K);
1277 SetKind(LiveSet, Var, K);
1280 for (
VariableID Frag : getContainedFragments(Var))
1281 SetKind(LiveSet, Frag, K);
1284AssignmentTrackingLowering::LocKind
1285AssignmentTrackingLowering::getLocKind(BlockInfo *LiveSet,
VariableID Var) {
1286 return LiveSet->getLocKind(Var);
1289void AssignmentTrackingLowering::addMemDef(BlockInfo *LiveSet,
VariableID Var,
1290 const Assignment &AV) {
1291 LiveSet->setAssignment(BlockInfo::Stack, Var, AV);
1296 Assignment FragAV = AV;
1297 FragAV.Source =
nullptr;
1298 for (
VariableID Frag : getContainedFragments(Var))
1299 LiveSet->setAssignment(BlockInfo::Stack, Frag, FragAV);
1302void AssignmentTrackingLowering::addDbgDef(BlockInfo *LiveSet,
VariableID Var,
1303 const Assignment &AV) {
1304 LiveSet->setAssignment(BlockInfo::Debug, Var, AV);
1309 Assignment FragAV = AV;
1310 FragAV.Source =
nullptr;
1311 for (
VariableID Frag : getContainedFragments(Var))
1312 LiveSet->setAssignment(BlockInfo::Debug, Frag, FragAV);
1316 return cast<DIAssignID>(
I.getMetadata(LLVMContext::MD_DIAssignID));
1324bool AssignmentTrackingLowering::hasVarWithAssignment(
1325 BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
VariableID Var,
1326 const Assignment &AV) {
1327 if (!LiveSet->hasAssignment(Kind, Var, AV))
1332 for (
VariableID Frag : getContainedFragments(Var))
1333 if (!LiveSet->hasAssignment(Kind, Frag, AV))
1339const char *
locStr(AssignmentTrackingLowering::LocKind Loc) {
1340 using LocKind = AssignmentTrackingLowering::LocKind;
1353void AssignmentTrackingLowering::emitDbgValue(
1354 AssignmentTrackingLowering::LocKind Kind,
1366 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1375 InsertBeforeMap[InsertBefore].
push_back(VarLoc);
1379 if (Kind == LocKind::Mem) {
1380 const auto *DAI = cast<DbgAssignIntrinsic>(Source);
1383 if (DAI->isKillAddress()) {
1385 Kind = LocKind::Val;
1387 Value *Val = DAI->getAddress();
1390 "fragment info should be stored in value-expression only");
1393 if (
auto OptFragInfo =
Source->getExpression()->getFragmentInfo()) {
1394 auto FragInfo = *OptFragInfo;
1396 Expr, FragInfo.OffsetInBits, FragInfo.SizeInBits);
1399 std::tie(Val, Expr) =
1406 if (Kind == LocKind::Val) {
1411 if (Kind == LocKind::None) {
1417void AssignmentTrackingLowering::processNonDbgInstruction(
1418 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1419 if (
I.hasMetadata(LLVMContext::MD_DIAssignID))
1420 processTaggedInstruction(
I, LiveSet);
1422 processUntaggedInstruction(
I, LiveSet);
1425void AssignmentTrackingLowering::processUntaggedInstruction(
1426 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1438 assert(!
I.hasMetadata(LLVMContext::MD_DIAssignID));
1439 auto It = UntaggedStoreVars.find(&
I);
1440 if (It == UntaggedStoreVars.end())
1443 LLVM_DEBUG(
dbgs() <<
"processUntaggedInstruction on UNTAGGED INST " <<
I
1447 for (
auto [Var, Info] : It->second) {
1451 addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1452 addDbgDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1453 setLocKind(LiveSet, Var, LocKind::Mem);
1461 if (
auto Frag =
V.getFragment()) {
1464 assert(R &&
"unexpected createFragmentExpression failure");
1468 if (
Info.OffsetInBits)
1469 Ops = {dwarf::DW_OP_plus_uconst,
Info.OffsetInBits / 8};
1475 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1480 Fn.
getContext(), 0, 0,
V.getVariable()->getScope(), InlinedAt);
1489 InsertBeforeMap[InsertBefore].push_back(VarLoc);
1493void AssignmentTrackingLowering::processTaggedInstruction(
1494 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1509 "expected DAI's variable to have stack slot");
1512 addMemDef(LiveSet, Var, AV);
1520 if (hasVarWithAssignment(LiveSet, BlockInfo::Debug, Var, AV)) {
1526 LiveSet->DebugValue[
static_cast<unsigned>(Var)].
dump(
dbgs());
1528 setLocKind(LiveSet, Var, LocKind::Mem);
1529 emitDbgValue(LocKind::Mem, DAI, &
I);
1538 LocKind PrevLoc = getLocKind(LiveSet, Var);
1540 case LocKind::Val: {
1544 setLocKind(LiveSet, Var, LocKind::Val);
1546 case LocKind::Mem: {
1550 Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
1551 if (DbgAV.Status == Assignment::NoneOrPhi) {
1554 setLocKind(LiveSet, Var, LocKind::None);
1555 emitDbgValue(LocKind::None, DAI, &
I);
1559 setLocKind(LiveSet, Var, LocKind::Val);
1561 emitDbgValue(LocKind::Val, DbgAV.Source, &
I);
1564 emitDbgValue(LocKind::None, DAI, &
I);
1568 case LocKind::None: {
1572 setLocKind(LiveSet, Var, LocKind::None);
1579 BlockInfo *LiveSet) {
1587 addDbgDef(LiveSet, Var, AV);
1595 if (hasVarWithAssignment(LiveSet, BlockInfo::Stack, Var, AV)) {
1603 <<
"Val, Stack matches Debug program but address is killed\n";);
1604 Kind = LocKind::Val;
1607 Kind = LocKind::Mem;
1609 setLocKind(LiveSet, Var, Kind);
1610 emitDbgValue(Kind, &DAI, &DAI);
1615 setLocKind(LiveSet, Var, LocKind::Val);
1616 emitDbgValue(LocKind::Val, &DAI, &DAI);
1620void AssignmentTrackingLowering::processDbgValue(
DbgValueInst &DVI,
1621 BlockInfo *LiveSet) {
1634 Assignment AV = Assignment::makeNoneOrPhi();
1635 addDbgDef(LiveSet, Var, AV);
1639 <<
" -> Val, dbg.value override");
1641 setLocKind(LiveSet, Var, LocKind::Val);
1642 emitDbgValue(LocKind::Val, &DVI, &DVI);
1645void AssignmentTrackingLowering::processDbgInstruction(
1646 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1647 if (
auto *DAI = dyn_cast<DbgAssignIntrinsic>(&
I))
1648 processDbgAssign(*DAI, LiveSet);
1649 else if (
auto *DVI = dyn_cast<DbgValueInst>(&
I))
1650 processDbgValue(*DVI, LiveSet);
1653void AssignmentTrackingLowering::resetInsertionPoint(
Instruction &After) {
1656 if (R == InsertBeforeMap.end())
1661void AssignmentTrackingLowering::process(
BasicBlock &BB, BlockInfo *LiveSet) {
1662 for (
auto II = BB.
begin(), EI = BB.
end(); II != EI;) {
1663 assert(VarsTouchedThisFrame.empty());
1667 if (!isa<DbgInfoIntrinsic>(&*II)) {
1668 if (II->isTerminator())
1670 resetInsertionPoint(*II);
1671 processNonDbgInstruction(*II, LiveSet);
1672 assert(LiveSet->isValid());
1676 if (!isa<DbgInfoIntrinsic>(&*II))
1678 resetInsertionPoint(*II);
1679 processDbgInstruction(*II, LiveSet);
1680 assert(LiveSet->isValid());
1686 for (
auto Var : VarsTouchedThisFrame) {
1687 LocKind Loc = getLocKind(LiveSet, Var);
1695 if (Loc != LocKind::Mem) {
1698 NotAlwaysStackHomed.insert(Aggr);
1701 VarsTouchedThisFrame.clear();
1705AssignmentTrackingLowering::LocKind
1706AssignmentTrackingLowering::joinKind(LocKind
A, LocKind
B) {
1709 return A ==
B ?
A : LocKind::None;
1712AssignmentTrackingLowering::Assignment
1713AssignmentTrackingLowering::joinAssignment(
const Assignment &
A,
1714 const Assignment &
B) {
1721 if (!
A.isSameSourceAssignment(
B))
1722 return Assignment::makeNoneOrPhi();
1723 if (
A.Status == Assignment::NoneOrPhi)
1724 return Assignment::makeNoneOrPhi();
1740 if (
A.Source ==
B.Source)
1742 if (
A.Source ==
nullptr ||
B.Source ==
nullptr)
1744 if (
A.Source->isIdenticalTo(
B.Source))
1749 assert(
A.Status ==
B.Status &&
A.Status == Assignment::Known);
1751 return Assignment::make(
A.ID, Source);
1754AssignmentTrackingLowering::BlockInfo
1755AssignmentTrackingLowering::joinBlockInfo(
const BlockInfo &
A,
1756 const BlockInfo &
B) {
1757 return BlockInfo::join(
A,
B, TrackedVariablesVectorSize);
1760bool AssignmentTrackingLowering::join(
1763 bool FirstJoin =
true;
1775 if (!Visited.
count(Pred))
1778 auto PredLiveOut = LiveOut.
find(Pred);
1784 BBLiveIn = PredLiveOut->second;
1786 BBLiveIn = joinBlockInfo(std::move(BBLiveIn), PredLiveOut->second);
1791 BBLiveIn.init(TrackedVariablesVectorSize);
1793 auto CurrentLiveInEntry = LiveIn.
find(&BB);
1796 if (CurrentLiveInEntry == LiveIn.
end() ||
1797 BBLiveIn != CurrentLiveInEntry->second) {
1798 LiveIn[&BB] = std::move(BBLiveIn);
1809 auto ALeft =
A.OffsetInBits;
1810 auto BLeft =
B.OffsetInBits;
1814 auto ARight = ALeft +
A.SizeInBits;
1815 auto BRight = BLeft +
B.SizeInBits;
1816 if (BRight > ARight)
1821static std::optional<at::AssignmentInfo>
1826 if (
const auto *
SI = dyn_cast<StoreInst>(&
I))
1828 if (
const auto *
MI = dyn_cast<MemIntrinsic>(&
I))
1831 return std::nullopt;
1855 unsigned &TrackedVariablesVectorSize) {
1867 for (
auto &BB : Fn) {
1868 for (
auto &
I : BB) {
1869 if (
auto *DDI = dyn_cast<DbgDeclareInst>(&
I)) {
1871 }
else if (
auto *DII = dyn_cast<DbgVariableIntrinsic>(&
I)) {
1874 if (Seen.
insert(DV).second)
1875 FragmentMap[DA].push_back(DV);
1881 std::optional<DIExpression::FragmentInfo> FragInfo =
1882 [&
Info, DAI]() -> std::optional<DIExpression::FragmentInfo> {
1884 F.OffsetInBits =
Info->OffsetInBits;
1885 F.SizeInBits =
Info->SizeInBits;
1887 F.OffsetInBits += ExistingFrag->OffsetInBits;
1889 if (
F.OffsetInBits == 0 &&
F.SizeInBits == *Sz)
1890 return std::nullopt;
1900 UntaggedStoreVars[&
I].push_back(
1903 if (Seen.
insert(DV).second)
1904 FragmentMap[DA].push_back(DV);
1912 for (
auto &Pair : FragmentMap) {
1916 assert(!(Elmt.getFragmentOrDefault() == Next.getFragmentOrDefault()));
1917 return Elmt.getFragmentOrDefault().SizeInBits >
1918 Next.getFragmentOrDefault().SizeInBits;
1924 for (
auto Pair : FragmentMap) {
1925 auto &Frags = Pair.second;
1926 for (
auto It = Frags.begin(), IEnd = Frags.end(); It != IEnd; ++It) {
1936 for (; OtherIt != IEnd; ++OtherIt) {
1940 Map[OtherVar].push_back(ThisVar);
1951 for (
auto *DDI : Declares)
1953 DDI->getDebugLoc(), DDI->getWrappedLocation());
1960 <<
": too many blocks (" << Fn.
size() <<
")\n");
1965 FnVarLocs = FnVarLocsBuilder;
1975 Fn, FnVarLocs, UntaggedStoreVars, TrackedVariablesVectorSize);
1979 std::priority_queue<unsigned int, std::vector<unsigned int>,
1980 std::greater<unsigned int>>
1982 std::priority_queue<unsigned int, std::vector<unsigned int>,
1983 std::greater<unsigned int>>
1988 unsigned int RPONumber = 0;
1989 for (
auto RI = RPOT.begin(), RE = RPOT.end(); RI != RE; ++RI) {
1990 OrderToBB[RPONumber] = *RI;
1991 BBToOrder[*RI] = RPONumber;
1992 Worklist.push(RPONumber);
1995 LiveIn.
init(RPONumber);
1996 LiveOut.
init(RPONumber);
2010 while (!Worklist.empty()) {
2015 while (!Worklist.empty()) {
2019 bool InChanged = join(*BB, Visited);
2021 InChanged |= Visited.
insert(BB).second;
2026 BlockInfo LiveSet = LiveIn[BB];
2029 process(*BB, &LiveSet);
2032 if (LiveOut[BB] != LiveSet) {
2034 <<
" has new OutLocs, add succs to worklist: [ ");
2035 LiveOut[BB] = std::move(LiveSet);
2037 if (OnPending.
insert(*I).second) {
2039 Pending.push(BBToOrder[*
I]);
2046 Worklist.swap(Pending);
2049 assert(Pending.empty() &&
"Pending should be empty");
2055 bool InsertedAnyIntrinsics =
false;
2064 for (
const auto &Pair : InsertBeforeMap) {
2065 const auto &Vec = Pair.second;
2071 if (NotAlwaysStackHomed.contains(Aggr))
2081 NotAlwaysStackHomed.insert(Aggr);
2090 if (AlwaysStackHomed.
insert(Aggr).second) {
2100 InsertedAnyIntrinsics =
true;
2106 for (
const auto &[InsertBefore, Vec] : InsertBeforeMap) {
2113 if (AlwaysStackHomed.
contains(Aggr))
2116 InsertedAnyIntrinsics =
true;
2119 FnVarLocs->
setWedge(InsertBefore, std::move(NewDefs));
2122 InsertedAnyIntrinsics |= emitPromotedVarLocs(FnVarLocs);
2124 return InsertedAnyIntrinsics;
2127bool AssignmentTrackingLowering::emitPromotedVarLocs(
2129 bool InsertedAnyIntrinsics =
false;
2132 for (
auto &BB : Fn) {
2133 for (
auto &
I : BB) {
2135 auto *DVI = dyn_cast<DbgValueInst>(&
I);
2143 assert(InsertBefore &&
"Unexpected: debug intrinsics after a terminator");
2147 InsertedAnyIntrinsics =
true;
2150 return InsertedAnyIntrinsics;
2163 bool Changed =
false;
2170 if (!isa<DbgVariableIntrinsic>(
I)) {
2172 VariableSet.
clear();
2176 const auto *Locs = FnVarLocs.
getWedge(&
I);
2181 bool ChangedThisWedge =
false;
2186 for (
auto RIt = Locs->rbegin(), REnd = Locs->rend(); RIt != REnd; ++RIt) {
2189 bool FirstDefOfFragment = VariableSet.
insert(Key).second;
2193 if (FirstDefOfFragment) {
2199 ChangedThisWedge =
true;
2206 if (ChangedThisWedge) {
2207 std::reverse(NewDefsReversed.
begin(), NewDefsReversed.
end());
2208 FnVarLocs.
setWedge(&
I, std::move(NewDefsReversed));
2227 bool Changed =
false;
2236 const auto *Locs = FnVarLocs.
getWedge(&
I);
2241 bool ChangedThisWedge =
false;
2249 std::nullopt, Loc.DL.getInlinedAt());
2250 auto VMI = VariableMap.
find(Key);
2254 if (VMI == VariableMap.
end() || VMI->second.first != Loc.Values ||
2255 VMI->second.second != Loc.Expr) {
2256 VariableMap[Key] = {Loc.Values, Loc.Expr};
2262 ChangedThisWedge =
true;
2267 if (ChangedThisWedge) {
2268 FnVarLocs.
setWedge(&
I, std::move(NewDefs));
2296 VarsWithDef[
A].
insert(V.getFragmentOrDefault());
2302 auto FragsIt = VarsWithDef.
find(
A);
2303 if (FragsIt == VarsWithDef.
end())
2306 return DIExpression::fragmentsOverlap(Frag, V.getFragmentOrDefault());
2310 bool Changed =
false;
2318 const auto *Locs = FnVarLocs.
getWedge(&
I);
2323 bool ChangedThisWedge =
false;
2331 Loc.DL.getInlinedAt()};
2336 if (Loc.Values.isKillLocation(Loc.Expr) && !HasDefinedBits(Aggr, Var)) {
2339 ChangedThisWedge =
true;
2343 DefineBits(Aggr, Var);
2348 if (ChangedThisWedge) {
2349 FnVarLocs.
setWedge(&
I, std::move(NewDefs));
2360 bool MadeChanges =
false;
2374 for (
auto &BB : Fn) {
2375 for (
auto &
I : BB) {
2396 bool Changed =
false;
2401 AssignmentTrackingLowering
Pass(Fn, Layout, &VarsWithStackSlot);
2402 Changed =
Pass.run(FnVarLocs);
2406 MemLocFragmentFill
Pass(Fn, &VarsWithStackSlot);
2407 Pass.run(FnVarLocs);
2422 LLVM_DEBUG(
dbgs() <<
"AssignmentTrackingAnalysis run on " <<
F.getName()
2424 auto DL = std::make_unique<DataLayout>(
F.getParent());
2436 Results->print(
errs(),
F);
2448 "Assignment Tracking Analysis",
false,
true)
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis Results
std::pair< const DILocalVariable *, const DILocation * > DebugAggregate
A whole (unfragmented) source variable.
static void analyzeFunction(Function &Fn, const DataLayout &Layout, FunctionVarLocsBuilder *FnVarLocs)
static std::pair< Value *, DIExpression * > walkToAllocaAndPrependOffsetDeref(const DataLayout &DL, Value *Start, DIExpression *Expression)
Walk backwards along constant GEPs and bitcasts to the base storage from Start as far as possible.
static DIAssignID * getIDFromMarker(const DbgAssignIntrinsic &DAI)
static DenseSet< DebugAggregate > findVarsWithStackSlot(Function &Fn)
static bool fullyContains(DIExpression::FragmentInfo A, DIExpression::FragmentInfo B)
Return true if A fully contains B.
static DebugAggregate getAggregate(const DbgVariableIntrinsic *DII)
static std::optional< at::AssignmentInfo > getUntaggedStoreAssignmentInfo(const Instruction &I, const DataLayout &Layout)
static bool removeUndefDbgLocsFromEntryBlock(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
static cl::opt< bool > PrintResults("print-debug-ata", cl::init(false), cl::Hidden)
Print the results of the analysis. Respects -filter-print-funcs.
const char * locStr(AssignmentTrackingLowering::LocKind Loc)
static bool removeRedundantDbgLocsUsingForwardScan(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
Remove redundant location defs using a forward scan.
static bool removeRedundantDbgLocs(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
static cl::opt< bool > EnableMemLocFragFill("mem-loc-frag-fill", cl::init(true), cl::Hidden)
Option for debugging the pass, determines if the memory location fragment filling happens after generating the variable locations.
static DIAssignID * getIDFromInst(const Instruction &I)
static AssignmentTrackingLowering::OverlapMap buildOverlapMapAndRecordDeclares(Function &Fn, FunctionVarLocsBuilder *FnVarLocs, AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars, unsigned &TrackedVariablesVectorSize)
Build a map of {Variable x: Variables y} where all variable fragments contained within the variable fragment x are in set y.
static std::optional< int64_t > getDerefOffsetInBytes(const DIExpression *DIExpr)
Extract the offset used in DIExpr.
static bool removeRedundantDbgLocsUsingBackwardScan(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
Remove redundant definitions within sequences of consecutive location defs.
static cl::opt< unsigned > MaxNumBlocks("debug-ata-max-blocks", cl::init(10000), cl::desc("Maximum num basic blocks before debug info dropped"), cl::Hidden)
This file implements the BitVector class.
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< CoreCLRGC > E("coreclr", "CoreCLR-compatible GC")
Analysis containing CSE Info
This file defines DenseMapInfo traits for DenseMap.
This file contains constants used for implementing Dwarf debug support.
std::optional< std::vector< StOtherPiece > > Other
This file implements a coalescing interval map for small objects.
This header defines various interfaces for pass management in LLVM.
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
This file builds on the ADT/GraphTraits.h file to build a generic graph post order iterator.
static bool isValid(const char C)
Returns true if C is a valid mangled character: <0-9a-zA-Z_>.
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
Scalar Replacement Of Aggregates
This file defines the SmallSet class.
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metrics from passes.
#define STATISTIC(VARNAME, DESC)
Helper class to build FunctionVarLocs, since that class isn't easy to modify.
void addVarLoc(Instruction *Before, DebugVariable Var, DIExpression *Expr, DebugLoc DL, RawLocationWrapper R)
Add a def to the wedge of defs just before \p Before.
void setWedge(const Instruction *Before, SmallVector< VarLocInfo > &&Wedge)
Replace the defs that come just before \p Before with \p Wedge.
const SmallVectorImpl< VarLocInfo > * getWedge(const Instruction *Before) const
Return ptr to wedge of defs or nullptr if no defs come just before \p Before.
unsigned getNumVariables() const
void addSingleLocVar(DebugVariable Var, DIExpression *Expr, DebugLoc DL, RawLocationWrapper R)
Add a def for a variable that is valid for its lifetime.
VariableID insertVariable(DebugVariable V)
Find or insert V and return the ID.
const DebugVariable & getVariable(VariableID ID) const
Get a variable from its ID.
Class for arbitrary precision integers.
uint64_t getZExtValue() const
Get zero extended value.
bool getBoolValue() const
Convert APInt to a boolean value.
an instruction to allocate memory on the stack
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
AssignmentTrackingAnalysis()
bool runOnFunction(Function &F) override
runOnFunction - Virtual method overridden by subclasses to do the per-function processing of the pass.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
bool isEntryBlock() const
Return true if this is the entry block of the containing function.
iterator_range< const_set_bits_iterator > set_bits() const
A structured debug information entry.
static DIExpression * append(const DIExpression *Expr, ArrayRef< uint64_t > Ops)
Append the opcodes Ops to DIExpr.
unsigned getNumElements() const
bool startsWithDeref() const
Return whether the first element a DW_OP_deref.
static std::optional< FragmentInfo > getFragmentInfo(expr_op_iterator Start, expr_op_iterator End)
Retrieve the details of this fragment expression.
ArrayRef< uint64_t > getElements() const
static std::optional< DIExpression * > createFragmentExpression(const DIExpression *Expr, unsigned OffsetInBits, unsigned SizeInBits)
Create a DIExpression to describe one part of an aggregate variable that is fragmented across multiple Values.
static DIExpression * prepend(const DIExpression *Expr, uint8_t Flags, int64_t Offset=0)
Prepend DIExpr with a deref and offset operation and optionally turn it into a stack value or/and an ...
static DIExpression * prependOpcodes(const DIExpression *Expr, SmallVectorImpl< uint64_t > &Ops, bool StackValue=false, bool EntryValue=false)
Prepend DIExpr with the given opcodes and optionally turn it into a stack value.
std::optional< uint64_t > getSizeInBits() const
Determines the size of the variable's type.
StringRef getName() const
A parsed version of the target data layout string in and methods for querying it.
This represents the llvm.dbg.assign instruction.
DIAssignID * getAssignID() const
bool isKillAddress() const
Check whether this kills the address component.
This represents the llvm.dbg.value instruction.
This is the common base class for debug info intrinsics for variables.
DILocalVariable * getVariable() const
DIExpression * getExpression() const
RawLocationWrapper getWrappedLocation() const
DILocation * getInlinedAt() const
Identifies a unique instance of a variable.
const DILocation * getInlinedAt() const
const DILocalVariable * getVariable() const
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
void init(unsigned InitNumEntries)
Implements a dense probed hash-table based set.
Class representing an expression and its matching format.
FunctionPass class - This class is used to implement most global optimizations.
Data structure describing the variable locations in a function.
void print(raw_ostream &OS, const Function &Fn) const
const VarLocInfo * locs_begin(const Instruction *Before) const
First variable location definition that comes before Before.
const VarLocInfo * single_locs_begin() const
const VarLocInfo * locs_end(const Instruction *Before) const
One past the last variable location definition that comes before Before.
const VarLocInfo * single_locs_end() const
One past the last single-location variable location definition.
void init(FunctionVarLocsBuilder &Builder)
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Module * getParent()
Get the module that this global value is contained inside of...
const DebugLoc & getDebugLoc() const
Return the debug location for this node as a DebugLoc.
bool isTerminator() const
const_iterator begin() const
void insert(KeyT a, KeyT b, ValT y)
insert - Add a mapping of [a;b] to y, coalesce with adjacent intervals.
void clear()
clear - Remove all entries.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
void push_back(MachineInstr *MI)
This class implements a map that also provides access to all stored values in a deterministic order.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
Pass interface - Implemented by all 'passes'.
static PoisonValue * get(Type *T)
Static factory methods - Return a 'poison' object of the specified type.
Lightweight class that wraps the location operand metadata of a debug intrinsic.
Value * getVariableLocationOp(unsigned OpIdx) const
Implements a dense probed hash-table based set with some number of buckets stored inline.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
reference emplace_back(ArgTypes &&... Args)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
Target - Wrapper for Target specific information.
static IntegerType * getInt1Ty(LLVMContext &C)
UniqueVector - This class produces a sequential ID number (base 1) for each unique entry that is adde...
unsigned insert(const T &Entry)
insert - Append entry to the vector if it doesn't already exist.
size_t size() const
size - Returns the number of entries in the vector.
LLVM Value Representation.
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
size_type count(const_arg_type_t< ValueT > V) const
Return 1 if the specified key is in the set, 0 otherwise.
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
void deleteAll(Function *F)
Remove all Assignment Tracking related intrinsics and metadata from F.
AssignmentMarkerRange getAssignmentMarkers(DIAssignID *ID)
Return a range of dbg.assign intrinsics which use \ID as an operand.
std::optional< AssignmentInfo > getAssignmentInfo(const DataLayout &DL, const MemIntrinsic *I)
initializer< Ty > init(const Ty &Val)
std::optional< const char * > toString(const std::optional< DWARFFormValue > &V)
Take an optional DWARFFormValue and try to extract a string value from it.
@ DW_OP_LLVM_fragment
Only used in LLVM metadata.
PointerTypeMap run(const Module &M)
Compute the PointerTypeMap for the module M.
This is an optimization pass for GlobalISel generic memory operations.
void dump(const SparseBitVector< ElementSize > &LHS, raw_ostream &out)
Interval::succ_iterator succ_end(Interval *I)
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
bool operator!=(uint64_t V1, const APInt &V2)
Interval::succ_iterator succ_begin(Interval *I)
succ_begin/succ_end - define methods so that Intervals may be used just like BasicBlocks can with the...
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
Interval::pred_iterator pred_end(Interval *I)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
auto reverse(ContainerTy &&C)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isFunctionInPrintList(StringRef FunctionName)
Interval::pred_iterator pred_begin(Interval *I)
pred_begin/pred_end - define methods so that Intervals may be used just like BasicBlocks can with the...
VariableID
Type wrapper for integer ID for Variables. 0 is reserved.
raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
bool isAssignmentTrackingEnabled(const Module &M)
Return true if assignment tracking is enabled for module M.
constexpr std::nullopt_t None
Holds the characteristics of one fragment of a larger variable.
static VariableID getTombstoneKey()
static bool isEqual(const VariableID &LHS, const VariableID &RHS)
static unsigned getHashValue(const VariableID &Val)
static VariableID getEmptyKey()
An information struct used to provide DenseMap with the various necessary components for a given valu...
Variable location definition used by FunctionVarLocs.
RawLocationWrapper Values
llvm::VariableID VariableID