40#include <unordered_map>
43#define DEBUG_TYPE "debug-ata"
45STATISTIC(NumDefsScanned,
"Number of dbg locs that get scanned for removal");
46STATISTIC(NumDefsRemoved,
"Number of dbg locs removed");
47STATISTIC(NumWedgesScanned,
"Number of dbg wedges scanned");
48STATISTIC(NumWedgesChanged,
"Number of dbg wedges changed");
52 cl::desc(
"Maximum num basic blocks before debug info dropped"),
73 return static_cast<VariableID>(Wrapped::getEmptyKey());
76 return static_cast<VariableID>(Wrapped::getTombstoneKey());
79 return Wrapped::getHashValue(
static_cast<unsigned>(Val));
101 friend FunctionVarLocs;
105 std::unordered_map<VarLocInsertPt, SmallVector<VarLocInfo>> VarLocsBeforeInst;
114 return static_cast<VariableID>(Variables.insert(V));
119 return Variables[
static_cast<unsigned>(
ID)];
125 auto R = VarLocsBeforeInst.find(Before);
126 if (R == VarLocsBeforeInst.end())
133 VarLocsBeforeInst[Before] = std::move(Wedge);
144 SingleLocVars.emplace_back(VarLoc);
155 VarLocsBeforeInst[Before].emplace_back(VarLoc);
162 unsigned Counter = -1;
163 OS <<
"=== Variables ===\n";
170 OS <<
"[" << Counter <<
"] " << V.getVariable()->getName();
171 if (
auto F = V.getFragment())
172 OS <<
" bits [" <<
F->OffsetInBits <<
", "
173 <<
F->OffsetInBits +
F->SizeInBits <<
")";
174 if (
const auto *IA = V.getInlinedAt())
175 OS <<
" inlined-at " << *IA;
180 OS <<
"DEF Var=[" << (
unsigned)
Loc.VariableID <<
"]"
181 <<
" Expr=" << *
Loc.Expr <<
" Values=(";
182 for (
auto *
Op :
Loc.Values.location_ops()) {
183 errs() <<
Op->getName() <<
" ";
189 OS <<
"=== Single location vars ===\n";
196 OS <<
"=== In-line variable defs ===";
198 OS <<
"\n" << BB.getName() <<
":\n";
210 for (
const auto &VarLoc : Builder.SingleLocVars)
211 VarLocRecords.emplace_back(VarLoc);
213 SingleVarLocEnd = VarLocRecords.size();
219 for (
auto &
P : Builder.VarLocsBeforeInst) {
225 unsigned BlockStart = VarLocRecords.size();
232 auto It = Builder.VarLocsBeforeInst.find(&DVR);
233 if (It == Builder.VarLocsBeforeInst.end())
236 VarLocRecords.emplace_back(VarLoc);
239 VarLocRecords.emplace_back(VarLoc);
240 unsigned BlockEnd = VarLocRecords.size();
242 if (BlockEnd != BlockStart)
243 VarLocsBeforeInst[
I] = {BlockStart, BlockEnd};
247 assert(Variables.empty() &&
"Expect clear before init");
250 Variables.reserve(Builder.Variables.size() + 1);
251 Variables.push_back(
DebugVariable(
nullptr, std::nullopt,
nullptr));
252 Variables.append(Builder.Variables.begin(), Builder.Variables.end());
257 VarLocRecords.clear();
258 VarLocsBeforeInst.clear();
266static std::pair<Value *, DIExpression *>
269 APInt OffsetInBytes(
DL.getTypeSizeInBits(Start->getType()),
false);
271 Start->stripAndAccumulateInBoundsConstantOffsets(
DL, OffsetInBytes);
285static std::optional<int64_t>
290 unsigned ExpectedDerefIdx = 0;
292 if (NumElements > 2 && Elements[0] == dwarf::DW_OP_plus_uconst) {
294 ExpectedDerefIdx = 2;
295 }
else if (NumElements > 3 && Elements[0] == dwarf::DW_OP_constu) {
296 ExpectedDerefIdx = 3;
297 if (Elements[2] == dwarf::DW_OP_plus)
299 else if (Elements[2] == dwarf::DW_OP_minus)
306 if (ExpectedDerefIdx >= NumElements)
311 if (Elements[ExpectedDerefIdx] != dwarf::DW_OP_deref)
315 if (NumElements == ExpectedDerefIdx + 1)
317 unsigned ExpectedFragFirstIdx = ExpectedDerefIdx + 1;
318 unsigned ExpectedFragFinalIdx = ExpectedFragFirstIdx + 2;
319 if (NumElements == ExpectedFragFinalIdx + 1 &&
369class MemLocFragmentFill {
371 FunctionVarLocsBuilder *FnVarLocs;
372 const DenseSet<DebugAggregate> *VarsWithStackSlot;
373 bool CoalesceAdjacentFragments;
376 using BaseAddress = unsigned;
377 using OffsetInBitsTy = unsigned;
378 using FragTraits = IntervalMapHalfOpenInfo<OffsetInBitsTy>;
379 using FragsInMemMap = IntervalMap<
380 OffsetInBitsTy, BaseAddress,
381 IntervalMapImpl::NodeSizer<OffsetInBitsTy, BaseAddress>::LeafSize,
383 FragsInMemMap::Allocator IntervalMapAlloc;
384 using VarFragMap = DenseMap<unsigned, FragsInMemMap>;
388 UniqueVector<RawLocationWrapper> Bases;
390 DenseMap<const BasicBlock *, VarFragMap> LiveIn;
391 DenseMap<const BasicBlock *, VarFragMap> LiveOut;
396 unsigned OffsetInBits;
400 using InsertMap = MapVector<VarLocInsertPt, SmallVector<FragMemLoc>>;
407 DenseMap<const BasicBlock *, InsertMap> BBInsertBeforeMap;
409 static bool intervalMapsAreEqual(
const FragsInMemMap &
A,
410 const FragsInMemMap &
B) {
411 auto AIt =
A.
begin(), AEnd =
A.end();
412 auto BIt =
B.begin(), BEnd =
B.end();
413 for (; AIt != AEnd; ++AIt, ++BIt) {
416 if (AIt.start() != BIt.start() || AIt.stop() != BIt.stop())
425 static bool varFragMapsAreEqual(
const VarFragMap &
A,
const VarFragMap &
B) {
426 if (
A.size() !=
B.size())
428 for (
const auto &APair :
A) {
429 auto BIt =
B.find(APair.first);
432 if (!intervalMapsAreEqual(APair.second, BIt->second))
439 std::string
toString(
unsigned BaseID) {
441 return Bases[BaseID].getVariableLocationOp(0)->getName().str();
447 std::string
toString(FragsInMemMap::const_iterator It,
bool Newline =
true) {
449 std::stringstream S(
String);
451 S <<
"[" << It.start() <<
", " << It.stop()
454 S <<
"invalid iterator (end)";
461 FragsInMemMap meetFragments(
const FragsInMemMap &
A,
const FragsInMemMap &
B) {
462 FragsInMemMap
Result(IntervalMapAlloc);
463 for (
auto AIt =
A.begin(), AEnd =
A.end(); AIt != AEnd; ++AIt) {
470 if (!
B.overlaps(AIt.start(), AIt.stop()))
474 auto FirstOverlap =
B.find(AIt.start());
475 assert(FirstOverlap !=
B.end());
476 bool IntersectStart = FirstOverlap.start() < AIt.start();
478 <<
", IntersectStart: " << IntersectStart <<
"\n");
481 auto LastOverlap =
B.find(AIt.stop());
483 LastOverlap !=
B.end() && LastOverlap.start() < AIt.stop();
485 <<
", IntersectEnd: " << IntersectEnd <<
"\n");
488 if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
496 if (*AIt && *AIt == *FirstOverlap)
497 Result.insert(AIt.start(), AIt.stop(), *AIt);
505 auto Next = FirstOverlap;
506 if (IntersectStart) {
509 if (*AIt && *AIt == *FirstOverlap)
510 Result.insert(AIt.start(), FirstOverlap.stop(), *AIt);
520 if (*AIt && *AIt == *LastOverlap)
521 Result.insert(LastOverlap.start(), AIt.stop(), *AIt);
530 while (
Next !=
B.end() &&
Next.start() < AIt.stop() &&
531 Next.stop() <= AIt.stop()) {
533 <<
"- insert intersection of a and " <<
toString(
Next));
534 if (*AIt && *AIt == *
Next)
544 void meetVars(VarFragMap &
A,
const VarFragMap &
B) {
548 for (
auto It =
A.begin(), End =
A.end(); It != End; ++It) {
549 unsigned AVar = It->first;
550 FragsInMemMap &AFrags = It->second;
551 auto BIt =
B.find(AVar);
552 if (BIt ==
B.end()) {
557 <<
Aggregates[AVar].first->getName() <<
"\n");
558 AFrags = meetFragments(AFrags, BIt->second);
562 bool meet(
const BasicBlock &BB,
563 const SmallPtrSet<BasicBlock *, 16> &Visited) {
568 bool FirstMeet =
true;
575 if (!Visited.
count(Pred))
578 auto PredLiveOut = LiveOut.
find(Pred);
583 BBLiveIn = PredLiveOut->second;
586 LLVM_DEBUG(
dbgs() <<
"BBLiveIn = meet BBLiveIn, " << Pred->getName()
588 meetVars(BBLiveIn, PredLiveOut->second);
594 if (BBLiveIn.size() == 0)
603 CurrentLiveInEntry->second = std::move(BBLiveIn);
609 if (!varFragMapsAreEqual(BBLiveIn, CurrentLiveInEntry->second)) {
611 CurrentLiveInEntry->second = std::move(BBLiveIn);
619 void insertMemLoc(BasicBlock &BB,
VarLocInsertPt Before,
unsigned Var,
620 unsigned StartBit,
unsigned EndBit,
unsigned Base,
622 assert(StartBit < EndBit &&
"Cannot create fragment of size <= 0");
627 Loc.OffsetInBits = StartBit;
628 Loc.SizeInBits = EndBit - StartBit;
629 assert(
Base &&
"Expected a non-zero ID for Base address");
632 BBInsertBeforeMap[&BB][Before].push_back(Loc);
634 <<
" bits [" << StartBit <<
", " << EndBit <<
")\n");
641 void coalesceFragments(BasicBlock &BB,
VarLocInsertPt Before,
unsigned Var,
642 unsigned StartBit,
unsigned EndBit,
unsigned Base,
644 if (!CoalesceAdjacentFragments)
651 auto CoalescedFrag = FragMap.find(StartBit);
653 if (CoalescedFrag.start() == StartBit && CoalescedFrag.stop() == EndBit)
656 LLVM_DEBUG(
dbgs() <<
"- Insert loc for bits " << CoalescedFrag.start()
657 <<
" to " << CoalescedFrag.stop() <<
"\n");
658 insertMemLoc(BB, Before, Var, CoalescedFrag.start(), CoalescedFrag.stop(),
662 void addDef(
const VarLocInfo &VarLoc,
VarLocInsertPt Before, BasicBlock &BB,
663 VarFragMap &LiveSet) {
676 const DIExpression *DIExpr = VarLoc.
Expr;
680 StartBit = Frag->OffsetInBits;
681 EndBit = StartBit + Frag->SizeInBits;
696 const unsigned Base =
697 DerefOffsetInBytes && *DerefOffsetInBytes * 8 == StartBit
701 << StartBit <<
", " << EndBit <<
"): " <<
toString(
Base)
708 auto FragIt = LiveSet.find(Var);
711 if (FragIt == LiveSet.end()) {
713 auto P = LiveSet.try_emplace(Var, FragsInMemMap(IntervalMapAlloc));
714 assert(
P.second &&
"Var already in map?");
716 P.first->second.insert(StartBit, EndBit,
Base);
721 FragsInMemMap &FragMap = FragIt->second;
724 if (!FragMap.overlaps(StartBit, EndBit)) {
726 FragMap.insert(StartBit, EndBit,
Base);
727 coalesceFragments(BB, Before, Var, StartBit, EndBit,
Base, VarLoc.
DL,
734 auto FirstOverlap = FragMap.find(StartBit);
735 assert(FirstOverlap != FragMap.end());
736 bool IntersectStart = FirstOverlap.start() < StartBit;
739 auto LastOverlap = FragMap.find(EndBit);
740 bool IntersectEnd = LastOverlap.valid() && LastOverlap.start() < EndBit;
743 if (IntersectStart && IntersectEnd && FirstOverlap == LastOverlap) {
744 LLVM_DEBUG(
dbgs() <<
"- Intersect single interval @ both ends\n");
752 auto EndBitOfOverlap = FirstOverlap.stop();
753 unsigned OverlapValue = FirstOverlap.value();
756 FirstOverlap.setStop(StartBit);
757 insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
758 OverlapValue, VarLoc.
DL);
761 FragMap.insert(EndBit, EndBitOfOverlap, OverlapValue);
762 insertMemLoc(BB, Before, Var, EndBit, EndBitOfOverlap, OverlapValue,
766 FragMap.insert(StartBit, EndBit,
Base);
776 if (IntersectStart) {
779 FirstOverlap.setStop(StartBit);
780 insertMemLoc(BB, Before, Var, FirstOverlap.start(), StartBit,
781 *FirstOverlap, VarLoc.
DL);
790 LastOverlap.setStart(EndBit);
791 insertMemLoc(BB, Before, Var, EndBit, LastOverlap.stop(), *LastOverlap,
807 auto It = FirstOverlap;
810 while (It.valid() && It.start() >= StartBit && It.stop() <= EndBit) {
815 assert(!FragMap.overlaps(StartBit, EndBit));
817 FragMap.insert(StartBit, EndBit,
Base);
820 coalesceFragments(BB, Before, Var, StartBit, EndBit,
Base, VarLoc.
DL,
// Returns true if this variable should be skipped by the fragment fill:
// a DILocalVariable whose getSizeInBits() yields no/zero size cannot have
// meaningful fragment intervals tracked for it.
824 bool skipVariable(
const DILocalVariable *V) {
return !
V->getSizeInBits(); }
826 void process(BasicBlock &BB, VarFragMap &LiveSet) {
827 BBInsertBeforeMap[&BB].
clear();
829 for (DbgVariableRecord &DVR :
filterDbgVars(
I.getDbgRecordRange())) {
830 if (
const auto *Locs = FnVarLocs->
getWedge(&DVR)) {
831 for (
const VarLocInfo &Loc : *Locs) {
832 addDef(Loc, &DVR, *
I.getParent(), LiveSet);
836 if (
const auto *Locs = FnVarLocs->
getWedge(&
I)) {
837 for (
const VarLocInfo &Loc : *Locs) {
838 addDef(Loc, &
I, *
I.getParent(), LiveSet);
// Trivial constructor: records the function to analyse, the set of
// aggregates known to have a stack slot, and whether adjacent fragments
// may be coalesced. No analysis is performed here (see run()).
845 MemLocFragmentFill(Function &Fn,
846 const DenseSet<DebugAggregate> *VarsWithStackSlot,
847 bool CoalesceAdjacentFragments)
848 : Fn(Fn), VarsWithStackSlot(VarsWithStackSlot),
849 CoalesceAdjacentFragments(CoalesceAdjacentFragments) {}
871 void run(FunctionVarLocsBuilder *FnVarLocs) {
875 this->FnVarLocs = FnVarLocs;
879 ReversePostOrderTraversal<Function *> RPOT(&Fn);
880 std::priority_queue<unsigned int, std::vector<unsigned int>,
881 std::greater<unsigned int>>
883 std::priority_queue<unsigned int, std::vector<unsigned int>,
884 std::greater<unsigned int>>
886 DenseMap<unsigned int, BasicBlock *> OrderToBB;
887 DenseMap<BasicBlock *, unsigned int> BBToOrder;
889 unsigned int RPONumber = 0;
890 for (BasicBlock *BB : RPOT) {
891 OrderToBB[RPONumber] = BB;
892 BBToOrder[BB] = RPONumber;
893 Worklist.push(RPONumber);
909 SmallPtrSet<BasicBlock *, 16> Visited;
910 while (!Worklist.empty() || !Pending.empty()) {
914 SmallPtrSet<BasicBlock *, 16> OnPending;
916 while (!Worklist.empty()) {
920 bool InChanged = meet(*BB, Visited);
922 InChanged |= Visited.
insert(BB).second;
925 << BB->
getName() <<
" has new InLocs, process it\n");
929 VarFragMap LiveSet = LiveIn[BB];
932 process(*BB, LiveSet);
935 if (!varFragMapsAreEqual(LiveOut[BB], LiveSet)) {
937 <<
" has new OutLocs, add succs to worklist: [ ");
938 LiveOut[BB] = std::move(LiveSet);
940 if (OnPending.
insert(Succ).second) {
942 Pending.push(BBToOrder[Succ]);
949 Worklist.swap(Pending);
952 assert(Pending.empty() &&
"Pending should be empty");
956 for (
auto &Pair : BBInsertBeforeMap) {
957 InsertMap &
Map = Pair.second;
958 for (
auto &Pair : Map) {
959 auto InsertBefore = Pair.first;
960 assert(InsertBefore &&
"should never be null");
961 auto FragMemLocs = Pair.second;
964 for (
auto &FragMemLoc : FragMemLocs) {
965 DIExpression *Expr = DIExpression::get(Ctx, {});
966 if (FragMemLoc.SizeInBits !=
967 *
Aggregates[FragMemLoc.Var].first->getSizeInBits())
969 Expr, FragMemLoc.OffsetInBits, FragMemLoc.SizeInBits);
971 FragMemLoc.OffsetInBits / 8);
972 DebugVariable Var(
Aggregates[FragMemLoc.Var].first, Expr,
973 FragMemLoc.DL.getInlinedAt());
974 FnVarLocs->
addVarLoc(InsertBefore, Var, Expr, FragMemLoc.DL,
975 Bases[FragMemLoc.Base]);
985class AssignmentTrackingLowering {
1010 enum class LocKind { Mem, Val,
None };
1027 enum S { Known, NoneOrPhi } Status;
1032 DbgVariableRecord *
Source =
nullptr;
1034 bool isSameSourceAssignment(
const Assignment &
Other)
const {
1037 return std::tie(Status,
ID) == std::tie(
Other.Status,
Other.ID);
1039 void dump(raw_ostream &OS) {
1040 static const char *
LUT[] = {
"Known",
"NoneOrPhi"};
1041 OS <<
LUT[Status] <<
"(id=";
1054 static Assignment make(DIAssignID *
ID, DbgVariableRecord *Source) {
1056 "Cannot make an assignment from a non-assign DbgVariableRecord");
1057 return Assignment(Known,
ID, Source);
1059 static Assignment makeFromMemDef(DIAssignID *
ID) {
1060 return Assignment(Known,
ID);
// Build an Assignment in the NoneOrPhi state with a null DIAssignID,
// i.e. "the assignment is unknown or joins differing predecessors".
1062 static Assignment makeNoneOrPhi() {
return Assignment(NoneOrPhi,
nullptr); }
// Default-construct in the NoneOrPhi state with a null ID (same state as
// makeNoneOrPhi()).
1064 Assignment() : Status(NoneOrPhi),
ID(nullptr) {}
1065 Assignment(S Status, DIAssignID *
ID) : Status(Status),
ID(
ID) {
1069 Assignment(S Status, DIAssignID *
ID, DbgVariableRecord *Source)
1076 using AssignmentMap = SmallVector<Assignment>;
1077 using LocMap = SmallVector<LocKind>;
1078 using OverlapMap = DenseMap<VariableID, SmallVector<VariableID>>;
1079 using UntaggedStoreAssignmentMap =
1082 using UnknownStoreAssignmentMap =
1083 DenseMap<const Instruction *, SmallVector<VariableID>>;
1088 unsigned TrackedVariablesVectorSize = 0;
1093 UntaggedStoreAssignmentMap UntaggedStoreVars;
1096 UnknownStoreAssignmentMap UnknownStoreVars;
1099 using InstInsertMap = MapVector<VarLocInsertPt, SmallVector<VarLocInfo>>;
1100 InstInsertMap InsertBeforeMap;
1103 void resetInsertionPoint(Instruction &After);
1104 void resetInsertionPoint(DbgVariableRecord &After);
1106 void emitDbgValue(LocKind Kind, DbgVariableRecord *,
VarLocInsertPt After);
1108 static bool mapsAreEqual(
const BitVector &Mask,
const AssignmentMap &
A,
1109 const AssignmentMap &
B) {
1111 return A[VarID].isSameSourceAssignment(B[VarID]);
1120 BitVector VariableIDsInBlock;
1123 AssignmentMap StackHomeValue;
1125 AssignmentMap DebugValue;
1140 const AssignmentMap &getAssignmentMap(AssignmentKind Kind)
const {
1143 return StackHomeValue;
1149 AssignmentMap &getAssignmentMap(AssignmentKind Kind) {
1150 return const_cast<AssignmentMap &
>(
1151 const_cast<const BlockInfo *
>(
this)->getAssignmentMap(Kind));
1154 bool isVariableTracked(
VariableID Var)
const {
1155 return VariableIDsInBlock[
static_cast<unsigned>(Var)];
1158 const Assignment &getAssignment(AssignmentKind Kind,
VariableID Var)
const {
1159 assert(isVariableTracked(Var) &&
"Var not tracked in block");
1160 return getAssignmentMap(Kind)[
static_cast<unsigned>(Var)];
1164 assert(isVariableTracked(Var) &&
"Var not tracked in block");
1165 return LiveLoc[
static_cast<unsigned>(Var)];
1171 VariableIDsInBlock.
set(
static_cast<unsigned>(Var));
1172 LiveLoc[
static_cast<unsigned>(Var)] = K;
1178 void setAssignment(AssignmentKind Kind,
VariableID Var,
1179 const Assignment &AV) {
1180 VariableIDsInBlock.
set(
static_cast<unsigned>(Var));
1181 getAssignmentMap(Kind)[
static_cast<unsigned>(Var)] = AV;
1187 bool hasAssignment(AssignmentKind Kind,
VariableID Var,
1188 const Assignment &AV)
const {
1189 if (!isVariableTracked(Var))
1191 return AV.isSameSourceAssignment(getAssignment(Kind, Var));
1197 return VariableIDsInBlock ==
Other.VariableIDsInBlock &&
1198 LiveLoc ==
Other.LiveLoc &&
1199 mapsAreEqual(VariableIDsInBlock, StackHomeValue,
1200 Other.StackHomeValue) &&
1201 mapsAreEqual(VariableIDsInBlock, DebugValue,
Other.DebugValue);
1205 return LiveLoc.size() == DebugValue.size() &&
1206 LiveLoc.size() == StackHomeValue.size();
1210 void init(
int NumVars) {
1211 StackHomeValue.clear();
1214 VariableIDsInBlock = BitVector(NumVars);
1215 StackHomeValue.insert(StackHomeValue.begin(), NumVars,
1216 Assignment::makeNoneOrPhi());
1217 DebugValue.insert(DebugValue.begin(), NumVars,
1218 Assignment::makeNoneOrPhi());
1219 LiveLoc.
insert(LiveLoc.
begin(), NumVars, LocKind::None);
1223 template <
typename ElmtType,
typename FnInputType>
1227 ElmtType (*Fn)(FnInputType, FnInputType)) {
1232 static BlockInfo
join(
const BlockInfo &
A,
const BlockInfo &
B,
int NumVars) {
1251 BitVector Intersect =
A.VariableIDsInBlock;
1252 Intersect &=
B.VariableIDsInBlock;
1255 joinElmt(
VarID, Join.LiveLoc,
A.LiveLoc,
B.LiveLoc, joinKind);
1256 joinElmt(
VarID, Join.DebugValue,
A.DebugValue,
B.DebugValue,
1258 joinElmt(
VarID, Join.StackHomeValue,
A.StackHomeValue,
B.StackHomeValue,
1262 Join.VariableIDsInBlock =
A.VariableIDsInBlock;
1263 Join.VariableIDsInBlock |=
B.VariableIDsInBlock;
1270 const DataLayout &Layout;
1271 const DenseSet<DebugAggregate> *VarsWithStackSlot;
1272 FunctionVarLocsBuilder *FnVarLocs;
1273 DenseMap<const BasicBlock *, BlockInfo> LiveIn;
1274 DenseMap<const BasicBlock *, BlockInfo> LiveOut;
1277 DenseSet<VariableID> VarsTouchedThisFrame;
1280 DenseSet<DebugAggregate> NotAlwaysStackHomed;
1282 VariableID getVariableID(
const DebugVariable &Var) {
1290 bool join(
const BasicBlock &BB,
const SmallPtrSet<BasicBlock *, 16> &Visited);
1310 static LocKind joinKind(LocKind
A, LocKind
B);
1311 static Assignment joinAssignment(
const Assignment &
A,
const Assignment &
B);
1312 BlockInfo joinBlockInfo(
const BlockInfo &
A,
const BlockInfo &
B);
1318 void process(BasicBlock &BB, BlockInfo *LiveSet);
1323 void processNonDbgInstruction(Instruction &
I, BlockInfo *LiveSet);
1326 void processTaggedInstruction(Instruction &
I, BlockInfo *LiveSet);
1329 void processUntaggedInstruction(Instruction &
I, BlockInfo *LiveSet);
1330 void processUnknownStoreToVariable(Instruction &
I,
VariableID &Var,
1331 BlockInfo *LiveSet);
1332 void processDbgAssign(DbgVariableRecord *Assign, BlockInfo *LiveSet);
1333 void processDbgVariableRecord(DbgVariableRecord &DVR, BlockInfo *LiveSet);
1334 void processDbgValue(DbgVariableRecord *DbgValue, BlockInfo *LiveSet);
1336 void addMemDef(BlockInfo *LiveSet,
VariableID Var,
const Assignment &AV);
1338 void addDbgDef(BlockInfo *LiveSet,
VariableID Var,
const Assignment &AV);
1342 void setLocKind(BlockInfo *LiveSet,
VariableID Var, LocKind K);
1345 LocKind getLocKind(BlockInfo *LiveSet,
VariableID Var);
1347 bool hasVarWithAssignment(BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
1358 bool emitPromotedVarLocs(FunctionVarLocsBuilder *FnVarLocs);
// Trivial constructor: captures the function, its DataLayout, and the set
// of aggregates known to have a stack slot. The dataflow itself runs in
// run().
1361 AssignmentTrackingLowering(Function &Fn,
const DataLayout &Layout,
1362 const DenseSet<DebugAggregate> *VarsWithStackSlot)
1363 : Fn(Fn), Layout(Layout), VarsWithStackSlot(VarsWithStackSlot) {}
1366 bool run(FunctionVarLocsBuilder *FnVarLocs);
1371AssignmentTrackingLowering::getContainedFragments(
VariableID Var)
const {
1372 auto R = VarContains.find(Var);
1373 if (R == VarContains.end())
1378void AssignmentTrackingLowering::touchFragment(
VariableID Var) {
1379 VarsTouchedThisFrame.insert(Var);
1382void AssignmentTrackingLowering::setLocKind(BlockInfo *LiveSet,
VariableID Var,
1384 auto SetKind = [
this](BlockInfo *LiveSet,
VariableID Var, LocKind
K) {
1385 LiveSet->setLocKind(Var, K);
1388 SetKind(LiveSet, Var, K);
1391 for (
VariableID Frag : getContainedFragments(Var))
1392 SetKind(LiveSet, Frag, K);
1395AssignmentTrackingLowering::LocKind
1396AssignmentTrackingLowering::getLocKind(BlockInfo *LiveSet,
VariableID Var) {
1397 return LiveSet->getLocKind(Var);
1400void AssignmentTrackingLowering::addMemDef(BlockInfo *LiveSet,
VariableID Var,
1401 const Assignment &AV) {
1402 LiveSet->setAssignment(BlockInfo::Stack, Var, AV);
1407 Assignment FragAV = AV;
1408 FragAV.Source =
nullptr;
1409 for (
VariableID Frag : getContainedFragments(Var))
1410 LiveSet->setAssignment(BlockInfo::Stack, Frag, FragAV);
1413void AssignmentTrackingLowering::addDbgDef(BlockInfo *LiveSet,
VariableID Var,
1414 const Assignment &AV) {
1415 LiveSet->setAssignment(BlockInfo::Debug, Var, AV);
1420 Assignment FragAV = AV;
1421 FragAV.Source =
nullptr;
1422 for (
VariableID Frag : getContainedFragments(Var))
1423 LiveSet->setAssignment(BlockInfo::Debug, Frag, FragAV);
1432 "Cannot get a DIAssignID from a non-assign DbgVariableRecord!");
1437bool AssignmentTrackingLowering::hasVarWithAssignment(
1438 BlockInfo *LiveSet, BlockInfo::AssignmentKind Kind,
VariableID Var,
1439 const Assignment &AV) {
1440 if (!LiveSet->hasAssignment(Kind, Var, AV))
1445 for (
VariableID Frag : getContainedFragments(Var))
1446 if (!LiveSet->hasAssignment(Kind, Frag, AV))
1452const char *
locStr(AssignmentTrackingLowering::LocKind
Loc) {
1453 using LocKind = AssignmentTrackingLowering::LocKind;
1474 if (!
Next->hasDbgRecords())
1476 return &*
Next->getDbgRecordRange().begin();
1484void AssignmentTrackingLowering::emitDbgValue(
1497 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1506 InsertBeforeMap[InsertBefore].
push_back(VarLoc);
1510 if (Kind == LocKind::Mem) {
1515 if (
Assign->isKillAddress()) {
1517 Kind = LocKind::Val;
1522 "fragment info should be stored in value-expression only");
1525 if (
auto OptFragInfo =
Source->getExpression()->getFragmentInfo()) {
1526 auto FragInfo = *OptFragInfo;
1528 Expr, FragInfo.OffsetInBits, FragInfo.SizeInBits);
1531 std::tie(Val, Expr) =
1538 if (Kind == LocKind::Val) {
1539 Emit(
Source->getRawLocation(),
Source->getExpression());
1543 if (Kind == LocKind::None) {
1544 Emit(
nullptr,
Source->getExpression());
1549void AssignmentTrackingLowering::processNonDbgInstruction(
1550 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1551 if (
I.hasMetadata(LLVMContext::MD_DIAssignID))
1552 processTaggedInstruction(
I, LiveSet);
1554 processUntaggedInstruction(
I, LiveSet);
1557void AssignmentTrackingLowering::processUnknownStoreToVariable(
1561 addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1564 if (getLocKind(LiveSet, Var) != LocKind::Mem)
1568 Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
1569 if (DbgAV.Status != Assignment::NoneOrPhi && DbgAV.Source) {
1571 DbgAV.dump(
dbgs());
dbgs() <<
"\n");
1572 setLocKind(LiveSet, Var, LocKind::Val);
1573 emitDbgValue(LocKind::Val, DbgAV.Source, &
I);
1579 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1585 Fn.
getContext(), 0, 0,
V.getVariable()->getScope(), InlinedAt);
1593 InsertBeforeMap[InsertBefore].push_back(VarLoc);
1596void AssignmentTrackingLowering::processUntaggedInstruction(
1597 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1609 assert(!
I.hasMetadata(LLVMContext::MD_DIAssignID));
1610 auto It = UntaggedStoreVars.find(&
I);
1611 if (It == UntaggedStoreVars.end()) {
1618 if (
auto UnhandledStoreIt = UnknownStoreVars.find(&
I);
1619 UnhandledStoreIt != UnknownStoreVars.end()) {
1620 LLVM_DEBUG(
dbgs() <<
"Processing untagged unknown store " <<
I <<
"\n");
1621 for (
auto &Var : UnhandledStoreIt->second)
1622 processUnknownStoreToVariable(
I, Var, LiveSet);
1627 LLVM_DEBUG(
dbgs() <<
"processUntaggedInstruction on UNTAGGED INST " <<
I
1631 for (
auto [Var,
Info] : It->second) {
1635 addMemDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1636 addDbgDef(LiveSet, Var, Assignment::makeNoneOrPhi());
1637 setLocKind(LiveSet, Var, LocKind::Mem);
1645 if (
auto Frag =
V.getFragment()) {
1648 assert(R &&
"unexpected createFragmentExpression failure");
1652 if (
Info.OffsetInBits)
1653 Ops = {dwarf::DW_OP_plus_uconst,
Info.OffsetInBits / 8};
1660 assert(InsertBefore &&
"Shouldn't be inserting after a terminator");
1665 Fn.
getContext(), 0, 0,
V.getVariable()->getScope(), InlinedAt);
1674 InsertBeforeMap[InsertBefore].push_back(VarLoc);
1678void AssignmentTrackingLowering::processTaggedInstruction(
1679 Instruction &
I, AssignmentTrackingLowering::BlockInfo *LiveSet) {
1685 if (LinkedDPAssigns.empty())
1694 "expected Assign's variable to have stack slot");
1697 addMemDef(LiveSet, Var, AV);
1705 if (hasVarWithAssignment(LiveSet, BlockInfo::Debug, Var, AV)) {
1711 LiveSet->DebugValue[
static_cast<unsigned>(Var)].dump(
dbgs());
1713 setLocKind(LiveSet, Var, LocKind::Mem);
1714 emitDbgValue(LocKind::Mem, Assign, &
I);
1723 LocKind PrevLoc = getLocKind(LiveSet, Var);
1725 case LocKind::Val: {
1729 setLocKind(LiveSet, Var, LocKind::Val);
1731 case LocKind::Mem: {
1735 Assignment DbgAV = LiveSet->getAssignment(BlockInfo::Debug, Var);
1736 if (DbgAV.Status == Assignment::NoneOrPhi) {
1739 setLocKind(LiveSet, Var, LocKind::None);
1740 emitDbgValue(LocKind::None, Assign, &
I);
1744 setLocKind(LiveSet, Var, LocKind::Val);
1746 emitDbgValue(LocKind::Val, DbgAV.Source, &
I);
1749 emitDbgValue(LocKind::None, Assign, &
I);
1753 case LocKind::None: {
1757 setLocKind(LiveSet, Var, LocKind::None);
1764 BlockInfo *LiveSet) {
1771 Assignment AV = Assignment::make(
getIDFromMarker(*DbgAssign), DbgAssign);
1772 addDbgDef(LiveSet, Var, AV);
1774 LLVM_DEBUG(
dbgs() <<
"processDbgAssign on " << *DbgAssign <<
"\n";);
1780 if (hasVarWithAssignment(LiveSet, BlockInfo::Stack, Var, AV)) {
1788 <<
"Val, Stack matches Debug program but address is killed\n";);
1789 Kind = LocKind::Val;
1792 Kind = LocKind::Mem;
1794 setLocKind(LiveSet, Var, Kind);
1795 emitDbgValue(Kind, DbgAssign, DbgAssign);
1800 setLocKind(LiveSet, Var, LocKind::Val);
1801 emitDbgValue(LocKind::Val, DbgAssign, DbgAssign);
1806 BlockInfo *LiveSet) {
1819 Assignment AV = Assignment::makeNoneOrPhi();
1820 addDbgDef(LiveSet, Var, AV);
1824 <<
" -> Val, dbg.value override");
1826 setLocKind(LiveSet, Var, LocKind::Val);
1831 if (
auto F =
DbgValue.getExpression()->getFragmentInfo())
1832 return F->SizeInBits == 0;
1836void AssignmentTrackingLowering::processDbgVariableRecord(
1843 processDbgAssign(&DVR, LiveSet);
1845 processDbgValue(&DVR, LiveSet);
1848void AssignmentTrackingLowering::resetInsertionPoint(
Instruction &After) {
1851 if (R == InsertBeforeMap.end())
1857 if (R == InsertBeforeMap.end())
1862void AssignmentTrackingLowering::process(
BasicBlock &BB, BlockInfo *LiveSet) {
1865 bool ProcessedLeadingDbgRecords = !BB.
begin()->hasDbgRecords();
1867 assert(VarsTouchedThisFrame.empty());
1874 if (ProcessedLeadingDbgRecords) {
1879 if (
II->isTerminator())
1881 resetInsertionPoint(*
II);
1882 processNonDbgInstruction(*
II, LiveSet);
1883 assert(LiveSet->isValid());
1889 if (
II != EI &&
II->hasDbgRecords()) {
1894 resetInsertionPoint(DVR);
1895 processDbgVariableRecord(DVR, LiveSet);
1896 assert(LiveSet->isValid());
1899 ProcessedLeadingDbgRecords =
true;
1907 for (
auto Var : VarsTouchedThisFrame) {
1908 LocKind
Loc = getLocKind(LiveSet, Var);
1916 if (
Loc != LocKind::Mem) {
1919 NotAlwaysStackHomed.insert(Aggr);
1922 VarsTouchedThisFrame.clear();
1926AssignmentTrackingLowering::LocKind
1927AssignmentTrackingLowering::joinKind(LocKind
A, LocKind
B) {
1930 return A ==
B ?
A : LocKind::None;
1933AssignmentTrackingLowering::Assignment
1934AssignmentTrackingLowering::joinAssignment(
const Assignment &
A,
1935 const Assignment &
B) {
1942 if (!
A.isSameSourceAssignment(
B))
1943 return Assignment::makeNoneOrPhi();
1944 if (
A.Status == Assignment::NoneOrPhi)
1945 return Assignment::makeNoneOrPhi();
1961 if (
A.Source ==
B.Source)
1963 if (!
A.Source || !
B.Source)
1965 if (
A.Source->isEquivalentTo(*
B.Source))
1970 assert(
A.Status ==
B.Status &&
A.Status == Assignment::Known);
1972 return Assignment::make(
A.ID, Source);
1975AssignmentTrackingLowering::BlockInfo
1976AssignmentTrackingLowering::joinBlockInfo(
const BlockInfo &
A,
1977 const BlockInfo &
B) {
1978 return BlockInfo::join(
A,
B, TrackedVariablesVectorSize);
1981bool AssignmentTrackingLowering::join(
1993 if (Visited.
count(Pred))
1998 if (VisitedPreds.
empty()) {
2000 bool DidInsert = It.second;
2002 It.first->second.init(TrackedVariablesVectorSize);
2007 if (VisitedPreds.
size() == 1) {
2008 const BlockInfo &PredLiveOut = LiveOut.
find(VisitedPreds[0])->second;
2015 if (PredLiveOut != CurrentLiveInEntry->second) {
2016 CurrentLiveInEntry->second = PredLiveOut;
2024 const BlockInfo &PredLiveOut0 = LiveOut.
find(VisitedPreds[0])->second;
2025 const BlockInfo &PredLiveOut1 = LiveOut.
find(VisitedPreds[1])->second;
2026 BlockInfo BBLiveIn = joinBlockInfo(PredLiveOut0, PredLiveOut1);
2031 const auto &PredLiveOut = LiveOut.
find(Pred);
2033 "block should have been processed already");
2034 BBLiveIn = joinBlockInfo(std::move(BBLiveIn), PredLiveOut->second);
2038 auto CurrentLiveInEntry = LiveIn.
find(&BB);
2041 if (CurrentLiveInEntry == LiveIn.
end())
2043 else if (BBLiveIn != CurrentLiveInEntry->second)
2044 CurrentLiveInEntry->second = std::move(BBLiveIn);
2053 auto ALeft =
A.OffsetInBits;
2054 auto BLeft =
B.OffsetInBits;
2058 auto ARight = ALeft +
A.SizeInBits;
2059 auto BRight = BLeft +
B.SizeInBits;
2060 if (BRight > ARight)
2065static std::optional<at::AssignmentInfo>
2075 return std::nullopt;
2083 if (
ID != Intrinsic::experimental_vp_strided_store &&
2084 ID != Intrinsic::masked_store &&
ID != Intrinsic::vp_scatter &&
2085 ID != Intrinsic::masked_scatter &&
ID != Intrinsic::vp_store &&
2086 ID != Intrinsic::masked_compressstore)
2120 AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars,
2121 AssignmentTrackingLowering::UnknownStoreAssignmentMap &UnknownStoreVars,
2122 unsigned &TrackedVariablesVectorSize) {
2136 if (
Record->isDbgDeclare()) {
2142 if (!VarsWithStackSlot.
contains(DA))
2144 if (Seen.
insert(DV).second)
2145 FragmentMap[DA].push_back(DV);
2147 for (
auto &BB : Fn) {
2148 for (
auto &
I : BB) {
2150 ProcessDbgRecord(&DVR);
2154 std::optional<DIExpression::FragmentInfo> FragInfo;
2159 I.getDataLayout(),
Info->Base,
2160 Info->OffsetInBits,
Info->SizeInBits, Assign, FragInfo) ||
2161 (FragInfo && FragInfo->SizeInBits == 0))
2170 FragInfo = Assign->getExpression()->getFragmentInfo();
2174 Assign->getDebugLoc().getInlinedAt());
2176 if (!VarsWithStackSlot.
contains(DA))
2180 UntaggedStoreVars[&
I].push_back(
2183 if (Seen.
insert(DV).second)
2184 FragmentMap[DA].push_back(DV);
2187 HandleDbgAssignForStore(DVR);
2195 Assign->getDebugLoc().getInlinedAt());
2197 if (!VarsWithStackSlot.
contains(DA))
2204 HandleDbgAssignForUnknownStore(DVR);
2211 for (
auto &Pair : FragmentMap) {
2213 std::sort(Frags.
begin(), Frags.
end(),
2215 return Elmt.getFragmentOrDefault().SizeInBits >
2216 Next.getFragmentOrDefault().SizeInBits;
2223 AssignmentTrackingLowering::OverlapMap Map;
2224 for (
auto &Pair : FragmentMap) {
2225 auto &Frags = Pair.second;
2226 for (
auto It = Frags.begin(), IEnd = Frags.end(); It != IEnd; ++It) {
2236 for (; OtherIt != IEnd; ++OtherIt) {
2240 Map[OtherVar].push_back(ThisVar);
2251 for (
auto *DVR : DPDeclares)
2258bool AssignmentTrackingLowering::run(FunctionVarLocsBuilder *FnVarLocsBuilder) {
2261 <<
": too many blocks (" << Fn.
size() <<
")\n");
2266 FnVarLocs = FnVarLocsBuilder;
2276 Fn, FnVarLocs, *VarsWithStackSlot, UntaggedStoreVars, UnknownStoreVars,
2277 TrackedVariablesVectorSize);
2281 std::priority_queue<unsigned int, std::vector<unsigned int>,
2282 std::greater<unsigned int>>
2284 std::priority_queue<unsigned int, std::vector<unsigned int>,
2285 std::greater<unsigned int>>
2290 unsigned int RPONumber = 0;
2292 OrderToBB[RPONumber] = BB;
2293 BBToOrder[BB] = RPONumber;
2294 Worklist.push(RPONumber);
2312 while (!Worklist.empty()) {
2317 while (!Worklist.empty()) {
2321 bool InChanged =
join(*BB, Visited);
2323 InChanged |= Visited.
insert(BB).second;
2328 BlockInfo LiveSet = LiveIn[BB];
2331 process(*BB, &LiveSet);
2334 if (LiveOut[BB] != LiveSet) {
2336 <<
" has new OutLocs, add succs to worklist: [ ");
2337 LiveOut[BB] = std::move(LiveSet);
2339 if (OnPending.
insert(Succ).second) {
2341 Pending.push(BBToOrder[Succ]);
2348 Worklist.swap(Pending);
2351 assert(Pending.empty() &&
"Pending should be empty");
2357 bool InsertedAnyIntrinsics =
false;
2366 for (
const auto &Pair : InsertBeforeMap) {
2367 auto &Vec = Pair.second;
2373 if (NotAlwaysStackHomed.contains(Aggr))
2383 NotAlwaysStackHomed.insert(Aggr);
2392 if (AlwaysStackHomed.
insert(Aggr).second) {
2401 InsertedAnyIntrinsics =
true;
2407 for (
const auto &[InsertBefore, Vec] : InsertBeforeMap) {
2414 if (AlwaysStackHomed.
contains(Aggr))
2417 InsertedAnyIntrinsics =
true;
2420 FnVarLocs->
setWedge(InsertBefore, std::move(NewDefs));
2423 InsertedAnyIntrinsics |= emitPromotedVarLocs(FnVarLocs);
2425 return InsertedAnyIntrinsics;
2428bool AssignmentTrackingLowering::emitPromotedVarLocs(
2429 FunctionVarLocsBuilder *FnVarLocs) {
2430 bool InsertedAnyIntrinsics =
false;
2439 assert(InsertBefore &&
"Unexpected: debug intrinsics after a terminator");
2443 InsertedAnyIntrinsics =
true;
2445 for (
auto &BB : Fn) {
2446 for (
auto &
I : BB) {
2450 TranslateDbgRecord(&DVR);
2453 return InsertedAnyIntrinsics;
2473 VariableDefinedBytes.
clear();
2475 auto HandleLocsForWedge = [&](
auto *WedgePosition) {
2477 const auto *Locs = FnVarLocs.
getWedge(WedgePosition);
2482 bool ChangedThisWedge =
false;
2487 for (
auto RIt = Locs->rbegin(), REnd = Locs->rend(); RIt != REnd; ++RIt) {
2491 uint64_t SizeInBits = Aggr.first->getSizeInBits().value_or(0);
2495 const uint64_t MaxSizeBytes = 2048;
2497 if (SizeInBytes == 0 || SizeInBytes > MaxSizeBytes) {
2511 bool FirstDefinition = InsertResult.second;
2512 BitVector &DefinedBytes = InsertResult.first->second;
2515 RIt->Expr->getFragmentInfo().value_or(
2517 bool InvalidFragment = Fragment.endInBits() > SizeInBits;
2518 uint64_t StartInBytes = Fragment.startInBits() / 8;
2522 if (FirstDefinition || InvalidFragment ||
2524 if (!InvalidFragment)
2525 DefinedBytes.
set(StartInBytes, EndInBytes);
2532 ChangedThisWedge =
true;
2537 if (ChangedThisWedge) {
2538 std::reverse(NewDefsReversed.
begin(), NewDefsReversed.
end());
2539 FnVarLocs.
setWedge(WedgePosition, std::move(NewDefsReversed));
2544 HandleLocsForWedge(&
I);
2546 HandleLocsForWedge(&DVR);
2571 auto HandleLocsForWedge = [&](
auto *WedgePosition) {
2572 const auto *Locs = FnVarLocs.
getWedge(WedgePosition);
2577 bool ChangedThisWedge =
false;
2585 std::nullopt,
Loc.DL.getInlinedAt());
2590 if (Inserted || VMI->second.first !=
Loc.Values ||
2591 VMI->second.second !=
Loc.Expr) {
2592 VMI->second = {
Loc.Values,
Loc.Expr};
2598 ChangedThisWedge =
true;
2603 if (ChangedThisWedge) {
2604 FnVarLocs.
setWedge(WedgePosition, std::move(NewDefs));
2611 HandleLocsForWedge(&DVR);
2612 HandleLocsForWedge(&
I);
2637 VarsWithDef[
A].
insert(V.getFragmentOrDefault());
2643 auto FragsIt = VarsWithDef.
find(
A);
2644 if (FragsIt == VarsWithDef.
end())
2647 return DIExpression::fragmentsOverlap(Frag, V.getFragmentOrDefault());
2658 auto HandleLocsForWedge = [&](
auto *WedgePosition) {
2659 const auto *Locs = FnVarLocs.
getWedge(WedgePosition);
2664 bool ChangedThisWedge =
false;
2672 Loc.DL.getInlinedAt()};
2677 if (
Loc.Values.isKillLocation(
Loc.Expr) && !HasDefinedBits(Aggr, Var)) {
2680 ChangedThisWedge =
true;
2684 DefineBits(Aggr, Var);
2689 if (ChangedThisWedge) {
2690 FnVarLocs.
setWedge(WedgePosition, std::move(NewDefs));
2696 HandleLocsForWedge(&DVR);
2697 HandleLocsForWedge(&
I);
2705 bool MadeChanges =
false;
2719 for (
auto &BB : Fn) {
2720 for (
auto &
I : BB) {
2746 AssignmentTrackingLowering
Pass(Fn, Layout, &VarsWithStackSlot);
2751 MemLocFragmentFill
Pass(Fn, &VarsWithStackSlot,
2753 Pass.run(FnVarLocs);
2770 auto &
DL =
F.getDataLayout();
2794 LLVM_DEBUG(
dbgs() <<
"AssignmentTrackingAnalysis run on " <<
F.getName()
2804 Results->init(Builder);
2807 Results->print(
errs(),
F);
2819 "Assignment Tracking Analysis",
false,
true)
assert(UImm &&(UImm !=~static_cast< T >(0)) &&"Invalid immediate!")
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
Function Alias Analysis Results
std::pair< const DILocalVariable *, const DILocation * > DebugAggregate
A whole (unfragmented) source variable.
VarLocInsertPt getNextNode(const DbgRecord *DVR)
static void analyzeFunction(Function &Fn, const DataLayout &Layout, FunctionVarLocsBuilder *FnVarLocs)
static std::pair< Value *, DIExpression * > walkToAllocaAndPrependOffsetDeref(const DataLayout &DL, Value *Start, DIExpression *Expression)
Walk backwards along constant GEPs and bitcasts to the base storage from Start as far as possible.
static DenseSet< DebugAggregate > findVarsWithStackSlot(Function &Fn)
static AssignmentTrackingLowering::OverlapMap buildOverlapMapAndRecordDeclares(Function &Fn, FunctionVarLocsBuilder *FnVarLocs, const DenseSet< DebugAggregate > &VarsWithStackSlot, AssignmentTrackingLowering::UntaggedStoreAssignmentMap &UntaggedStoreVars, AssignmentTrackingLowering::UnknownStoreAssignmentMap &UnknownStoreVars, unsigned &TrackedVariablesVectorSize)
Build a map of {Variable x: Variables y} where all variable fragments contained within the variable f...
static bool fullyContains(DIExpression::FragmentInfo A, DIExpression::FragmentInfo B)
Return true if A fully contains B.
static std::optional< at::AssignmentInfo > getUntaggedStoreAssignmentInfo(const Instruction &I, const DataLayout &Layout)
static bool removeUndefDbgLocsFromEntryBlock(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
static cl::opt< bool > PrintResults("print-debug-ata", cl::init(false), cl::Hidden)
Print the results of the analysis. Respects -filter-print-funcs.
const char * locStr(AssignmentTrackingLowering::LocKind Loc)
PointerUnion< const Instruction *, const DbgRecord * > VarLocInsertPt
static bool removeRedundantDbgLocsUsingForwardScan(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
Remove redundant location defs using a forward scan.
static bool removeRedundantDbgLocs(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
static cl::opt< bool > EnableMemLocFragFill("mem-loc-frag-fill", cl::init(true), cl::Hidden)
Option for debugging the pass, determines if the memory location fragment filling happens after gener...
static DIAssignID * getIDFromMarker(const DbgVariableRecord &DVR)
static DebugAggregate getAggregate(const DebugVariable &Var)
static bool hasZeroSizedFragment(DbgVariableRecord &DbgValue)
static DIAssignID * getIDFromInst(const Instruction &I)
AllocaInst * getUnknownStore(const Instruction &I, const DataLayout &Layout)
static std::optional< int64_t > getDerefOffsetInBytes(const DIExpression *DIExpr)
Extract the offset used in DIExpr.
static bool removeRedundantDbgLocsUsingBackwardScan(const BasicBlock *BB, FunctionVarLocsBuilder &FnVarLocs)
Remove redundant definitions within sequences of consecutive location defs.
static cl::opt< cl::boolOrDefault > CoalesceAdjacentFragmentsOpt("debug-ata-coalesce-frags", cl::Hidden)
Coalesce adjacent dbg locs describing memory locations that have contiguous fragments.
static cl::opt< unsigned > MaxNumBlocks("debug-ata-max-blocks", cl::init(10000), cl::desc("Maximum num basic blocks before debug info dropped"), cl::Hidden)
static bool shouldCoalesceFragments(Function &F)
This file implements the BitVector class.
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
Analysis containing CSE Info
static ManagedStatic< cl::opt< bool, true >, CreateDebug > Debug
This file defines DenseMapInfo traits for DenseMap.
This file contains constants used for implementing Dwarf debug support.
Module.h This file contains the declarations for the Module class.
This header defines various interfaces for pass management in LLVM.
This file implements a coalescing interval map for small objects.
const AbstractManglingParser< Derived, Alloc >::OperatorInfo AbstractManglingParser< Derived, Alloc >::Ops[]
IntervalMap< SlotIndex, DbgVariableValue, 4 > LocMap
Map of where a user value is live to that value.
print mir2vec MIR2Vec Vocabulary Printer Pass
uint64_t IntrinsicInst * II
FunctionAnalysisManager FAM
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
This file builds on the ADT/GraphTraits.h file to build a generic graph post order iterator.
static bool isValid(const char C)
Returns true if C is a valid mangled character: <0-9a-zA-Z_>.
Scalar Replacement Of Aggregates
This file defines the 'Statistic' class, which is designed to be an easy way to expose various metric...
#define STATISTIC(VARNAME, DESC)
Helper class to build FunctionVarLocs, since that class isn't easy to modify.
void setWedge(VarLocInsertPt Before, SmallVector< VarLocInfo > &&Wedge)
Replace the defs that come just before /p Before with /p Wedge.
const SmallVectorImpl< VarLocInfo > * getWedge(VarLocInsertPt Before) const
Return ptr to wedge of defs or nullptr if no defs come just before /p Before.
unsigned getNumVariables() const
void addSingleLocVar(DebugVariable Var, DIExpression *Expr, DebugLoc DL, RawLocationWrapper R)
Add a def for a variable that is valid for its lifetime.
VariableID insertVariable(DebugVariable V)
Find or insert V and return the ID.
void addVarLoc(VarLocInsertPt Before, DebugVariable Var, DIExpression *Expr, DebugLoc DL, RawLocationWrapper R)
Add a def to the wedge of defs just before /p Before.
const DebugVariable & getVariable(VariableID ID) const
Get a variable from its ID.
Class recording the (high level) value of a variable.
Class for arbitrary precision integers.
uint64_t getZExtValue() const
Get zero extended value.
bool getBoolValue() const
Convert APInt to a boolean value.
an instruction to allocate memory on the stack
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
AssignmentTrackingAnalysis()
bool runOnFunction(Function &F) override
runOnFunction - Virtual method overriden by subclasses to do the per-function processing of the pass.
LLVM Basic Block Representation.
iterator begin()
Instruction iterator methods.
LLVM_ABI bool isEntryBlock() const
Return true if this is the entry block of the containing function.
int find_first_unset_in(unsigned Begin, unsigned End) const
find_first_unset_in - Returns the index of the first unset bit in the range [Begin,...
iterator_range< const_set_bits_iterator > set_bits() const
A structured debug information entry.
static LLVM_ABI DIExpression * append(const DIExpression *Expr, ArrayRef< uint64_t > Ops)
Append the opcodes Ops to DIExpr.
unsigned getNumElements() const
DbgVariableFragmentInfo FragmentInfo
LLVM_ABI bool startsWithDeref() const
Return whether the first element a DW_OP_deref.
static LLVM_ABI std::optional< FragmentInfo > getFragmentInfo(expr_op_iterator Start, expr_op_iterator End)
Retrieve the details of this fragment expression.
ArrayRef< uint64_t > getElements() const
static LLVM_ABI std::optional< DIExpression * > createFragmentExpression(const DIExpression *Expr, unsigned OffsetInBits, unsigned SizeInBits)
Create a DIExpression to describe one part of an aggregate variable that is fragmented across multipl...
static LLVM_ABI DIExpression * prepend(const DIExpression *Expr, uint8_t Flags, int64_t Offset=0)
Prepend DIExpr with a deref and offset operation and optionally turn it into a stack value or/and an ...
static LLVM_ABI DIExpression * prependOpcodes(const DIExpression *Expr, SmallVectorImpl< uint64_t > &Ops, bool StackValue=false, bool EntryValue=false)
Prepend DIExpr with the given opcodes and optionally turn it into a stack value.
LLVM_ABI std::optional< uint64_t > getSizeInBits() const
Determines the size of the variable's type.
StringRef getName() const
A parsed version of the target data layout string in and methods for querying it.
LLVM_ABI unsigned getIndexTypeSizeInBits(Type *Ty) const
The size in bits of the index used in GEP calculation for this type.
Instruction * MarkedInstr
Link back to the Instruction that owns this marker.
LLVM_ABI iterator_range< simple_ilist< DbgRecord >::iterator > getDbgRecordRange()
Produce a range over all the DbgRecords in this Marker.
Base class for non-instruction debug metadata records that have positions within IR.
DebugLoc getDebugLoc() const
Record of a variable value-assignment, aka a non instruction representation of the dbg....
LLVM_ABI bool isKillAddress() const
Check whether this kills the address component.
LLVM_ABI DIAssignID * getAssignID() const
DIExpression * getExpression() const
DILocalVariable * getVariable() const
Metadata * getRawLocation() const
Returns the metadata operand for the first location description.
Result run(Function &F, FunctionAnalysisManager &FAM)
PreservedAnalyses run(Function &F, FunctionAnalysisManager &FAM)
LLVM_ABI DILocation * getInlinedAt() const
Identifies a unique instance of a variable.
const DILocation * getInlinedAt() const
const DILocalVariable * getVariable() const
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
void reserve(size_type NumEntries)
Grow the densemap so that it can contain at least NumEntries items before resizing again.
Implements a dense probed hash-table based set.
Class representing an expression and its matching format.
Data structure describing the variable locations in a function.
void print(raw_ostream &OS, const Function &Fn) const
const VarLocInfo * locs_begin(const Instruction *Before) const
First variable location definition that comes before Before.
const VarLocInfo * single_locs_begin() const
const VarLocInfo * locs_end(const Instruction *Before) const
One past the last variable location definition that comes before Before.
const VarLocInfo * single_locs_end() const
One past the last single-location variable location definition.
void init(FunctionVarLocsBuilder &Builder)
const DataLayout & getDataLayout() const
Get the data layout of the module this function belongs to.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
bool isTerminator() const
const_iterator begin() const
void insert(KeyT a, KeyT b, ValT y)
insert - Add a mapping of [a;b] to y, coalesce with adjacent intervals.
void clear()
clear - Remove all entries.
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
void push_back(MachineInstr *MI)
Pass interface - Implemented by all 'passes'.
A discriminated union of two or more pointer types, with the discriminator in the low bit of the poin...
void * getOpaqueValue() const
static LLVM_ABI PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Lightweight class that wraps the location operand metadata of a debug intrinsic.
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
static LLVM_ABI IntegerType * getInt1Ty(LLVMContext &C)
UniqueVector - This class produces a sequential ID number (base 1) for each unique entry that is adde...
unsigned insert(const T &Entry)
insert - Append entry to the vector if it doesn't already exist.
LLVM Value Representation.
LLVM_ABI StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
size_type count(const_arg_type_t< ValueT > V) const
Return 1 if the specified key is in the set, 0 otherwise.
self_iterator getIterator()
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
DenseMap< FragmentOfVar, SmallVector< DIExpression::FragmentInfo, 1 > > OverlapMap
constexpr std::underlying_type_t< E > Mask()
Get a bitmask with 1s in all places up to the high-order bit of E's largest value.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
@ Tail
Attemps to make calls as fast as possible while guaranteeing that tail call optimization can always b...
@ BasicBlock
Various leaf nodes.
LLVM_ABI void deleteAll(Function *F)
Remove all Assignment Tracking related intrinsics and metadata from F.
SmallVector< DbgVariableRecord * > getDVRAssignmentMarkers(const Instruction *Inst)
Return a range of dbg_assign records for which Inst performs the assignment they encode.
LLVM_ABI std::optional< AssignmentInfo > getAssignmentInfo(const DataLayout &DL, const MemIntrinsic *I)
LLVM_ABI bool calculateFragmentIntersect(const DataLayout &DL, const Value *Dest, uint64_t SliceOffsetInBits, uint64_t SliceSizeInBits, const DbgVariableRecord *DVRAssign, std::optional< DIExpression::FragmentInfo > &Result)
Calculate the fragment of the variable in DAI covered from (Dest + SliceOffsetInBits) to to (Dest + S...
initializer< Ty > init(const Ty &Val)
@ DW_OP_LLVM_fragment
Only used in LLVM metadata.
PointerTypeMap run(const Module &M)
Compute the PointerTypeMap for the module M.
friend class Instruction
Iterator for Instructions in a `BasicBlock.
This is an optimization pass for GlobalISel generic memory operations.
void dump(const SparseBitVector< ElementSize > &LHS, raw_ostream &out)
std::tuple< const DIScope *, const DIScope *, const DILocalVariable * > VarID
A unique key that represents a debug variable.
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly.
Printable print(const GCNRegPressure &RP, const GCNSubtarget *ST=nullptr, unsigned DynamicVGPRBlockSize=0)
decltype(auto) dyn_cast(const From &Val)
dyn_cast<X> - Return the argument parameter cast to the specified type.
auto successors(const MachineBasicBlock *BB)
bool operator!=(uint64_t V1, const APInt &V2)
bool operator==(const AddressRangeValuePair &LHS, const AddressRangeValuePair &RHS)
bool any_of(R &&range, UnaryPredicate P)
Provide wrappers to std::any_of which take ranges instead of having to pass begin/end explicitly.
auto reverse(ContainerTy &&C)
LLVM_ABI raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
bool isFunctionInPrintList(StringRef FunctionName)
VariableID
Type wrapper for integer ID for Variables. 0 is reserved.
class LLVM_GSL_OWNER SmallVector
Forward declaration of SmallVector so that calculateSmallVectorDefaultInlinedElements can reference s...
bool isa(const From &Val)
isa<X> - Return true if the parameter to the template is an instance of one of the template type argu...
LLVM_ATTRIBUTE_VISIBILITY_DEFAULT AnalysisKey InnerAnalysisManagerProxy< AnalysisManagerT, IRUnitT, ExtraArgTs... >::Key
LLVM_ABI raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
constexpr T divideCeil(U Numerator, V Denominator)
Returns the integer ceil(Numerator / Denominator).
std::string join(IteratorT Begin, IteratorT End, StringRef Separator)
Joins the strings in the range [Begin, End), adding Separator between the elements.
LLVM_ABI bool isAssignmentTrackingEnabled(const Module &M)
Return true if assignment tracking is enabled for module M.
FunctionAddr VTableAddr Next
DWARFExpression::Operation Op
ArrayRef(const T &OneElt) -> ArrayRef< T >
std::string toString(const APInt &I, unsigned Radix, bool Signed, bool formatAsCLiteral=false, bool UpperCase=true, bool InsertSeparators=false)
decltype(auto) cast(const From &Val)
cast<X> - Return the argument parameter cast to the specified type.
auto predecessors(const MachineBasicBlock *BB)
AnalysisManager< Function > FunctionAnalysisManager
Convenience typedef for the Function analysis manager.
static auto filterDbgVars(iterator_range< simple_ilist< DbgRecord >::iterator > R)
Filter the DbgRecord range to DbgVariableRecord types only and downcast.
bool debuginfoShouldUseDebugInstrRef(const Triple &T)
Implement std::hash so that hash_code can be used in STL containers.
A special type used by analysis passes to provide an address that identifies that particular analysis...
static VariableID getTombstoneKey()
static bool isEqual(const VariableID &LHS, const VariableID &RHS)
static unsigned getHashValue(const VariableID &Val)
static VariableID getEmptyKey()
DenseMapInfo< unsigned > Wrapped
An information struct used to provide DenseMap with the various necessary components for a given valu...
Variable location definition used by FunctionVarLocs.
RawLocationWrapper Values
llvm::VariableID VariableID
VarLocInsertPt argument_type
result_type operator()(const argument_type &Arg) const