#ifndef LLVM_ANALYSIS_MEMORYSSA_H
#define LLVM_ANALYSIS_MEMORYSSA_H

template <class GraphType> struct GraphTraits;

class MemorySSAWalker;

namespace MSSAHelpers {

template <class T> class memoryaccess_def_iterator_base;

// MemoryAccess (excerpt).
    public ilist_node<MemoryAccess, ilist_tag<MSSAHelpers::AllAccessTag>>,
    public ilist_node<MemoryAccess, ilist_tag<MSSAHelpers::DefsOnlyTag>> {
  void *operator new(size_t) = delete;

  static bool classof(const Value *V) {
    unsigned ID = V->getValueID();
    return ID == MemoryUseVal || ID == MemoryPhiVal || ID == MemoryDefVal;
  }

  inline unsigned getID() const;

// MemoryUseOrDef (excerpt).
  void *operator new(size_t) = delete;

  MemoryUseOrDef(LLVMContext &C, MemoryAccess *DMA, unsigned Vty,
                 DeleteValueTy DeleteValue, Instruction *MI, BasicBlock *BB,
                 unsigned NumOperands)
      : MemoryAccess(C, Vty, DeleteValue, BB, NumOperands),
        MemoryInstruction(MI) {

// MemoryUse (excerpt).
  void *operator new(size_t S) { return User::operator new(S, 1); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  void setOptimized(MemoryAccess *DMA) {
    OptimizedID = DMA->getID();

// MemoryDef (excerpt).
  void *operator new(size_t S) { return User::operator new(S, 2); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  void setOptimized(MemoryAccess *MA) {
    OptimizedID = MA->getID();

  MemoryAccess *getOptimized() const {
    return cast_or_null<MemoryAccess>(getOperand(1));
  }

  bool isOptimized() const {
    return getOptimized() && OptimizedID == getOptimized()->getID();
  }

  void resetOptimized() {
    setOperand(1, nullptr);

// OperandTraits<MemoryUseOrDef> (excerpt).
  if (auto *MU = dyn_cast<MemoryUse>(MUD))        // op_begin
  if (auto *MU = dyn_cast<MemoryUse>(MUD))        // op_end
  if (const auto *MU = dyn_cast<MemoryUse>(MUD))  // operands

// MemoryPhi (excerpt).
  void *operator new(size_t S) { return User::operator new(S); }
  void operator delete(void *Ptr) { User::operator delete(Ptr); }

  MemoryPhi(LLVMContext &C, BasicBlock *BB, unsigned Ver, unsigned NumPreds = 0)
      : MemoryAccess(C, MemoryPhiVal, deleteMe, BB, 0), ID(Ver),
        ReservedSpace(NumPreds) {
    allocHungoffUses(ReservedSpace);
  }

  block_iterator block_begin() {
    return reinterpret_cast<block_iterator>(op_begin() + ReservedSpace);
  }

  block_iterator block_end() { return block_begin() + getNumOperands(); }

  iterator_range<block_iterator> blocks() {
    return make_range(block_begin(), block_end());
  }

  iterator_range<const_block_iterator> blocks() const {
    return make_range(block_begin(), block_end());
  }

  void setIncomingValue(unsigned I, MemoryAccess *V) {
    assert(V && "PHI node got a null value!");
    setOperand(I, V);
  }

  BasicBlock *getIncomingBlock(const Use &U) const {
    assert(this == U.getUser() && "Iterator doesn't point to PHI's Uses?");
    return getIncomingBlock(unsigned(&U - op_begin()));
  }

  BasicBlock *getIncomingBlock(MemoryAccess::const_user_iterator I) const {
    return getIncomingBlock(I.getUse());
  }

  void setIncomingBlock(unsigned I, BasicBlock *BB) {
    assert(BB && "PHI node got a null basic block!");
    block_begin()[I] = BB;
  }

  void addIncoming(MemoryAccess *V, BasicBlock *BB) {
    if (getNumOperands() == ReservedSpace)
      growOperands(); // Get more space!
    setNumHungOffUseOperands(getNumOperands() + 1);
    setIncomingValue(getNumOperands() - 1, V);
    setIncomingBlock(getNumOperands() - 1, BB);
  }

  int getBasicBlockIndex(const BasicBlock *BB) const {
    for (unsigned I = 0, E = getNumOperands(); I != E; ++I)
      if (block_begin()[I] == BB)
        return I;
    return -1;
  }

  MemoryAccess *getIncomingValueForBlock(const BasicBlock *BB) const {
    int Idx = getBasicBlockIndex(BB);
    assert(Idx >= 0 && "Invalid basic block argument!");
    return getIncomingValue(Idx);
  }

  void unorderedDeleteIncoming(unsigned I) {
    unsigned E = getNumOperands();
    assert(I < E && "Cannot remove out of bounds Phi entry.");
    assert(E >= 2 && "Cannot only remove incoming values in MemoryPhis with "
                     "at least 2 values.");
    setIncomingValue(I, getIncomingValue(E - 1));
    setIncomingBlock(I, block_begin()[E - 1]);
    setOperand(E - 1, nullptr);
    block_begin()[E - 1] = nullptr;
    setNumHungOffUseOperands(getNumOperands() - 1);
  }

  template <typename Fn> void unorderedDeleteIncomingIf(Fn &&Pred) {
    for (unsigned I = 0, E = getNumOperands(); I != E; ++I)
      if (Pred(getIncomingValue(I), getIncomingBlock(I))) {
        unorderedDeleteIncoming(I);
        E = getNumOperands();
    assert(getNumOperands() >= 1 &&
           "Cannot remove all incoming blocks in a MemoryPhi.");

  void unorderedDeleteIncomingBlock(const BasicBlock *BB) {
    unorderedDeleteIncomingIf(

  void unorderedDeleteIncomingValue(const MemoryAccess *MA) {
    unorderedDeleteIncomingIf(

  static bool classof(const Value *V) {
    return V->getValueID() == MemoryPhiVal;
  }

  unsigned ReservedSpace;

  void growOperands() {
    unsigned E = getNumOperands();
    ReservedSpace = std::max(E + E / 2, 2u);
    growHungoffUses(ReservedSpace, /*IsPhi=*/true);
  }

  static void deleteMe(DerivedUser *Self);
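The MemoryPhi accessors excerpted above mirror PHINode's interface. A minimal sketch of how a client might enumerate a MemoryPhi's incoming entries; the helper name printMemoryPhiEdges and the use of errs() are illustrative, not part of this header:

#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/BasicBlock.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

// Print each (incoming MemoryAccess, predecessor block) pair of a MemoryPhi.
static void printMemoryPhiEdges(const MemoryPhi &MP) {
  for (unsigned I = 0, E = MP.getNumIncomingValues(); I != E; ++I) {
    MemoryAccess *Incoming = MP.getIncomingValue(I); // memory state on this edge
    BasicBlock *Pred = MP.getIncomingBlock(I);       // predecessor supplying it
    errs() << Pred->getName() << " -> ";
    Incoming->print(errs());
    errs() << "\n";
  }
}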
// MemoryAccess / MemoryUseOrDef inline helpers (excerpt).
inline unsigned MemoryAccess::getID() const {
  assert((isa<MemoryDef>(this) || isa<MemoryPhi>(this)) &&
         "only memory defs and phis have ids");
  if (const auto *MD = dyn_cast<MemoryDef>(this))
    return MD->getID();
  return cast<MemoryPhi>(this)->getID();
}

inline bool MemoryUseOrDef::isOptimized() const {
  if (const auto *MD = dyn_cast<MemoryDef>(this))
    return MD->isOptimized();
  return cast<MemoryUse>(this)->isOptimized();
}

inline MemoryAccess *MemoryUseOrDef::getOptimized() const {
  if (const auto *MD = dyn_cast<MemoryDef>(this))
    return MD->getOptimized();
  return cast<MemoryUse>(this)->getOptimized();
}

inline void MemoryUseOrDef::setOptimized(MemoryAccess *MA) {
  if (auto *MD = dyn_cast<MemoryDef>(this))
    MD->setOptimized(MA);
  else
    cast<MemoryUse>(this)->setOptimized(MA);
}

inline void MemoryUseOrDef::resetOptimized() {
  if (auto *MD = dyn_cast<MemoryDef>(this))
    MD->resetOptimized();
  else
    cast<MemoryUse>(this)->resetOptimized();
}

// MemorySSA (excerpt).
  MemoryUseOrDef *getMemoryAccess(const Instruction *I) const {
    return cast_or_null<MemoryUseOrDef>(ValueToMemoryAccess.lookup(I));
  }

  MemoryPhi *getMemoryAccess(const BasicBlock *BB) const {
    return cast_or_null<MemoryPhi>(ValueToMemoryAccess.lookup(cast<Value>(BB)));
  }

  bool isLiveOnEntryDef(const MemoryAccess *MA) const {
    return MA == LiveOnEntryDef.get();
  }

  MemoryAccess *getLiveOnEntryDef() const { return LiveOnEntryDef.get(); }

  const AccessList *getBlockAccesses(const BasicBlock *BB) const {
    return getWritableBlockAccesses(BB);
  }

  const DefsList *getBlockDefs(const BasicBlock *BB) const {
    return getWritableBlockDefs(BB);
  }

  void verifyMemorySSA(VerificationLevel = VerificationLevel::Fast) const;

  void ensureOptimizedUses();

  void verifyOrderingDominationAndDefUses(
      Function &F, VerificationLevel = VerificationLevel::Fast) const;
  void verifyDominationNumbers(const Function &F) const;
  void verifyPrevDefInPhis(Function &F) const;

  AccessList *getWritableBlockAccesses(const BasicBlock *BB) const {
    auto It = PerBlockAccesses.find(BB);
    return It == PerBlockAccesses.end() ? nullptr : It->second.get();
  }

  DefsList *getWritableBlockDefs(const BasicBlock *BB) const {
    auto It = PerBlockDefs.find(BB);
    return It == PerBlockDefs.end() ? nullptr : It->second.get();
  }

  void renamePass(BasicBlock *BB, MemoryAccess *IncomingVal,
                  SmallPtrSetImpl<BasicBlock *> &Visited) {
    renamePass(DT->getNode(BB), IncomingVal, Visited, true, true);
  }

  void removeFromLists(MemoryAccess *, bool ShouldDelete = true);
  void insertIntoListsBefore(MemoryAccess *, const BasicBlock *,
                             AccessList::iterator);
  MemoryUseOrDef *createDefinedAccess(Instruction *, MemoryAccess *,
                                      const MemoryUseOrDef *Template = nullptr,
                                      bool CreationMustSucceed = true);

  class ClobberWalkerBase;
  class CachingWalker;
  class SkipSelfWalker;

  CachingWalker *getWalkerImpl();

  void markUnreachableAsLiveOnEntry(BasicBlock *BB);

  template <typename AliasAnalysisType>
  MemoryUseOrDef *createNewAccess(Instruction *, AliasAnalysisType *,
                                  const MemoryUseOrDef *Template = nullptr);
  void renamePass(DomTreeNode *, MemoryAccess *IncomingVal,
                  SmallPtrSetImpl<BasicBlock *> &Visited,
                  bool SkipVisited = false, bool RenameAllUses = false);
  AccessList *getOrCreateAccessList(const BasicBlock *);
  DefsList *getOrCreateDefsList(const BasicBlock *);

  AccessMap PerBlockAccesses;
  DefsMap PerBlockDefs;
  std::unique_ptr<MemoryAccess, ValueDeleter> LiveOnEntryDef;

  std::unique_ptr<ClobberWalkerBase> WalkerBase;
  std::unique_ptr<CachingWalker> Walker;
  std::unique_ptr<SkipSelfWalker> SkipWalker;

  bool IsOptimized = false;
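The accessors above are the usual entry points for clients once MemorySSA has been built for a function. A minimal sketch of the basic queries; the helper name describeMemoryState and the printed strings are illustrative:

#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/Instruction.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

static void describeMemoryState(MemorySSA &MSSA, Instruction &I) {
  // Only memory Mod/Ref'ing instructions have a MemoryUse or MemoryDef.
  if (MemoryUseOrDef *MUD = MSSA.getMemoryAccess(&I)) {
    MemoryAccess *Defining = MUD->getDefiningAccess();
    if (MSSA.isLiveOnEntryDef(Defining))
      errs() << "depends only on memory live on entry to the function\n";
    else
      errs() << "defining access: " << *Defining << "\n";
  }
}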
// MemorySSAAnalysis::Result (excerpt).
  std::unique_ptr<MemorySSA> MSSA;

// MemorySSAPrinterPass (excerpt).
  bool EnsureOptimizedUses;

  MemorySSAPrinterPass(raw_ostream &OS, bool EnsureOptimizedUses)
      : OS(OS), EnsureOptimizedUses(EnsureOptimizedUses) {}

// MemorySSAWrapperPass (excerpt).
  std::unique_ptr<MemorySSA> MSSA;

// MemorySSAWalker::getClobberingMemoryAccess (excerpt).
    assert(MA && "Handed an instruction that MemorySSA doesn't recognize?");

// memoryaccess_def_iterator_base (excerpt).
                                 std::forward_iterator_tag, T, ptrdiff_t, T *,
  using BaseT = typename memoryaccess_def_iterator_base::iterator_facade_base;

  bool operator==(const memoryaccess_def_iterator_base &Other) const {
    return Access == Other.Access && (!Access || ArgNo == Other.ArgNo);
  }

  BasicBlock *getPhiArgBlock() const {
    MemoryPhi *MP = dyn_cast<MemoryPhi>(Access);
    assert(MP && "Tried to get phi arg block when not iterating over a PHI");
    return MP->getIncomingBlock(ArgNo);
  }

  typename std::iterator_traits<BaseT>::pointer operator*() const {
    assert(Access && "Tried to access past the end of our iterator");
    // Yield the current phi argument for phis, and the defining access for
    // everything else.
    if (const MemoryPhi *MP = dyn_cast<MemoryPhi>(Access))
      return MP->getIncomingValue(ArgNo);
    return cast<MemoryUseOrDef>(Access)->getDefiningAccess();
  }

  using BaseT::operator++;
  memoryaccess_def_iterator_base &operator++() {
    assert(Access && "Hit end of iterator");
    if (const MemoryPhi *MP = dyn_cast<MemoryPhi>(Access)) {
      if (++ArgNo >= MP->getNumIncomingValues()) {

  T *Access = nullptr;

// upward_defs_iterator (excerpt).
                                 std::forward_iterator_tag,
                                 const MemoryAccessPair> {
  using BaseT = upward_defs_iterator::iterator_facade_base;

  upward_defs_iterator(const MemoryAccessPair &Info, DominatorTree *DT)
      : DefIterator(Info.first), Location(Info.second),
        OriginalAccess(Info.first), DT(DT) {
    CurrentPair.first = nullptr;

    WalkingPhi = Info.first && isa<MemoryPhi>(Info.first);
    fillInCurrentPair();
  }

  bool operator==(const upward_defs_iterator &Other) const {
    return DefIterator == Other.DefIterator;
  }

  typename std::iterator_traits<BaseT>::reference operator*() const {
    assert(DefIterator != OriginalAccess->defs_end() &&
           "Tried to access past the end of our iterator");
    return CurrentPair;
  }

  using BaseT::operator++;
  upward_defs_iterator &operator++() {
    assert(DefIterator != OriginalAccess->defs_end() &&
           "Tried to access past the end of the iterator");
    ++DefIterator;
    if (DefIterator != OriginalAccess->defs_end())
      fillInCurrentPair();
    return *this;
  }

  bool IsGuaranteedLoopInvariant(const Value *Ptr) const;

  void fillInCurrentPair() {
    CurrentPair.first = *DefIterator;
    CurrentPair.second = Location;
    if (WalkingPhi && Location.Ptr) {
      // Attempt to phi-translate the pointer into the MemoryPhi's predecessor.
      Translator.translateValue(OriginalAccess->getBlock(),
      if (Addr != CurrentPair.second.Ptr)
        CurrentPair.second = CurrentPair.second.getWithNewPtr(Addr);

    // If the location is not guaranteed to be loop-invariant, fall back to a
    // conservative (unknown) size.
    if (!IsGuaranteedLoopInvariant(CurrentPair.second.Ptr))
      CurrentPair.second =
          CurrentPair.second.getWithNewSize(LocationSize::beforeOrAfterPointer());

  MemoryLocation Location;
  MemoryAccess *OriginalAccess = nullptr;
  DominatorTree *DT = nullptr;
  bool WalkingPhi = false;

inline upward_defs_iterator upward_defs_begin(const MemoryAccessPair &Pair,
                                              DominatorTree &DT) {
  return upward_defs_iterator(Pair, &DT);
}

inline iterator_range<upward_defs_iterator>
upward_defs(const MemoryAccessPair &Pair, DominatorTree &DT) {
  return make_range(upward_defs_begin(Pair, DT), upward_defs_end());
}

// def_chain_iterator (excerpt): walks the defining accesses of MemoryDefs.
template <class T, bool UseOptimizedChain = false>
                                 std::forward_iterator_tag, MemoryAccess *> {

  def_chain_iterator &operator++() {
    if (auto *MUD = dyn_cast<MemoryUseOrDef>(MA)) {
      // Follow the optimized link when requested and available, otherwise the
      // plain defining access. liveOnEntry has a null defining access.
      if (UseOptimizedChain && MUD->isOptimized())
        MA = MUD->getOptimized();
      else
        MA = MUD->getDefiningAccess();

template <class T>
inline iterator_range<def_chain_iterator<T>>
def_chain(T MA, MemoryAccess *UpTo = nullptr) {
#ifdef EXPENSIVE_CHECKS
  assert((!UpTo || find(def_chain(MA), UpTo) != def_chain_iterator<T>()) &&
         "UpTo isn't in the def chain!");
#endif
  return make_range(def_chain_iterator<T>(MA), def_chain_iterator<T>(UpTo));
}
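A minimal sketch of walking a def chain with the def_chain() helper declared above; the function name printDefChain is illustrative:

#include "llvm/Analysis/MemorySSA.h"
#include "llvm/Support/raw_ostream.h"
using namespace llvm;

static void printDefChain(MemoryAccess *MA) {
  // def_chain() repeatedly follows getDefiningAccess(); the range ends after
  // the first MemoryPhi or the live-on-entry def is reached.
  for (MemoryAccess *Def : def_chain(MA))
    errs() << *Def << "\n";
}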
#define DEFINE_TRANSPARENT_OPERAND_ACCESSORS(CLASS, VALUECLASS)
Macro for generating out-of-class operand accessor definitions.
API to communicate dependencies between analyses during invalidation.
A container for analyses that lazily runs them and caches their results.
Represent the analysis usage information of a pass.
LLVM Basic Block Representation.
const Module * getModule() const
Return the module owning the function this basic block belongs to, or nullptr if the function does not have a module.
This class is a wrapper over an AAResults, and it is intended to be used only when there are no IR changes in between queries.
Extension point for the Value hierarchy.
void(*)(DerivedUser *) DeleteValueTy
A MemorySSAWalker that does no alias queries, or anything else.
MemoryAccess * getClobberingMemoryAccess(MemoryAccess *, BatchAAResults &) override
Does the same thing as getClobberingMemoryAccess(const Instruction *I), but takes a MemoryAccess instead of an Instruction.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
FunctionPass class - This class is used to implement most global optimizations.
This is an important class for using LLVM in a threaded context.
static constexpr LocationSize beforeOrAfterPointer()
Any location before or after the base pointer (but still within the underlying object).
AllAccessType::reverse_self_iterator getReverseIterator()
MemoryAccess(LLVMContext &C, unsigned Vty, DeleteValueTy DeleteValue, BasicBlock *BB, unsigned NumOperands)
AllAccessType::const_self_iterator getIterator() const
MemoryAccess(const MemoryAccess &)=delete
static bool classof(const Value *V)
DefsOnlyType::const_self_iterator getDefsIterator() const
DefsOnlyType::self_iterator getDefsIterator()
DefsOnlyType::reverse_self_iterator getReverseDefsIterator()
DefsOnlyType::const_reverse_self_iterator getReverseDefsIterator() const
memoryaccess_def_iterator defs_end()
BasicBlock * getBlock() const
user_iterator iterator
The user iterators for a memory access.
AllAccessType::const_reverse_self_iterator getReverseIterator() const
void print(raw_ostream &OS) const
unsigned getID() const
Used for debugging and tracking things about MemoryAccesses.
MemoryAccess & operator=(const MemoryAccess &)=delete
void setBlock(BasicBlock *BB)
Used by MemorySSA to change the block of a MemoryAccess when it is moved.
const_user_iterator const_iterator
memoryaccess_def_iterator defs_begin()
This iterator walks over all of the defs in a given MemoryAccess.
AllAccessType::self_iterator getIterator()
Get the iterators for the all access list and the defs only list. We default to the all access list.
Represents a read-write access to memory, whether it is a must-alias, or a may-alias.
static bool classof(const Value *MA)
MemoryAccess * getOptimized() const
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess)
MemoryDef(LLVMContext &C, MemoryAccess *DMA, Instruction *MI, BasicBlock *BB, unsigned Ver)
void setOptimized(MemoryAccess *MA)
Representation for a specific memory location.
const Value * Ptr
The address of the start of the location.
Represents phi nodes for memory accesses.
void setIncomingBlock(unsigned I, BasicBlock *BB)
void allocHungoffUses(unsigned N)
this is more complicated than the generic User::allocHungoffUses, because we have to allocate Uses for the incoming values and pointers to the incoming blocks, all in one allocation.
void setIncomingValue(unsigned I, MemoryAccess *V)
static bool classof(const Value *V)
void unorderedDeleteIncomingValue(const MemoryAccess *MA)
const_block_iterator block_end() const
BasicBlock * getIncomingBlock(const Use &U) const
Return incoming basic block corresponding to an operand of the PHI.
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess)
Provide fast operand accessors.
unsigned getNumIncomingValues() const
Return the number of incoming edges.
MemoryAccess * getIncomingValueForBlock(const BasicBlock *BB) const
block_iterator block_end()
const_block_iterator block_begin() const
iterator_range< block_iterator > blocks()
void unorderedDeleteIncomingIf(Fn &&Pred)
void unorderedDeleteIncoming(unsigned I)
BasicBlock * getIncomingBlock(unsigned I) const
Return incoming basic block number i.
const_op_range incoming_values() const
BasicBlock * getIncomingBlock(MemoryAccess::const_user_iterator I) const
Return incoming basic block corresponding to value use iterator.
static unsigned getIncomingValueNumForOperand(unsigned I)
void addIncoming(MemoryAccess *V, BasicBlock *BB)
Add an incoming value to the end of the PHI list.
op_range incoming_values()
void unorderedDeleteIncomingBlock(const BasicBlock *BB)
MemoryPhi(LLVMContext &C, BasicBlock *BB, unsigned Ver, unsigned NumPreds=0)
MemoryAccess * getIncomingValue(unsigned I) const
Return incoming value number x.
static unsigned getOperandNumForIncomingValue(unsigned I)
int getBasicBlockIndex(const BasicBlock *BB) const
Return the first index of the specified basic block in the value list for this PHI.
iterator_range< const_block_iterator > blocks() const
BasicBlock *const * const_block_iterator
block_iterator block_begin()
An analysis that produces MemorySSA for a function.
Result run(Function &F, FunctionAnalysisManager &AM)
Printer pass for MemorySSA.
PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM)
MemorySSAPrinterPass(raw_ostream &OS, bool EnsureOptimizedUses)
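A minimal sketch of obtaining MemorySSA inside a new-pass-manager function pass; ExamplePass is illustrative, while MemorySSAAnalysis and its Result are the classes documented above:

#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/PassManager.h"
using namespace llvm;

struct ExamplePass : PassInfoMixin<ExamplePass> {
  PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM) {
    MemorySSA &MSSA = AM.getResult<MemorySSAAnalysis>(F).getMSSA();
    MSSA.ensureOptimizedUses(); // optimize all MemoryUses once, if not done yet
    return PreservedAnalyses::all();
  }
};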
static bool defClobbersUseOrDef(MemoryDef *MD, const MemoryUseOrDef *MU, AliasAnalysis &AA)
Printer pass for MemorySSA via the walker.
MemorySSAWalkerPrinterPass(raw_ostream &OS)
PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM)
This is the generic walker interface for walkers of MemorySSA.
MemoryAccess * getClobberingMemoryAccess(const Instruction *I, BatchAAResults &AA)
Given a memory Mod/Ref/ModRef'ing instruction, calling this will give you the nearest dominating MemoryAccess (by skipping non-aliasing def links).
virtual ~MemorySSAWalker()=default
MemoryAccess * getClobberingMemoryAccess(MemoryAccess *MA, const MemoryLocation &Loc)
virtual void invalidateInfo(MemoryAccess *)
Given a memory access, invalidate anything this walker knows about that access.
virtual MemoryAccess * getClobberingMemoryAccess(MemoryAccess *, const MemoryLocation &, BatchAAResults &AA)=0
Given a potentially clobbering memory access and a new location, calling this will give you the nearest dominating clobbering MemoryAccess (by skipping non-aliasing def links).
virtual MemoryAccess * getClobberingMemoryAccess(MemoryAccess *, BatchAAResults &AA)=0
Does the same thing as getClobberingMemoryAccess(const Instruction *I), but takes a MemoryAccess instead of an Instruction.
MemoryAccess * getClobberingMemoryAccess(const Instruction *I)
MemoryAccess * getClobberingMemoryAccess(MemoryAccess *MA)
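A minimal sketch of a walker query; nearestClobber is illustrative, and getWalker() returns MemorySSA's default (caching) walker:

#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/Instructions.h"
using namespace llvm;

static MemoryAccess *nearestClobber(MemorySSA &MSSA, LoadInst &LI) {
  // Skips defs that are known not to alias the load's location.
  return MSSA.getWalker()->getClobberingMemoryAccess(&LI);
}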
Legacy analysis pass which computes MemorySSA.
void verifyAnalysis() const override
verifyAnalysis() - This member can be implemented by an analysis pass to check the state of analysis information.
void releaseMemory() override
releaseMemory() - This member can be implemented by a pass if it wants to be able to release its memory when it is no longer needed.
bool runOnFunction(Function &) override
runOnFunction - Virtual method overridden by subclasses to do the per-function processing of the pass.
const MemorySSA & getMSSA() const
void getAnalysisUsage(AnalysisUsage &AU) const override
getAnalysisUsage - This function should be overridden by passes that need analysis information to do their job.
void print(raw_ostream &OS, const Module *M=nullptr) const override
print - Print out the internal state of the pass.
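A minimal sketch of requesting MemorySSA from the legacy pass manager; the pass itself is illustrative and omits registration:

#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/Function.h"
#include "llvm/Pass.h"
using namespace llvm;

namespace {
struct ExampleLegacyPass : FunctionPass {
  static char ID;
  ExampleLegacyPass() : FunctionPass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<MemorySSAWrapperPass>();
    AU.setPreservesAll();
  }

  bool runOnFunction(Function &F) override {
    const MemorySSA &MSSA = getAnalysis<MemorySSAWrapperPass>().getMSSA();
    MSSA.verifyMemorySSA();
    return false; // analysis only; the IR is not modified
  }
};
char ExampleLegacyPass::ID = 0;
} // end anonymous namespace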
Encapsulates MemorySSA, including all data associated with memory accesses.
const AccessList * getBlockAccesses(const BasicBlock *BB) const
Return the list of MemoryAccess's for a given basic block.
void renamePass(BasicBlock *BB, MemoryAccess *IncomingVal, SmallPtrSetImpl< BasicBlock * > &Visited)
AccessList * getWritableBlockAccesses(const BasicBlock *BB) const
InsertionPlace
Used in various insertion functions to specify whether we are talking about the beginning or end of a block.
DefsList * getWritableBlockDefs(const BasicBlock *BB) const
MemorySSA(MemorySSA &&)=delete
DominatorTree & getDomTree() const
MemoryUseOrDef * getMemoryAccess(const Instruction *I) const
Given a memory Mod/Ref'ing instruction, get the MemorySSA access associated with it.
MemoryPhi * getMemoryAccess(const BasicBlock *BB) const
MemoryAccess * getLiveOnEntryDef() const
const DefsList * getBlockDefs(const BasicBlock *BB) const
Return the list of MemoryDef's and MemoryPhi's for a given basic block.
bool isLiveOnEntryDef(const MemoryAccess *MA) const
Return true if MA represents the live on entry value.
Class that has the common methods + fields of memory uses/defs.
~MemoryUseOrDef()=default
MemoryAccess * getDefiningAccess() const
Get the access that produces the memory state used by this Use.
void resetOptimized()
Reset the ID of what this MemoryUse was optimized to, causing it to be rewalked by the walker if necessary.
MemoryAccess * getOptimized() const
Return the MemoryAccess associated with the optimized use, or nullptr.
MemoryUseOrDef(LLVMContext &C, MemoryAccess *DMA, unsigned Vty, DeleteValueTy DeleteValue, Instruction *MI, BasicBlock *BB, unsigned NumOperands)
void setDefiningAccess(MemoryAccess *DMA, bool Optimized=false)
void setOptimized(MemoryAccess *)
Sets the optimized use for a MemoryDef.
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess)
Instruction * getMemoryInst() const
Get the instruction that this MemoryUse represents.
static bool classof(const Value *MA)
bool isOptimized() const
Do we have an optimized use?
Represents read-only accesses to memory.
MemoryAccess * getOptimized() const
DECLARE_TRANSPARENT_OPERAND_ACCESSORS(MemoryAccess)
MemoryUse(LLVMContext &C, MemoryAccess *DMA, Instruction *MI, BasicBlock *BB)
void print(raw_ostream &OS) const
bool isOptimized() const
Whether the MemoryUse is optimized.
static bool classof(const Value *MA)
void setOptimized(MemoryAccess *DMA)
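The classof entries for MemoryUse, MemoryDef and MemoryPhi make the usual isa/dyn_cast machinery work on memory accesses. A small illustrative helper (accessKind is not part of this header):

#include "llvm/Analysis/MemorySSA.h"
using namespace llvm;

static const char *accessKind(const MemoryAccess *MA) {
  // MA must be non-null; these are the only three concrete kinds.
  if (isa<MemoryPhi>(MA))
    return "phi";
  if (isa<MemoryDef>(MA))
    return "def";
  return "use";
}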
A Module instance is used to store all the information related to an LLVM module.
const DataLayout & getDataLayout() const
Get the data layout for the module's target platform.
PHITransAddr - An address value which tracks and handles phi translation.
A set of analyses that are preserved following a run of a transformation pass.
A templated base class for SmallPtrSet which provides the typesafe interface that is common across all SmallPtrSet instances.
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
The instances of the Type class are immutable: once they are created, they are never changed.
A Use represents the edge between a Value definition and its users.
void allocHungoffUses(unsigned N, bool IsPhi=false)
Allocate the array of Uses, followed by a pointer (with bottom bit set) to the User.
void setOperand(unsigned i, Value *Val)
Value * getOperand(unsigned i) const
LLVM Value Representation.
user_iterator_impl< const User > const_user_iterator
unsigned getValueID() const
Return an ID for the concrete type of this object.
void deleteValue()
Delete a pointer to a generic Value.
user_iterator_impl< User > user_iterator
Iterator for intrusive lists based on ilist_node.
reverse_self_iterator getReverseIterator()
self_iterator getIterator()
An intrusive list with ownership and callbacks specified/controlled by ilist_traits, only with API safe for polymorphic types.
CRTP base class which implements the entire standard iterator facade in terms of a minimal subset of the interface.
A range adaptor for a pair of iterators.
memoryaccess_def_iterator_base()=default
BasicBlock * getPhiArgBlock() const
std::iterator_traits< BaseT >::pointer operator*() const
bool operator==(const memoryaccess_def_iterator_base &Other) const
memoryaccess_def_iterator_base(T *Start)
memoryaccess_def_iterator_base & operator++()
This class implements an extremely fast bulk output stream that can only output to a stream.
A simple intrusive list implementation.
Provide an iterator that walks defs, giving both the memory access, and the current pointer location, updating the pointer location as it changes due to phi node translation.
upward_defs_iterator(const MemoryAccessPair &Info, DominatorTree *DT)
std::iterator_traits< BaseT >::reference operator*() const
BasicBlock * getPhiArgBlock() const
upward_defs_iterator & operator++()
bool operator==(const upward_defs_iterator &Other) const
This file defines the ilist_node class template, which is a convenient base class for creating classes that can be used with ilists.
This provides a very simple, boring adaptor for a begin and end iterator into a range type.
auto find(R &&Range, const T &Val)
Provide wrappers to std::find which take ranges instead of having to pass begin/end explicitly.
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
@ INVALID_MEMORYACCESS_ID
upward_defs_iterator upward_defs_begin(const MemoryAccessPair &Pair, DominatorTree &DT)
std::pair< const MemoryAccess *, MemoryLocation > ConstMemoryAccessPair
iterator_range< def_chain_iterator< T > > def_chain(T MA, MemoryAccess *UpTo=nullptr)
memoryaccess_def_iterator_base< MemoryAccess > memoryaccess_def_iterator
memoryaccess_def_iterator_base< const MemoryAccess > const_memoryaccess_def_iterator
bool VerifyMemorySSA
Enables verification of MemorySSA.
std::pair< MemoryAccess *, MemoryLocation > MemoryAccessPair
upward_defs_iterator upward_defs_end()
raw_ostream & operator<<(raw_ostream &OS, const MemoryAccess &MA)
OutputIt move(R &&Range, OutputIt Out)
Provide wrappers to std::move which take ranges instead of having to pass begin/end explicitly.
iterator_range< upward_defs_iterator > upward_defs(const MemoryAccessPair &Pair, DominatorTree &DT)
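A minimal sketch of using upward_defs() to collect the defs reachable from a (MemoryAccess, MemoryLocation) pair; collectUpwardDefs is illustrative, and the location in each returned pair may have been phi-translated or had its size widened while crossing a MemoryPhi:

#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/MemoryLocation.h"
#include "llvm/Analysis/MemorySSA.h"
#include "llvm/IR/Dominators.h"
using namespace llvm;

static SmallVector<MemoryAccessPair, 4>
collectUpwardDefs(MemoryAccess *Start, const MemoryLocation &Loc,
                  DominatorTree &DT) {
  SmallVector<MemoryAccessPair, 4> Result;
  for (const MemoryAccessPair &P : upward_defs({Start, Loc}, DT))
    Result.push_back(P);
  return Result;
}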
iterator_range< def_chain_iterator< T, true > > optimized_def_chain(T MA)
Implement std::hash so that hash_code can be used in STL containers.
A CRTP mix-in that provides informational APIs needed for analysis passes.
A special type used by analysis passes to provide an address that identifies that particular analysis...
FixedNumOperandTraits - determine the allocation regime of the Use array when it is a prefix to the User object, and the number of Use objects is known at compile time.
static ChildIteratorType child_begin(NodeRef N)
MemoryAccess::iterator ChildIteratorType
static ChildIteratorType child_end(NodeRef N)
static NodeRef getEntryNode(NodeRef N)
static ChildIteratorType child_begin(NodeRef N)
static ChildIteratorType child_end(NodeRef N)
static NodeRef getEntryNode(NodeRef N)
HungoffOperandTraits - determine the allocation regime of the Use array when it is not a prefix to the User object, but allocated at an unrelated heap address.
Result(std::unique_ptr< MemorySSA > &&MSSA)
bool invalidate(Function &F, const PreservedAnalyses &PA, FunctionAnalysisManager::Invalidator &Inv)
std::unique_ptr< MemorySSA > MSSA
Verifier pass for MemorySSA.
PreservedAnalyses run(Function &F, FunctionAnalysisManager &AM)
static unsigned operands(const MemoryUseOrDef *MUD)
static Use * op_end(MemoryUseOrDef *MUD)
static Use * op_begin(MemoryUseOrDef *MUD)
Compile-time customization of User operands.
A CRTP mix-in to automatically provide informational APIs needed for passes.
Walks the defining accesses of MemoryDefs.
bool operator==(const def_chain_iterator &O) const
def_chain_iterator & operator++()
static void deleteNode(MemoryAccess *MA)
Use delete by default for iplist and ilist.