Go to the documentation of this file.
28 for (
const Use &U : FPtr->
uses()) {
38 if (isa<BitCastInst>(
User)) {
41 }
else if (
auto *CI = dyn_cast<CallInst>(
User)) {
42 DevirtCalls.push_back({Offset, *CI});
43 }
else if (
auto *II = dyn_cast<InvokeInst>(
User)) {
44 DevirtCalls.push_back({Offset, *II});
45 }
else if (HasNonCallUses) {
46 *HasNonCallUses =
true;
55 for (
const Use &U : VPtr->
uses()) {
57 if (isa<BitCastInst>(
User)) {
59 }
else if (isa<LoadInst>(
User)) {
61 }
else if (
auto GEP = dyn_cast<GetElementPtrInst>(
User)) {
63 if (VPtr ==
GEP->getPointerOperand() &&
GEP->hasAllConstantIndices()) {
65 int64_t GEPOffset =
M->getDataLayout().getIndexedOffsetInType(
66 GEP->getSourceElementType(), Indices);
83 for (
const Use &CIU : CI->
uses())
84 if (
auto *Assume = dyn_cast<AssumeInst>(CIU.getUser()))
85 Assumes.push_back(Assume);
100 Intrinsic::type_checked_load);
104 HasNonCallUses =
true;
108 for (
const Use &U : CI->
uses()) {
109 auto CIU = U.getUser();
110 if (
auto EVI = dyn_cast<ExtractValueInst>(CIU)) {
111 if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 0) {
112 LoadedPtrs.push_back(EVI);
115 if (EVI->getNumIndices() == 1 && EVI->getIndices()[0] == 1) {
116 Preds.push_back(EVI);
120 HasNonCallUses =
true;
123 for (
Value *LoadedPtr : LoadedPtrs)
125 Offset->getZExtValue(), CI, DT);
130 if (
I->getType()->isPointerTy()) {
138 if (
auto *
C = dyn_cast<ConstantStruct>(
I)) {
148 if (
auto *
C = dyn_cast<ConstantArray>(
I)) {
152 unsigned Op = Offset / ElemSize;
153 if (
Op >=
C->getNumOperands())
157 Offset % ElemSize,
M, TopLevelGlobal);
161 if (
auto *CI = dyn_cast<ConstantInt>(
I)) {
162 if (Offset == 0 && CI->getZExtValue() == 0) {
166 if (
auto *
C = dyn_cast<ConstantExpr>(
I)) {
167 switch (
C->getOpcode()) {
168 case Instruction::Trunc:
169 case Instruction::PtrToInt:
172 case Instruction::Sub: {
173 auto *Operand0 = cast<Constant>(
C->getOperand(0));
174 auto *Operand1 = cast<Constant>(
C->getOperand(1));
177 auto *CE = dyn_cast<ConstantExpr>(
C);
180 if (CE->getOpcode() != Instruction::GetElementPtr)
182 return CE->getOperand(0);
188 if (Operand1TargetGlobal != TopLevelGlobal)
201 for (
auto *U :
F->users()) {
202 auto *PtrExpr = dyn_cast<ConstantExpr>(U);
203 if (!PtrExpr || PtrExpr->getOpcode() != Instruction::PtrToInt)
206 for (
auto *PtrToIntUser : PtrExpr->users()) {
207 auto *SubExpr = dyn_cast<ConstantExpr>(PtrToIntUser);
208 if (!SubExpr || SubExpr->getOpcode() != Instruction::Sub)
211 SubExpr->replaceNonMetadataUsesWith(
This is an optimization pass for GlobalISel generic memory operations.
We currently emit `eax`. Perhaps this is what we really should generate instead. Is imull three or four cycles (eax, eax)? The current instruction priority is based on pattern complexity: the former is more complex because it folds a load, so the latter will not be emitted. Perhaps we should use AddedComplexity to give LEA32r a higher priority. We should always try to match LEA first, since the LEA matching code does some estimate to determine whether the match is profitable; if we care more about code size, then imull is better — it's two bytes shorter than movl + leal. On a Pentium M...
A parsed version of the target data layout string in and methods for querying it.
const Function * getParent() const
Return the enclosing method, or null if none.
void replaceRelativePointerUsersWithZero(Function *F)
Finds the same "relative pointer" pattern as described above, where the target is F, and replaces the entire pattern with a constant zero.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
Class to represent array types.
bool dominates(const BasicBlock *BB, const Use &U) const
Return true if the (end of the) basic block BB dominates the use U.
(vector float) vec_cmpeq(*A, *B) C
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signature does not match the call signature.
iterator_range< use_iterator > uses()
static Constant * get(Type *Ty, uint64_t V, bool IsSigned=false)
If Ty is a vector type, return a Constant with a splat of the given value.
This is an important base class in LLVM.
Used to lazily calculate structure layout information for a target machine, based on the DataLayout structure.
Module * getParent()
Get the module that this global value is contained inside of...
unsigned getElementContainingOffset(uint64_t Offset) const
Given a valid byte offset into the structure, returns the structure index that contains it.
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
A Module instance is used to store all the information related to an LLVM module.
Intrinsic::ID getIntrinsicID() const LLVM_READONLY
getIntrinsicID - This method returns the ID number of the specified function, or Intrinsic::not_intrinsic if the function is not an intrinsic.
uint64_t getSizeInBytes() const
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs and address space casts.
void findDevirtualizableCallsForTypeCheckedLoad(SmallVectorImpl< DevirtCallSite > &DevirtCalls, SmallVectorImpl< Instruction * > &LoadedPtrs, SmallVectorImpl< Instruction * > &Preds, bool &HasNonCallUses, const CallInst *CI, DominatorTree &DT)
Given a call to the intrinsic @llvm.type.checked.load, find all devirtualizable call sites based on the call and return them in DevirtCalls.
uint64_t getElementOffset(unsigned Idx) const
Constant * getPointerAtOffset(Constant *I, uint64_t Offset, Module &M, Constant *TopLevelGlobal=nullptr)
Processes a Constant recursively looking into elements of arrays, structs and expressions to find a trivial pointer element that is located at the given offset (relative to the beginning of the whole outer Constant).
Value * getArgOperand(unsigned i) const
const BasicBlock * getParent() const
This class consists of common code factored out of the SmallVector class to reduce code duplication based on element size.
This class represents a function call, abstracting a target machine's calling convention.
LLVM Value Representation.
Type * getElementType() const
void findDevirtualizableCallsForTypeTest(SmallVectorImpl< DevirtCallSite > &DevirtCalls, SmallVectorImpl< CallInst * > &Assumes, const CallInst *CI, DominatorTree &DT)
Given a call to the intrinsic @llvm.type.test, find all devirtualizable call sites based on the call and return them in DevirtCalls.
A Use represents the edge between a Value definition and its users.