#include "llvm/Config/llvm-config.h"
#include "llvm/IR/IntrinsicsAArch64.h"

using namespace llvm;

#define DEBUG_TYPE "codegenprepare"
STATISTIC(NumBlocksElim, "Number of blocks eliminated");
STATISTIC(NumPHIsElim, "Number of trivial PHIs eliminated");
STATISTIC(NumGEPsElim, "Number of GEPs converted to casts");
STATISTIC(NumCmpUses, "Number of uses of Cmp expressions replaced with uses of "
                      "sunken Cmps");
STATISTIC(NumCastUses, "Number of uses of Cast expressions replaced with uses "
                       "of sunken Casts");
STATISTIC(NumMemoryInsts, "Number of memory instructions whose address "
                          "computations were sunk");
STATISTIC(NumMemoryInstsPhiCreated,
          "Number of phis created when address "
          "computations were sunk to memory instructions");
STATISTIC(NumMemoryInstsSelectCreated,
          "Number of select created when address "
          "computations were sunk to memory instructions");
STATISTIC(NumExtsMoved, "Number of [s|z]ext instructions combined with loads");
STATISTIC(NumExtUses, "Number of uses of [s|z]ext instructions optimized");
STATISTIC(NumAndsAdded,
          "Number of and mask instructions added to form ext loads");
STATISTIC(NumAndUses, "Number of uses of and mask instructions optimized");
STATISTIC(NumRetsDup, "Number of return instructions duplicated");
STATISTIC(NumDbgValueMoved, "Number of debug value instructions moved");
STATISTIC(NumSelectsExpanded, "Number of selects turned into branches");
STATISTIC(NumStoreExtractExposed, "Number of store(extractelement) exposed");
    cl::desc("Disable branch optimizations in CodeGenPrepare"));

    cl::desc("Disable GC optimizations in CodeGenPrepare"));

    cl::desc("Disable select to branch conversion."));

    cl::desc("Address sinking in CGP using GEPs."));

    cl::desc("Enable sinking and/cmp into branches."));

    cl::desc("Disable store(extract) optimizations in CodeGenPrepare"));

    cl::desc("Stress test store(extract) optimizations in CodeGenPrepare"));

    cl::desc("Disable ext(promotable(ld)) -> promoted(ext(ld)) optimization in "
             "CodeGenPrepare"));

    cl::desc("Stress test ext(promotable(ld)) -> promoted(ext(ld)) "
             "optimization in CodeGenPrepare"));

    cl::desc("Disable protection against removing loop preheaders"));

    cl::desc("Use profile info to add section prefix for hot/cold functions"));

    cl::desc("In a profiling mode like sampleFDO, if a function doesn't have "
             "a profile, we cannot tell for sure that the function is cold "
             "because it may be a function newly added without ever being "
             "sampled. With the flag enabled, the compiler can put such "
             "profile-unknown functions into a special section, so the "
             "runtime system can choose to handle them differently than the "
             ".text section, to save RAM for example."));

    cl::desc("Skip merging empty blocks if (frequency of empty block) / "
             "(frequency of destination block) is greater than this ratio"));

    cl::desc("Force store splitting no matter what the target query says."));

    cl::desc("Enable merging of redundant sexts when one is dominating"
             " the other."));

    cl::desc("Disables combining addressing modes with different parts "
             "in optimizeMemoryInst."));

    cl::desc("Allow creation of Phis in Address sinking."));

    cl::desc("Allow creation of selects in Address sinking."));

    cl::desc("Allow combining of BaseReg field in Address sinking."));

    cl::desc("Allow combining of BaseGV field in Address sinking."));

    cl::desc("Allow combining of BaseOffs field in Address sinking."));

    cl::desc("Allow combining of ScaledReg field in Address sinking."));

    cl::desc("Enable splitting large offset of GEP."));

    cl::desc("Enable ICMP_EQ to ICMP_S(L|G)T conversion."));

    cl::desc("Enable BFI update verification for "
             "CodeGenPrepare."));

    cl::desc("Enable converting phi types in CodeGenPrepare"));
  class TypePromotionTransaction;

    std::unique_ptr<BlockFrequencyInfo> BFI;
    std::unique_ptr<BranchProbabilityInfo> BPI;

    SetOfInstrs InsertedInsts;

    InstrToOrigTy PromotedInsts;

    SetOfInstrs RemovedInsts;

    ValueToSExts ValToSExtendedUses;

    std::unique_ptr<DominatorTree> DT;

    StringRef getPassName() const override { return "CodeGen Prepare"; }

    template <typename F>
    void resetIteratorIfInvalidatedWhileCalling(BasicBlock *BB, F f) {
      Value *CurValue = &*CurInstIterator;

      if (IterHandle != CurValue) {
        CurInstIterator = BB->begin();

      DT = std::make_unique<DominatorTree>(F);

    void removeAllAssertingVHReferences(Value *V);
    bool eliminateMostlyEmptyBlocks(Function &F);
                            Type *AccessTy, unsigned AddrSpace);
    bool optimizeInlineAsmInst(CallInst *CS);
    bool optimizeExtractElementInst(Instruction *Inst);
    bool dupRetToEnableTailCallOpts(BasicBlock *BB, bool &ModifiedDT);
    bool tryToPromoteExts(TypePromotionTransaction &TPT,
                          unsigned CreatedInstsCost = 0);
    bool splitLargeGEPOffsets();
    bool performAddressTypePromotion(
        bool AllowPromotionWithoutCommonHeader,
        bool HasPromoted, TypePromotionTransaction &TPT,
    bool splitBranchCondition(Function &F, bool &ModifiedDT);
    bool optimizeCmp(CmpInst *Cmp, bool &ModifiedDT);
    bool combineToUSubWithOverflow(CmpInst *Cmp, bool &ModifiedDT);
    bool combineToUAddWithOverflow(CmpInst *Cmp, bool &ModifiedDT);

                    "Optimize for code generation", false, false)
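// runOnFunction below repeatedly applies the per-function transformations
// until the IR stops changing, then runs the late cleanups (fall-through
// elimination, statepoint relocate simplification, dbg.value and pseudo-probe
// placement) before returning whether anything changed.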
  DL = &F.getParent()->getDataLayout();

  bool EverMadeChange = false;

  InsertedInsts.clear();
  PromotedInsts.clear();

  SubtargetInfo = TM->getSubtargetImpl(F);
  TLI = SubtargetInfo->getTargetLowering();
  TRI = SubtargetInfo->getRegisterInfo();
  TLInfo = &getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
  TTI = &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
  LI = &getAnalysis<LoopInfoWrapperPass>().getLoopInfo();

  PSI = &getAnalysis<ProfileSummaryInfoWrapperPass>().getPSI();
  OptSize = F.hasOptSize();

    if (F.hasFnAttribute(Attribute::Hot) ||
      F.setSectionPrefix("hot");

      F.setSectionPrefix("unlikely");

      F.setSectionPrefix("unknown");

  while (BB != nullptr) {

  EverMadeChange |= eliminateAssumptions(F);

  EverMadeChange |= eliminateMostlyEmptyBlocks(F);

  bool ModifiedDT = false;

    EverMadeChange |= splitBranchCondition(F, ModifiedDT);

  bool MadeChange = true;

      bool ModifiedDTOnIteration = false;

      if (ModifiedDTOnIteration)

    MadeChange |= mergeSExts(F);
    if (!LargeOffsetGEPMap.empty())
      MadeChange |= splitLargeGEPOffsets();
    MadeChange |= optimizePhiTypes(F);

      eliminateFallThrough(F);

    EverMadeChange |= MadeChange;
    SeenChainsForSExt.clear();
    ValToSExtendedUses.clear();
    RemovedInsts.clear();
    LargeOffsetGEPMap.clear();
    LargeOffsetGEPID.clear();

      if (!MadeChange)
        continue;

      MadeChange |= !WorkList.empty();
      while (!WorkList.empty()) {

    if (EverMadeChange || MadeChange)
      MadeChange |= eliminateFallThrough(F);

    EverMadeChange |= MadeChange;

        if (auto *SP = dyn_cast<GCStatepointInst>(&I))
          Statepoints.push_back(SP);
    for (auto &I : Statepoints)
      EverMadeChange |= simplifyOffsetableRelocate(*I);

  EverMadeChange |= placeDbgValues(F);
  EverMadeChange |= placePseudoProbes(F);

  return EverMadeChange;
bool CodeGenPrepare::eliminateAssumptions(Function &F) {
  bool MadeChange = false;
    CurInstIterator = BB.begin();
    while (CurInstIterator != BB.end()) {
      if (auto *Assume = dyn_cast<AssumeInst>(I)) {
        Value *Operand = Assume->getOperand(0);
        Assume->eraseFromParent();

        resetIteratorIfInvalidatedWhileCalling(&BB, [&]() {

void CodeGenPrepare::removeAllAssertingVHReferences(Value *V) {
  LargeOffsetGEPMap.erase(V);
  NewGEPBases.erase(V);

  auto GEP = dyn_cast<GetElementPtrInst>(V);

  auto VecI = LargeOffsetGEPMap.find(GEP->getPointerOperand());
  if (VecI == LargeOffsetGEPMap.end())

  auto &GEPVector = VecI->second;

  if (I == GEPVector.end())

  if (GEPVector.empty())
    LargeOffsetGEPMap.erase(VecI);

  NewBFI.verifyMatch(*BFI);

bool CodeGenPrepare::eliminateFallThrough(Function &F) {
  bool Changed = false;

    Blocks.push_back(&Block);

  for (auto &Block : Blocks) {
    auto *BB = cast_or_null<BasicBlock>(Block);

    if (!SinglePred || SinglePred == BB || BB->hasAddressTaken())
      continue;

    if (Term && !Term->isConditional()) {

  for (auto &Pred : Preds)
    if (auto *BB = cast_or_null<BasicBlock>(Pred))

  BranchInst *BI = dyn_cast<BranchInst>(BB->getTerminator());

  if (BBI != BB->begin()) {
    while (isa<DbgInfoIntrinsic>(BBI)) {
      if (BBI == BB->begin())

    if (!isa<DbgInfoIntrinsic>(BBI) && !isa<PHINode>(BBI))

  if (!canMergeBlocks(BB, DestBB))

bool CodeGenPrepare::eliminateMostlyEmptyBlocks(Function &F) {
  while (!LoopList.empty()) {
    Loop *L = LoopList.pop_back_val();
      Preheaders.insert(Preheader);

  bool MadeChange = false;

    Blocks.push_back(&Block);

  for (auto &Block : Blocks) {
    BasicBlock *DestBB = findDestBlockOfMergeableEmptyBlock(BB);
        !isMergingEmptyBlockProfitable(BB, DestBB, Preheaders.count(BB)))

    eliminateMostlyEmptyBlock(BB);
bool CodeGenPrepare::isMergingEmptyBlockProfitable(BasicBlock *BB,
      !(BB->getSinglePredecessor() &&
        BB->getSinglePredecessor()->getSingleSuccessor()))

    if (auto *CBI = dyn_cast<CallBrInst>((Pred)->getTerminator()))
      for (unsigned i = 0, e = CBI->getNumSuccessors(); i != e; ++i)
        if (DestBB == CBI->getSuccessor(i))

  if (BB->getTerminator() != BB->getFirstNonPHIOrDbg())

  if (!isa<PHINode>(DestBB->begin()))

      if (DestBBPred == BB)

        return DestPN.getIncomingValueForBlock(BB) ==
               DestPN.getIncomingValueForBlock(DestBBPred);

      SameIncomingValueBBs.insert(DestBBPred);

  if (SameIncomingValueBBs.count(Pred))

  for (auto *SameValueBB : SameIncomingValueBBs)
    if (SameValueBB->getUniquePredecessor() == Pred &&
        DestBB == findDestBlockOfMergeableEmptyBlock(SameValueBB))
      BBFreq += BFI->getBlockFreq(SameValueBB);

bool CodeGenPrepare::canMergeBlocks(const BasicBlock *BB,
    if (UI->getParent() != DestBB || !isa<PHINode>(UI))

    if (const PHINode *UPN = dyn_cast<PHINode>(UI))
      for (unsigned I = 0, E = UPN->getNumIncomingValues(); I != E; ++I) {
        Instruction *Insn = dyn_cast<Instruction>(UPN->getIncomingValue(I));
            Insn->getParent() != UPN->getIncomingBlock(I))

  const PHINode *DestBBPN = dyn_cast<PHINode>(DestBB->begin());
  if (!DestBBPN)
    return true;

  if (const PHINode *BBPN = dyn_cast<PHINode>(BB->begin())) {
    for (unsigned i = 0, e = BBPN->getNumIncomingValues(); i != e; ++i)
      BBPreds.insert(BBPN->getIncomingBlock(i));

    if (BBPreds.count(Pred)) {
        const Value *V1 = PN.getIncomingValueForBlock(Pred);
        const Value *V2 = PN.getIncomingValueForBlock(BB);

        if (const PHINode *V2PN = dyn_cast<PHINode>(V2))
          if (V2PN->getParent() == BB)
            V2 = V2PN->getIncomingValueForBlock(Pred);

        if (V1 != V2)
          return false;

void CodeGenPrepare::eliminateMostlyEmptyBlock(BasicBlock *BB) {
    if (SinglePred != DestBB) {
             "Single predecessor not the same as predecessor");

    Value *InVal = PN.removeIncomingValue(BB, false);

    PHINode *InValPhi = dyn_cast<PHINode>(InVal);

      if (PHINode *BBPN = dyn_cast<PHINode>(BB->begin())) {
        for (unsigned i = 0, e = BBPN->getNumIncomingValues(); i != e; ++i)
          PN.addIncoming(InVal, BBPN->getIncomingBlock(i));

          PN.addIncoming(InVal, Pred);

  BB->replaceAllUsesWith(DestBB);
  BB->eraseFromParent();
  for (auto *ThisRelocate : AllRelocateCalls) {
    auto K = std::make_pair(ThisRelocate->getBasePtrIndex(),
                            ThisRelocate->getDerivedPtrIndex());
    RelocateIdxMap.insert(std::make_pair(K, ThisRelocate));

  for (auto &Item : RelocateIdxMap) {
    std::pair<unsigned, unsigned> Key = Item.first;
    if (Key.first == Key.second)

    auto BaseKey = std::make_pair(Key.first, Key.first);

    auto MaybeBase = RelocateIdxMap.find(BaseKey);
    if (MaybeBase == RelocateIdxMap.end())

    RelocateInstMap[MaybeBase->second].push_back(I);

  for (unsigned i = 1; i < GEP->getNumOperands(); i++) {
    auto *Op = dyn_cast<ConstantInt>(GEP->getOperand(i));
    if (!Op || Op->getZExtValue() > 20)

  for (unsigned i = 1; i < GEP->getNumOperands(); i++)
    OffsetV.push_back(GEP->getOperand(i));

  bool MadeChange = false;
       &*R != RelocatedBase; ++R)
    if (auto *RI = dyn_cast<GCRelocateInst>(R))

           "Not relocating a derived object of the original base object");
    if (ToReplace->getBasePtrIndex() == ToReplace->getDerivedPtrIndex()) {

    Value *Base = ToReplace->getBasePtr();
    auto *Derived = dyn_cast<GetElementPtrInst>(ToReplace->getDerivedPtr());
    if (!Derived || Derived->getPointerOperand() != Base)

           "Should always have one since it's not a terminator");

    Builder.SetCurrentDebugLocation(ToReplace->getDebugLoc());

    Value *ActualRelocatedBase = RelocatedBase;
      ActualRelocatedBase =
          Derived->getSourceElementType(), ActualRelocatedBase,
          makeArrayRef(OffsetV));

    Value *ActualReplacement = Replacement;
    if (Replacement->getType() != ToReplace->getType()) {

    ToReplace->replaceAllUsesWith(ActualReplacement);
    ToReplace->eraseFromParent();

  bool MadeChange = false;
    for (auto *U : I.users())
        AllRelocateCalls.push_back(Relocate);

  if (AllRelocateCalls.size() < 2)

  if (RelocateInstMap.empty())

  for (auto &Item : RelocateInstMap)

  bool MadeChange = false;
    Use &TheUse = UI.getUse();
      UserBB = PN->getIncomingBlock(TheUse);

    if (User->isEHPad())

    if (UserBB == DefBB)
      continue;

    CastInst *&InsertedCast = InsertedCasts[UserBB];

    if (!InsertedCast) {
                                      CI->getType(), "", &*InsertPt);

    TheUse = InsertedCast;

  if (auto *ASC = dyn_cast<AddrSpaceCastInst>(CI)) {
                                 ASC->getDestAddressSpace()))

  if (SrcVT.bitsLT(DstVT))
    return false;
      match(IVInc, m_ExtractValue<0>(m_Intrinsic<Intrinsic::uadd_with_overflow>(
      match(IVInc, m_ExtractValue<0>(m_Intrinsic<Intrinsic::usub_with_overflow>(

  if (!IVInc || LI->getLoopFor(IVInc->getParent()) != L)

  return std::make_pair(IVInc, Step);

  auto *I = dyn_cast<Instruction>(V);
  if (auto *PN = dyn_cast<PHINode>(LHS))
    return IVInc->first == I;
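// The helpers above recognise a loop induction-variable increment: an
// instruction feeding a loop PHI whose step is either a plain add/sub or the
// value extracted from llvm.uadd.with.overflow / llvm.usub.with.overflow.
// They return the (increment, step) pair only when the increment lives in the
// same loop as the PHI.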
bool CodeGenPrepare::replaceMathCmpWithIntrinsic(BinaryOperator *BO,
  assert(L && "L should not be null after isIVIncrement()");

  if (BO->getParent() != Cmp->getParent() && !IsReplacableIVIncrement(BO)) {

      IID == Intrinsic::usub_with_overflow) {
    assert(isa<Constant>(Arg1) && "Unexpected input for usubo");

    if ((BO->getOpcode() != Instruction::Xor && &Iter == BO) || &Iter == Cmp) {

  assert(InsertPt != nullptr && "Parent block did not contain cmp or binop");

  Value *MathOV = Builder.CreateBinaryIntrinsic(IID, Arg0, Arg1);
  if (BO->getOpcode() != Instruction::Xor) {
    Value *Math = Builder.CreateExtractValue(MathOV, 0, "math");
           "Patterns with XOr should use the BO only in the compare");
  Value *OV = Builder.CreateExtractValue(MathOV, 1, "ov");
  Cmp->replaceAllUsesWith(OV);
  Cmp->eraseFromParent();
  Value *A = Cmp->getOperand(0), *B = Cmp->getOperand(1);

  if (isa<Constant>(A))

  for (User *U : A->users()) {
      Add = cast<BinaryOperator>(U);

bool CodeGenPrepare::combineToUAddWithOverflow(CmpInst *Cmp,
    A = Add->getOperand(0);
    B = Add->getOperand(1);
       Add->hasNUsesOrMore(2)))

  if (Add->getParent() != Cmp->getParent() && !Add->hasOneUse())

  if (!replaceMathCmpWithIntrinsic(Add, A, B, Cmp,
                                   Intrinsic::uadd_with_overflow))
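// Illustrative only: the overall effect of combineToUAddWithOverflow is to
// rewrite an add whose unsigned compare feeds the overflow check, e.g.
//   %add = add i32 %a, %b
//   %ov  = icmp ult i32 %add, %a
// into a single overflow intrinsic (the operand order here is a sketch, not
// copied verbatim from this pass):
//   %res = call { i32, i1 } @llvm.uadd.with.overflow.i32(i32 %a, i32 %b)
//   %add = extractvalue { i32, i1 } %res, 0   ; "math"
//   %ov  = extractvalue { i32, i1 } %res, 1   ; "ov"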
bool CodeGenPrepare::combineToUSubWithOverflow(CmpInst *Cmp,
  if (isa<Constant>(A) && isa<Constant>(B))

  Value *CmpVariableOperand = isa<Constant>(A) ? B : A;

  for (User *U : CmpVariableOperand->users()) {
      Sub = cast<BinaryOperator>(U);

    const APInt *CmpC, *AddC;
      Sub = cast<BinaryOperator>(U);

                                   Cmp, Intrinsic::usub_with_overflow))

  bool MadeChange = false;
    Use &TheUse = UI.getUse();

    if (isa<PHINode>(User))

    if (UserBB == DefBB)
      continue;

    CmpInst *&InsertedCmp = InsertedCmps[UserBB];
                             Cmp->getOperand(0), Cmp->getOperand(1), "",

    TheUse = InsertedCmp;

  if (Cmp->use_empty()) {
    Cmp->eraseFromParent();

  for (User *U : Cmp->users()) {
    if (isa<BranchInst>(U))
    if (isa<SelectInst>(U) && cast<SelectInst>(U)->getCondition() == Cmp)

  if (CmpBB != FalseBB)

  Value *CmpOp0 = Cmp->getOperand(0), *CmpOp1 = Cmp->getOperand(1);

  for (User *U : Cmp->users()) {
    if (auto *BI = dyn_cast<BranchInst>(U)) {
    if (auto *SI = dyn_cast<SelectInst>(U)) {
      SI->swapProfMetadata();

bool CodeGenPrepare::optimizeCmp(CmpInst *Cmp, bool &ModifiedDT) {
  if (combineToUAddWithOverflow(Cmp, ModifiedDT))
  if (combineToUSubWithOverflow(Cmp, ModifiedDT))

                                SetOfInstrs &InsertedInsts) {
  assert(!InsertedInsts.count(AndI) &&
         "Attempting to optimize already optimized and instruction");
  (void) InsertedInsts;

  if (!isa<ConstantInt>(AndI->getOperand(0)) &&

  for (auto *U : AndI->users()) {
    if (!isa<ICmpInst>(User))
    if (!CmpC || !CmpC->isZero())

    Use &TheUse = UI.getUse();
    TheUse = InsertedAnd;

  if (!isa<TruncInst>(User)) {
    if (User->getOpcode() != Instruction::And ||
    if ((Cimm & (Cimm + 1)).getBoolValue())

  auto *TruncI = cast<TruncInst>(User);
  bool MadeChange = false;

       TruncE = TruncI->user_end();
       TruncUI != TruncE;) {
    Use &TruncTheUse = TruncUI.getUse();
    Instruction *TruncUser = cast<Instruction>(*TruncUI);

    if (isa<PHINode>(TruncUser))
    if (UserBB == TruncUserBB)

    CastInst *&InsertedTrunc = InsertedTruncs[TruncUserBB];

    if (!InsertedShift && !InsertedTrunc) {
      if (ShiftI->getOpcode() == Instruction::AShr)
        InsertedShift = BinaryOperator::CreateAShr(ShiftI->getOperand(0), CI,
        InsertedShift = BinaryOperator::CreateLShr(ShiftI->getOperand(0), CI,

      assert(TruncInsertPt != TruncUserBB->end());
                                       TruncI->getType(), "", &*TruncInsertPt);
      InsertedTrunc->setDebugLoc(TruncI->getDebugLoc());

    TruncTheUse = InsertedTrunc;
  bool MadeChange = false;
    Use &TheUse = UI.getUse();

    if (isa<PHINode>(User))

    if (UserBB == DefBB) {
      if (isa<TruncInst>(User) && shiftIsLegal

    if (!InsertedShift) {
      if (ShiftI->getOpcode() == Instruction::AShr)
        InsertedShift = BinaryOperator::CreateAShr(ShiftI->getOperand(0), CI,
        InsertedShift = BinaryOperator::CreateLShr(ShiftI->getOperand(0), CI,

    TheUse = InsertedShift;

  if (Ty->isVectorTy() || SizeInBits > DL->getLargestLegalIntTypeSizeInBits())

  Builder.CreateCondBr(Cmp, EndBlock, CallBlock);

  CurInstIterator = BB->begin();

    if (optimizeInlineAsmInst(CI))

    unsigned MinSize, PrefAlign;
        if (!Arg->getType()->isPointerTy())
                cast<PointerType>(Arg->getType())->getAddressSpace()),
        uint64_t Offset2 = Offset.getLimitedValue();
        if ((Offset2 & (PrefAlign-1)) != 0)
        if ((AI = dyn_cast<AllocaInst>(Val)) && AI->getAlignment() < PrefAlign &&

      if (!MIDestAlign || DestAlign > *MIDestAlign)
        MI->setDestAlignment(DestAlign);
        MaybeAlign MTISrcAlign = MTI->getSourceAlign();
        if (!MTISrcAlign || SrcAlign > *MTISrcAlign)
          MTI->setSourceAlignment(SrcAlign);

      if (!Arg->getType()->isPointerTy())
      unsigned AS = Arg->getType()->getPointerAddressSpace();
      return optimizeMemoryInst(CI, Arg, Arg->getType(), AS);

    case Intrinsic::assume:
    case Intrinsic::experimental_widenable_condition: {
        resetIteratorIfInvalidatedWhileCalling(BB, [&]() {
    case Intrinsic::objectsize:
    case Intrinsic::is_constant:
    case Intrinsic::aarch64_stlxr:
    case Intrinsic::aarch64_stxr: {
      InsertedInsts.insert(ExtVal);
    case Intrinsic::launder_invariant_group:
    case Intrinsic::strip_invariant_group: {
      auto it = LargeOffsetGEPMap.find(II);
      if (it != LargeOffsetGEPMap.end()) {
        LargeOffsetGEPMap[ArgVal].append(GEPs.begin(), GEPs.end());
        LargeOffsetGEPMap.erase(II);
    case Intrinsic::cttz:
    case Intrinsic::ctlz:
    case Intrinsic::fshl:
    case Intrinsic::fshr:
      return optimizeFunnelShift(II);
    case Intrinsic::dbg_value:
      return fixupDbgValue(II);
    case Intrinsic::vscale: {
      Type *ScalableVectorTy =
      if (DL->getTypeAllocSize(ScalableVectorTy).getKnownMinSize() == 8) {
    case Intrinsic::masked_gather:
    case Intrinsic::masked_scatter:

    while (!PtrOps.empty()) {
      if (optimizeMemoryInst(II, PtrVal, AccessTy, AS))
bool CodeGenPrepare::dupRetToEnableTailCallOpts(BasicBlock *BB,
                                                bool &ModifiedDT) {
  ReturnInst *RetI = dyn_cast<ReturnInst>(BB->getTerminator());

    BCI = dyn_cast<BitCastInst>(V);
    EVI = dyn_cast<ExtractValueInst>(V);
    PN = dyn_cast<PHINode>(V);

  auto isLifetimeEndOrBitCastFor = [](const Instruction *Inst) {
    const BitCastInst *BC = dyn_cast<BitCastInst>(Inst);
    if (const IntrinsicInst *II = dyn_cast<IntrinsicInst>(Inst))

  while (isa<DbgInfoIntrinsic>(BI) || BI == BCI || BI == EVI ||
         isa<PseudoProbeInst>(BI) || isLifetimeEndOrBitCastFor(BI))

      CallInst *CI = dyn_cast<CallInst>(IncomingVal);
        TailCallBBs.push_back(PredBB);

      if (!VisitedBBs.insert(Pred).second)
      if (Instruction *I = Pred->rbegin()->getPrevNonDebugInstruction(true)) {
          TailCallBBs.push_back(Pred);

  bool Changed = false;
  for (auto const &TailCallBB : TailCallBBs) {
    BranchInst *BI = dyn_cast<BranchInst>(TailCallBB->getTerminator());
                BFI->getBlockFreq(BB) >= BFI->getBlockFreq(TailCallBB));
          (BFI->getBlockFreq(BB) - BFI->getBlockFreq(TailCallBB)).getFrequency());
    ModifiedDT = Changed = true;

    BB->eraseFromParent();
  Value *BaseReg = nullptr;
  Value *ScaledReg = nullptr;
  Value *OriginalValue = nullptr;
  bool InBounds = true;

    BaseRegField = 0x01,
    BaseOffsField = 0x04,
    ScaledRegField = 0x08,
    MultipleFields = 0xff
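  // ExtAddrMode describes a target addressing mode of the general form
  //   BaseGV + BaseReg + Scale * ScaledReg + BaseOffs,
  // and compare() below reports which single field differs between two such
  // modes, or MultipleFields when they cannot be combined into one.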
    if (BaseReg && other.BaseReg &&
      return MultipleFields;
    if (BaseGV && other.BaseGV &&
        BaseGV->getType() != other.BaseGV->getType())
      return MultipleFields;
      return MultipleFields;

    if (InBounds != other.InBounds)
      return MultipleFields;

    unsigned Result = NoField;
    if (BaseGV != other.BaseGV)
    if (BaseOffs != other.BaseOffs)
      Result |= ScaledRegField;

    if (Scale && other.Scale && Scale != other.Scale)
      return MultipleFields;

    return static_cast<FieldName>(Result);

    return !BaseOffs && !Scale && !(BaseGV && BaseReg);

    case ScaledRegField:

  void SetCombinedField(FieldName Field, Value *V,
    case ExtAddrMode::BaseRegField:
    case ExtAddrMode::BaseGVField:
      assert(BaseReg == nullptr);
    case ExtAddrMode::ScaledRegField:
    case ExtAddrMode::BaseOffsField:
      assert(ScaledReg == nullptr);

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
  bool NeedPlus = false;
    OS << (NeedPlus ? " + " : "")
    BaseGV->printAsOperand(OS, false);
    OS << (NeedPlus ? " + " : "")
    OS << (NeedPlus ? " + " : "")
    OS << (NeedPlus ? " + " : "")
class TypePromotionTransaction {
  class TypePromotionAction {
    TypePromotionAction(Instruction *Inst) : Inst(Inst) {}

    virtual ~TypePromotionAction() = default;

    virtual void undo() = 0;

    virtual void commit() {

  class InsertionHandler {
    bool HasPrevInstruction;

      if (HasPrevInstruction)
        Point.PrevInst = &*--It;

      if (HasPrevInstruction) {
        Instruction *Position = &*Point.BB->getFirstInsertionPt();

  class InstructionMoveBefore : public TypePromotionAction {
    InsertionHandler Position;

        : TypePromotionAction(Inst), Position(Inst) {
      LLVM_DEBUG(dbgs() << "Do: move: " << *Inst << "\nbefore: " << *Before

    void undo() override {
      Position.insert(Inst);

  class OperandSetter : public TypePromotionAction {
        : TypePromotionAction(Inst), Idx(Idx) {
                        << "for:" << *Inst << "\n"
                        << "with:" << *NewVal << "\n");

    void undo() override {
                        << "for: " << *Inst << "\n"
                        << "with: " << *Origin << "\n");

  class OperandsHider : public TypePromotionAction {
    OperandsHider(Instruction *Inst) : TypePromotionAction(Inst) {
      OriginalValues.reserve(NumOpnds);
      for (unsigned It = 0; It < NumOpnds; ++It) {
        OriginalValues.push_back(Val);

    void undo() override {
      for (unsigned It = 0, EndIt = OriginalValues.size(); It != EndIt; ++It)

  class TruncBuilder : public TypePromotionAction {
    TruncBuilder(Instruction *Opnd, Type *Ty) : TypePromotionAction(Opnd) {
      Val = Builder.CreateTrunc(Opnd, Ty, "promoted");

    Value *getBuiltValue() { return Val; }

    void undo() override {
      if (Instruction *IVal = dyn_cast<Instruction>(Val))
        IVal->eraseFromParent();

  class SExtBuilder : public TypePromotionAction {
        : TypePromotionAction(InsertPt) {
      Val = Builder.CreateSExt(Opnd, Ty, "promoted");

    Value *getBuiltValue() { return Val; }

    void undo() override {
      if (Instruction *IVal = dyn_cast<Instruction>(Val))
        IVal->eraseFromParent();

  class ZExtBuilder : public TypePromotionAction {
        : TypePromotionAction(InsertPt) {
      Val = Builder.CreateZExt(Opnd, Ty, "promoted");

    Value *getBuiltValue() { return Val; }

    void undo() override {
      if (Instruction *IVal = dyn_cast<Instruction>(Val))
        IVal->eraseFromParent();
  class TypeMutator : public TypePromotionAction {
        : TypePromotionAction(Inst), OrigTy(Inst->getType()) {
      LLVM_DEBUG(dbgs() << "Do: MutateType: " << *Inst << " with " << *NewTy

    void undo() override {
      LLVM_DEBUG(dbgs() << "Undo: MutateType: " << *Inst << " with " << *OrigTy

  class UsesReplacer : public TypePromotionAction {
    struct InstructionAndIdx {
      InstructionAndIdx(Instruction *Inst, unsigned Idx)
          : Inst(Inst), Idx(Idx) {}

        : TypePromotionAction(Inst), New(New) {
      LLVM_DEBUG(dbgs() << "Do: UsersReplacer: " << *Inst << " with " << *New
        Instruction *UserI = cast<Instruction>(U.getUser());
        OriginalUses.push_back(InstructionAndIdx(UserI, U.getOperandNo()));

    void undo() override {
      for (InstructionAndIdx &Use : OriginalUses)
        Use.Inst->setOperand(Use.Idx, Inst);
      for (auto *DVI : DbgValues)
        DVI->replaceVariableLocationOp(New, Inst);

  class InstructionRemover : public TypePromotionAction {
    InsertionHandler Inserter;

    OperandsHider Hider;

    UsesReplacer *Replacer = nullptr;

    SetOfInstrs &RemovedInsts;

    InstructionRemover(Instruction *Inst, SetOfInstrs &RemovedInsts,
                       Value *New = nullptr)
        : TypePromotionAction(Inst), Inserter(Inst), Hider(Inst),
          RemovedInsts(RemovedInsts) {
        Replacer = new UsesReplacer(Inst, New);
      LLVM_DEBUG(dbgs() << "Do: InstructionRemover: " << *Inst << "\n");
      RemovedInsts.insert(Inst);

    ~InstructionRemover() override { delete Replacer; }

    void undo() override {
      LLVM_DEBUG(dbgs() << "Undo: InstructionRemover: " << *Inst << "\n");
      Inserter.insert(Inst);
      RemovedInsts.erase(Inst);

  using ConstRestorationPt = const TypePromotionAction *;

  TypePromotionTransaction(SetOfInstrs &RemovedInsts)
      : RemovedInsts(RemovedInsts) {}

  void rollback(ConstRestorationPt Point);

  ConstRestorationPt getRestorationPoint() const;

  SetOfInstrs &RemovedInsts;
void TypePromotionTransaction::setOperand(Instruction *Inst, unsigned Idx,
  Actions.push_back(std::make_unique<TypePromotionTransaction::OperandSetter>(
      Inst, Idx, NewVal));

      std::make_unique<TypePromotionTransaction::InstructionRemover>(
          Inst, RemovedInsts, NewVal));

void TypePromotionTransaction::replaceAllUsesWith(Instruction *Inst,
      std::make_unique<TypePromotionTransaction::UsesReplacer>(Inst, New));

void TypePromotionTransaction::mutateType(Instruction *Inst, Type *NewTy) {
      std::make_unique<TypePromotionTransaction::TypeMutator>(Inst, NewTy));

  std::unique_ptr<TruncBuilder> Ptr(new TruncBuilder(Opnd, Ty));
  Value *Val = Ptr->getBuiltValue();

  std::unique_ptr<SExtBuilder> Ptr(new SExtBuilder(Inst, Opnd, Ty));
  Value *Val = Ptr->getBuiltValue();

  std::unique_ptr<ZExtBuilder> Ptr(new ZExtBuilder(Inst, Opnd, Ty));
  Value *Val = Ptr->getBuiltValue();

void TypePromotionTransaction::moveBefore(Instruction *Inst,
      std::make_unique<TypePromotionTransaction::InstructionMoveBefore>(

TypePromotionTransaction::ConstRestorationPt
TypePromotionTransaction::getRestorationPoint() const {
  return !Actions.empty() ? Actions.back().get() : nullptr;

bool TypePromotionTransaction::commit() {
  for (std::unique_ptr<TypePromotionAction> &Action : Actions)

void TypePromotionTransaction::rollback(
    TypePromotionTransaction::ConstRestorationPt Point) {
  while (!Actions.empty() && Point != Actions.back().get()) {
    std::unique_ptr<TypePromotionAction> Curr = Actions.pop_back_val();
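// Typical use of the transaction by the matchers below (a sketch assembled
// from the call sites in this file, not a verbatim excerpt):
//   TypePromotionTransaction TPT(RemovedInsts);
//   TypePromotionTransaction::ConstRestorationPt LastKnownGood =
//       TPT.getRestorationPoint();
//   // ... speculatively mutate / replace / move instructions via TPT ...
//   if (!Profitable)
//     TPT.rollback(LastKnownGood);  // undo everything after the checkpoint
//   else
//     TPT.commit();                 // make the recorded changes permanent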
class AddressingModeMatcher {
  const SetOfInstrs &InsertedInsts;

  InstrToOrigTy &PromotedInsts;

  TypePromotionTransaction &TPT;

  std::pair<AssertingVH<GetElementPtrInst>, int64_t> &LargeOffsetGEP;

  bool IgnoreProfitability;

  AddressingModeMatcher(
      const SetOfInstrs &InsertedInsts, InstrToOrigTy &PromotedInsts,
      TypePromotionTransaction &TPT,
      : AddrModeInsts(AMI), TLI(TLI), TRI(TRI),
        DL(MI->getModule()->getDataLayout()), LI(LI), getDTFn(getDTFn),
        AccessTy(AT), AddrSpace(AS), MemoryInst(MI), AddrMode(AM),
        InsertedInsts(InsertedInsts), PromotedInsts(PromotedInsts), TPT(TPT),
        LargeOffsetGEP(LargeOffsetGEP), OptSize(OptSize), PSI(PSI), BFI(BFI) {
    IgnoreProfitability = false;

                        InstrToOrigTy &PromotedInsts, TypePromotionTransaction &TPT,
    bool Success = AddressingModeMatcher(
        AddrModeInsts, TLI, TRI, LI, getDTFn, AccessTy, AS, MemoryInst, Result,
        InsertedInsts, PromotedInsts, TPT, LargeOffsetGEP, OptSize, PSI,
        BFI).matchAddr(V, 0);

  bool matchScaledValue(Value *ScaleReg, int64_t Scale, unsigned Depth);
  bool matchOperationAddr(User *AddrInst, unsigned Opcode, unsigned Depth,
                          bool *MovedAway = nullptr);
  bool isProfitableToFoldIntoAddressingMode(Instruction *I,
  bool valueAlreadyLiveAtInst(Value *Val, Value *KnownLive1, Value *KnownLive2);
  bool isPromotionProfitable(unsigned NewCost, unsigned OldCost,
                             Value *PromotedOperand) const;
class PhiNodeSetIterator {
  PhiNodeSet *const Set;
  size_t CurrentIndex = 0;

  PhiNodeSetIterator(PhiNodeSet *const Set, size_t Start);
  PhiNodeSetIterator& operator++();
  bool operator==(const PhiNodeSetIterator &RHS) const;
  bool operator!=(const PhiNodeSetIterator &RHS) const;

  friend class PhiNodeSetIterator;

  using iterator = PhiNodeSetIterator;

  size_t FirstValidElement = 0;

    if (NodeMap.insert(std::make_pair(Ptr, NodeList.size())).second) {

    if (NodeMap.erase(Ptr)) {
      SkipRemovedElements(FirstValidElement);

    FirstValidElement = 0;

    if (FirstValidElement == 0)
      SkipRemovedElements(FirstValidElement);
    return PhiNodeSetIterator(this, FirstValidElement);

  iterator end() { return PhiNodeSetIterator(this, NodeList.size()); }

  size_t size() const {
    return NodeMap.size();

    return NodeMap.count(Ptr);

  void SkipRemovedElements(size_t &CurrentIndex) {
    while (CurrentIndex < NodeList.size()) {
      auto it = NodeMap.find(NodeList[CurrentIndex]);
      if (it != NodeMap.end() && it->second == CurrentIndex)

PhiNodeSetIterator::PhiNodeSetIterator(PhiNodeSet *const Set, size_t Start)
    : Set(Set), CurrentIndex(Start) {}

         "PhiNodeSet access out of range");
  return Set->NodeList[CurrentIndex];

PhiNodeSetIterator& PhiNodeSetIterator::operator++() {
         "PhiNodeSet access out of range");
  Set->SkipRemovedElements(CurrentIndex);

  return CurrentIndex == RHS.CurrentIndex;

  return !((*this) == RHS);
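// Usage sketch (illustrative, not copied from this file): PhiNodeSet keeps
// PHI nodes in insertion order in NodeList, with NodeMap allowing cheap
// erasure; erased slots are skipped lazily while iterating.
//   PhiNodeSet Phis;
//   Phis.insert(PN);           // true if PN was not already in the set
//   Phis.erase(PN);            // marks the entry removed
//   for (PHINode *P : Phis)    // iteration skips removed entries
//     ;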
class SimplificationTracker {
  PhiNodeSet AllPhiNodes;

    auto SV = Storage.find(V);
    if (SV == Storage.end())

    WorkList.push_back(Val);
    while (!WorkList.empty()) {
      if (!Visited.insert(P).second)
      if (auto *PI = dyn_cast<Instruction>(P))
          for (auto *U : PI->users())
            WorkList.push_back(cast<Value>(U));
          PI->replaceAllUsesWith(V);
          if (auto *PHI = dyn_cast<PHINode>(PI))
            AllPhiNodes.erase(PHI);
          if (auto *Select = dyn_cast<SelectInst>(PI))
          PI->eraseFromParent();

    while (OldReplacement != From) {
      To = dyn_cast<PHINode>(OldReplacement);
      OldReplacement = Get(From);
    assert(To && Get(To) == To && "Replacement PHI node is already replaced.");

    From->replaceAllUsesWith(To);
    AllPhiNodes.erase(From);
    From->eraseFromParent();

  PhiNodeSet& newPhiNodes() { return AllPhiNodes; }

  void insertNewPhi(PHINode *PN) { AllPhiNodes.insert(PN); }

  unsigned countNewPhiNodes() const { return AllPhiNodes.size(); }

  unsigned countNewSelectNodes() const { return AllSelectNodes.size(); }

  void destroyNewNodes(Type *CommonType) {
    for (auto *I : AllPhiNodes) {
      I->replaceAllUsesWith(Dummy);
      I->eraseFromParent();
    AllPhiNodes.clear();
    for (auto *I : AllSelectNodes) {
      I->replaceAllUsesWith(Dummy);
      I->eraseFromParent();
    AllSelectNodes.clear();
class AddressingModeCombiner {
  typedef std::pair<PHINode *, PHINode *> PHIPair;

  ExtAddrMode::FieldName DifferentField = ExtAddrMode::NoField;

  bool AllAddrModesTrivial = true;

      : CommonType(nullptr), SQ(_SQ), Original(OriginalValue) {}

    return AddrModes[0];

    AllAddrModesTrivial = AllAddrModesTrivial && NewAddrMode.isTrivial();

    if (AddrModes.empty()) {

    ExtAddrMode::FieldName ThisDifferentField =
        AddrModes[0].compare(NewAddrMode);
    if (DifferentField == ExtAddrMode::NoField)
      DifferentField = ThisDifferentField;
    else if (DifferentField != ThisDifferentField)
      DifferentField = ExtAddrMode::MultipleFields;

    bool CanHandle = DifferentField != ExtAddrMode::MultipleFields;

    CanHandle = CanHandle && DifferentField != ExtAddrMode::ScaleField;

    CanHandle = CanHandle && (DifferentField != ExtAddrMode::BaseOffsField ||

    CanHandle = CanHandle && (DifferentField != ExtAddrMode::BaseGVField ||
                              !NewAddrMode.HasBaseReg);

  bool combineAddrModes() {
    if (AddrModes.size() == 0)

    if (AddrModes.size() == 1 || DifferentField == ExtAddrMode::NoField)

    if (AllAddrModesTrivial)

    if (!addrModeCombiningAllowed())

    FoldAddrToValueMapping Map;
    if (!initializeMap(Map))

    Value *CommonValue = findCommon(Map);
      AddrModes[0].SetCombinedField(DifferentField, CommonValue, AddrModes);
    return CommonValue != nullptr;

  bool initializeMap(FoldAddrToValueMapping &Map) {
    for (auto &AM : AddrModes) {
      Value *DV = AM.GetFieldAsValue(DifferentField, IntPtrTy);
        if (CommonType && CommonType != Type)
        Map[AM.OriginalValue] = DV;
        NullValue.push_back(AM.OriginalValue);
    assert(CommonType && "At least one non-null value must be!");
    for (auto *V : NullValue)
      Map[V] = Constant::getNullValue(CommonType);

  Value *findCommon(FoldAddrToValueMapping &Map) {
    SimplificationTracker ST(SQ);

    InsertPlaceholders(Map, TraverseOrder, ST);

    FillPlaceholders(Map, TraverseOrder, ST);

      ST.destroyNewNodes(CommonType);

    unsigned PhiNotMatchedCount = 0;
      ST.destroyNewNodes(CommonType);

    auto *Result = ST.Get(Map.find(Original)->second);

    NumMemoryInstsPhiCreated += ST.countNewPhiNodes() + PhiNotMatchedCount;
    NumMemoryInstsSelectCreated += ST.countNewSelectNodes();
                    PhiNodeSet &PhiNodesToMatch) {
    Matcher.insert({ PHI, Candidate });
    WorkList.push_back({ PHI, Candidate });

    while (!WorkList.empty()) {
      if (!Visited.insert(Item).second)

      for (auto B : Item.first->blocks()) {
        Value *FirstValue = Item.first->getIncomingValueForBlock(B);
        Value *SecondValue = Item.second->getIncomingValueForBlock(B);
        if (FirstValue == SecondValue)

        PHINode *FirstPhi = dyn_cast<PHINode>(FirstValue);
        PHINode *SecondPhi = dyn_cast<PHINode>(SecondValue);

        if (!FirstPhi || !SecondPhi || !PhiNodesToMatch.count(FirstPhi) ||

        if (Matcher.count({ FirstPhi, SecondPhi }))

        if (MatchedPHIs.insert(FirstPhi).second)
          Matcher.insert({ FirstPhi, SecondPhi });

        WorkList.push_back({ FirstPhi, SecondPhi });

  bool MatchPhiSet(SimplificationTracker &ST, bool AllowNewPhiNodes,
                   unsigned &PhiNotMatchedCount) {
    PhiNodeSet &PhiNodesToMatch = ST.newPhiNodes();
    while (PhiNodesToMatch.size()) {
      PHINode *PHI = *PhiNodesToMatch.begin();

      WillNotMatch.clear();
      WillNotMatch.insert(PHI);

      bool IsMatched = false;
        if ((IsMatched = MatchPhiNode(PHI, &P, Matched, PhiNodesToMatch)))

        for (auto M : Matched)

        for (auto MV : Matched)
          ST.ReplacePhi(MV.first, MV.second);

      if (!AllowNewPhiNodes)

      PhiNotMatchedCount += WillNotMatch.size();
      for (auto *P : WillNotMatch)
        PhiNodesToMatch.erase(P);
  void FillPlaceholders(FoldAddrToValueMapping &Map,
                        SimplificationTracker &ST) {
    while (!TraverseOrder.empty()) {
      assert(Map.find(Current) != Map.end() && "No node to fill!!!");

        auto *CurrentSelect = cast<SelectInst>(Current);
        auto *TrueValue = CurrentSelect->getTrueValue();
        assert(Map.find(TrueValue) != Map.end() && "No True Value!");
        Select->setTrueValue(ST.Get(Map[TrueValue]));
        auto *FalseValue = CurrentSelect->getFalseValue();
        assert(Map.find(FalseValue) != Map.end() && "No False Value!");
        Select->setFalseValue(ST.Get(Map[FalseValue]));

        auto *PHI = cast<PHINode>(V);
          Value *PV = cast<PHINode>(Current)->getIncomingValueForBlock(B);
          assert(Map.find(PV) != Map.end() && "No predecessor Value!");

      Map[Current] = ST.Simplify(V);

  void InsertPlaceholders(FoldAddrToValueMapping &Map,
                          SimplificationTracker &ST) {
    assert((isa<PHINode>(Original) || isa<SelectInst>(Original)) &&
           "Address must be a Phi or Select node");

    Worklist.push_back(Original);
    while (!Worklist.empty()) {
      if (Map.find(Current) != Map.end())

      TraverseOrder.push_back(Current);

      if (SelectInst *CurrentSelect = dyn_cast<SelectInst>(Current)) {
            CurrentSelect->getName(), CurrentSelect, CurrentSelect);

        Worklist.push_back(CurrentSelect->getTrueValue());
        Worklist.push_back(CurrentSelect->getFalseValue());

        PHINode *CurrentPhi = cast<PHINode>(Current);
            PHINode::Create(CommonType, PredCount, "sunk_phi", CurrentPhi);
        ST.insertNewPhi(PHI);

  bool addrModeCombiningAllowed() {
    switch (DifferentField) {
    case ExtAddrMode::BaseRegField:
    case ExtAddrMode::BaseGVField:
    case ExtAddrMode::BaseOffsField:
    case ExtAddrMode::ScaledRegField:
bool AddressingModeMatcher::matchScaledValue(Value *ScaleReg, int64_t Scale,
    return matchAddr(ScaleReg, Depth);

  TestAddrMode.Scale += Scale;

  if (isa<Instruction>(ScaleReg) &&
    TestAddrMode.InBounds = false;
    AddrModeInsts.push_back(cast<Instruction>(ScaleReg));

  auto GetConstantStep = [this](const Value * V)
      -> Optional<std::pair<Instruction *, APInt> > {
    auto *PN = dyn_cast<PHINode>(V);
    if (auto *OIVInc = dyn_cast<OverflowingBinaryOperator>(IVInc->first))
      if (OIVInc->hasNoSignedWrap() || OIVInc->hasNoUnsignedWrap())
    if (auto *ConstantStep = dyn_cast<ConstantInt>(IVInc->second))
      return std::make_pair(IVInc->first, ConstantStep->getValue());

  if (auto IVStep = GetConstantStep(ScaleReg)) {
    APInt Step = IVStep->second;
    if (Offset.isSignedIntN(64)) {
      TestAddrMode.InBounds = false;
      TestAddrMode.BaseOffs -= Offset.getLimitedValue();
        getDTFn().dominates(IVInc, MemoryInst)) {
      AddrModeInsts.push_back(cast<Instruction>(IVInc));

  switch (I->getOpcode()) {
  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
    if (I->getType() == I->getOperand(0)->getType())
    return I->getType()->isIntOrPtrTy();
  case Instruction::PtrToInt:
  case Instruction::IntToPtr:
  case Instruction::Mul:
  case Instruction::Shl:
    return isa<ConstantInt>(I->getOperand(1));
  case Instruction::GetElementPtr:

  Instruction *PromotedInst = dyn_cast<Instruction>(Val);
class TypePromotionHelper {
  static void addPromotedInst(InstrToOrigTy &PromotedInsts,
    ExtType ExtTy = IsSExt ? SignExtension : ZeroExtension;
    InstrToOrigTy::iterator It = PromotedInsts.find(ExtOpnd);
    if (It != PromotedInsts.end()) {
      if (It->second.getInt() == ExtTy)
        ExtTy = BothExtension;
    PromotedInsts[ExtOpnd] = TypeIsSExt(ExtOpnd->getType(), ExtTy);

  static const Type *getOrigType(const InstrToOrigTy &PromotedInsts,
    ExtType ExtTy = IsSExt ? SignExtension : ZeroExtension;
    InstrToOrigTy::const_iterator It = PromotedInsts.find(Opnd);
    if (It != PromotedInsts.end() && It->second.getInt() == ExtTy)
      return It->second.getPointer();

  static bool canGetThrough(const Instruction *Inst, Type *ConsideredExtType,
                            const InstrToOrigTy &PromotedInsts, bool IsSExt);

  static bool shouldExtOperand(const Instruction *Inst, int OpIdx) {
    return !(isa<SelectInst>(Inst) && OpIdx == 0);

  static Value *promoteOperandForTruncAndAnyExt(
      InstrToOrigTy &PromotedInsts, unsigned &CreatedInstsCost,

                                     TypePromotionTransaction &TPT,
                                     InstrToOrigTy &PromotedInsts,
                                     unsigned &CreatedInstsCost,

  static Value *signExtendOperandForOther(
      InstrToOrigTy &PromotedInsts, unsigned &CreatedInstsCost,
    return promoteOperandForOther(Ext, TPT, PromotedInsts, CreatedInstsCost,
                                  Exts, Truncs, TLI, true);

  static Value *zeroExtendOperandForOther(
      InstrToOrigTy &PromotedInsts, unsigned &CreatedInstsCost,
    return promoteOperandForOther(Ext, TPT, PromotedInsts, CreatedInstsCost,
                                  Exts, Truncs, TLI, false);

                              InstrToOrigTy &PromotedInsts,
                              unsigned &CreatedInstsCost,

  static Action getAction(Instruction *Ext, const SetOfInstrs &InsertedInsts,
                          const InstrToOrigTy &PromotedInsts);

bool TypePromotionHelper::canGetThrough(const Instruction *Inst,
                                        Type *ConsideredExtType,
                                        const InstrToOrigTy &PromotedInsts,
  if (isa<ZExtInst>(Inst))
  if (IsSExt && isa<SExtInst>(Inst))

  if (isa_and_nonnull<OverflowingBinaryOperator>(BinOp) &&

  if ((Inst->getOpcode() == Instruction::And ||

  if (Inst->getOpcode() == Instruction::Xor) {

  if (Inst->getOpcode() == Instruction::LShr && !IsSExt)

  const auto *ExtInst = cast<const Instruction>(*Inst->user_begin());
  if (ExtInst->hasOneUse()) {
    const auto *AndInst = dyn_cast<const Instruction>(*ExtInst->user_begin());
    if (AndInst && AndInst->getOpcode() == Instruction::And) {
      const auto *Cst = dyn_cast<ConstantInt>(AndInst->getOperand(1));

  if (!isa<TruncInst>(Inst))

  Instruction *Opnd = dyn_cast<Instruction>(OpndVal);

  const Type *OpndType = getOrigType(PromotedInsts, Opnd, IsSExt);
  else if ((IsSExt && isa<SExtInst>(Opnd)) || (!IsSExt && isa<ZExtInst>(Opnd)))

TypePromotionHelper::Action TypePromotionHelper::getAction(
         "Unexpected instruction type");
  Instruction *ExtOpnd = dyn_cast<Instruction>(Ext->getOperand(0));
  bool IsSExt = isa<SExtInst>(Ext);

  if (!ExtOpnd || !canGetThrough(ExtOpnd, ExtTy, PromotedInsts, IsSExt))

  if (isa<TruncInst>(ExtOpnd) && InsertedInsts.count(ExtOpnd))

  if (isa<SExtInst>(ExtOpnd) || isa<TruncInst>(ExtOpnd) ||
      isa<ZExtInst>(ExtOpnd))
    return promoteOperandForTruncAndAnyExt;

  return IsSExt ? signExtendOperandForOther : zeroExtendOperandForOther;
Value *TypePromotionHelper::promoteOperandForTruncAndAnyExt(
    InstrToOrigTy &PromotedInsts, unsigned &CreatedInstsCost,
  Value *ExtVal = SExt;
  bool HasMergedNonFreeExt = false;
  if (isa<ZExtInst>(SExtOpnd)) {
    HasMergedNonFreeExt = !TLI.isExtFree(SExtOpnd);
    TPT.replaceAllUsesWith(SExt, ZExt);
    TPT.eraseInstruction(SExt);
    TPT.setOperand(SExt, 0, SExtOpnd->getOperand(0));

  CreatedInstsCost = 0;
    TPT.eraseInstruction(SExtOpnd);

  Instruction *ExtInst = dyn_cast<Instruction>(ExtVal);
      Exts->push_back(ExtInst);
    CreatedInstsCost = !TLI.isExtFree(ExtInst) && !HasMergedNonFreeExt;

  TPT.eraseInstruction(ExtInst, NextVal);

Value *TypePromotionHelper::promoteOperandForOther(
    InstrToOrigTy &PromotedInsts, unsigned &CreatedInstsCost,
  CreatedInstsCost = 0;

    if (Instruction *ITrunc = dyn_cast<Instruction>(Trunc)) {
      ITrunc->moveAfter(ExtOpnd);
        Truncs->push_back(ITrunc);

    TPT.replaceAllUsesWith(ExtOpnd, Trunc);

    TPT.setOperand(Ext, 0, ExtOpnd);

  addPromotedInst(PromotedInsts, ExtOpnd, IsSExt);

  TPT.mutateType(ExtOpnd, Ext->getType());

  TPT.replaceAllUsesWith(Ext, ExtOpnd);

  for (int OpIdx = 0, EndOpIdx = ExtOpnd->getNumOperands(); OpIdx != EndOpIdx;
        !shouldExtOperand(ExtOpnd, OpIdx)) {

    if (const ConstantInt *Cst = dyn_cast<ConstantInt>(Opnd)) {
      unsigned BitWidth = Ext->getType()->getIntegerBitWidth();

    if (isa<UndefValue>(Opnd)) {

    Value *ValForExtOpnd = IsSExt ? TPT.createSExt(Ext, Opnd, Ext->getType())
                                  : TPT.createZExt(Ext, Opnd, Ext->getType());
    if (!isa<Instruction>(ValForExtOpnd)) {
      TPT.setOperand(ExtOpnd, OpIdx, ValForExtOpnd);

    ExtForOpnd = cast<Instruction>(ValForExtOpnd);

      Exts->push_back(ExtForOpnd);
    TPT.setOperand(ExtForOpnd, 0, Opnd);

    TPT.moveBefore(ExtForOpnd, ExtOpnd);
    TPT.setOperand(ExtOpnd, OpIdx, ExtForOpnd);
    CreatedInstsCost += !TLI.isExtFree(ExtForOpnd);

    ExtForOpnd = nullptr;

  if (ExtForOpnd == Ext) {
    TPT.eraseInstruction(Ext);
bool AddressingModeMatcher::isPromotionProfitable(
    unsigned NewCost, unsigned OldCost, Value *PromotedOperand) const {
  LLVM_DEBUG(dbgs() << "OldCost: " << OldCost << "\tNewCost: " << NewCost
  if (NewCost > OldCost)
  if (NewCost < OldCost)

bool AddressingModeMatcher::matchOperationAddr(User *AddrInst, unsigned Opcode,
  if (Depth >= 5)
    return false;

  case Instruction::PtrToInt:
  case Instruction::IntToPtr: {
    auto PtrTy = MVT::getIntegerVT(DL.getPointerSizeInBits(AS));
  case Instruction::BitCast:
  case Instruction::AddrSpaceCast: {
    unsigned OldSize = AddrModeInsts.size();

    TypePromotionTransaction::ConstRestorationPt LastKnownGood =
        TPT.getRestorationPoint();

      AddrModeInsts.resize(OldSize);
      TPT.rollback(LastKnownGood);

      AddrModeInsts.resize(OldSize);
      TPT.rollback(LastKnownGood);

  case Instruction::Mul:
  case Instruction::Shl: {
    if (Opcode == Instruction::Shl)
      Scale = 1LL << Scale;
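    // For example (illustrative): an address component of the form
    //   %idx = shl i64 %i, 3
    // is handled here exactly like "mul i64 %i, 8", i.e. it offers
    // ScaledReg = %i with Scale = 8 to the addressing mode being matched.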
  case Instruction::GetElementPtr: {
    int VariableOperand = -1;
    unsigned VariableScale = 0;

    int64_t ConstantOffset = 0;
            cast<ConstantInt>(AddrInst->getOperand(i))->getZExtValue();

        if (VariableOperand != -1)
        VariableOperand = i;

    if (VariableOperand == -1) {
      AddrMode.BaseOffs += ConstantOffset;
      if (ConstantOffset == 0 ||
        if (!cast<GEPOperator>(AddrInst)->isInBounds())
                 ConstantOffset > 0) {
        auto *BaseI = dyn_cast<Instruction>(Base);
        auto *GEP = cast<GetElementPtrInst>(AddrInst);
        if (isa<Argument>(Base) || isa<GlobalValue>(Base) ||
            (BaseI && !isa<CastInst>(BaseI) &&
             !isa<GetElementPtrInst>(BaseI))) {
              BaseI ? BaseI->getParent() : &GEP->getFunction()->getEntryBlock();
            LargeOffsetGEP = std::make_pair(GEP, ConstantOffset);
      AddrMode.BaseOffs -= ConstantOffset;

    unsigned OldSize = AddrModeInsts.size();
    AddrMode.BaseOffs += ConstantOffset;
    if (!cast<GEPOperator>(AddrInst)->isInBounds())

      AddrModeInsts.resize(OldSize);

      if (!matchScaledValue(AddrInst->getOperand(VariableOperand), VariableScale,

        AddrModeInsts.resize(OldSize);

      AddrMode.BaseOffs += ConstantOffset;
      if (!matchScaledValue(AddrInst->getOperand(VariableOperand),
                            VariableScale, Depth)) {
        AddrModeInsts.resize(OldSize);

  case Instruction::SExt:
  case Instruction::ZExt: {
    TypePromotionHelper::Action TPH =
        TypePromotionHelper::getAction(Ext, InsertedInsts, TLI, PromotedInsts);

    TypePromotionTransaction::ConstRestorationPt LastKnownGood =
        TPT.getRestorationPoint();
    unsigned CreatedInstsCost = 0;
    Value *PromotedOperand =
        TPH(Ext, TPT, PromotedInsts, CreatedInstsCost, nullptr, nullptr, TLI);

    assert(PromotedOperand &&
           "TypePromotionHelper should have filtered out those cases");

    unsigned OldSize = AddrModeInsts.size();
    if (!matchAddr(PromotedOperand, Depth) ||
        !isPromotionProfitable(CreatedInstsCost,
                               ExtCost + (AddrModeInsts.size() - OldSize),
      AddrModeInsts.resize(OldSize);
      LLVM_DEBUG(dbgs() << "Sign extension does not pay off: rollback\n");
      TPT.rollback(LastKnownGood);
bool AddressingModeMatcher::matchAddr(Value *Addr, unsigned Depth) {
  TypePromotionTransaction::ConstRestorationPt LastKnownGood =
      TPT.getRestorationPoint();

    unsigned OldSize = AddrModeInsts.size();

    bool MovedAway = false;
    if (matchOperationAddr(I, I->getOpcode(), Depth, &MovedAway)) {
      if (I->hasOneUse() ||
          isProfitableToFoldIntoAddressingMode(I, BackupAddrMode, AddrMode)) {
        AddrModeInsts.push_back(I);

    AddrModeInsts.resize(OldSize);
    TPT.rollback(LastKnownGood);
    if (matchOperationAddr(CE, CE->getOpcode(), Depth))
    TPT.rollback(LastKnownGood);
  } else if (isa<ConstantPointerNull>(Addr)) {

  TPT.rollback(LastKnownGood);

  for (unsigned i = 0, e = TargetConstraints.size(); i != e; ++i) {
  if (!ConsideredInsts.insert(I).second)

  for (Use &U : I->uses()) {
    Instruction *UserI = cast<Instruction>(U.getUser());
    if (LoadInst *LI = dyn_cast<LoadInst>(UserI)) {
      MemoryUses.push_back(std::make_pair(LI, U.getOperandNo()));

      unsigned opNo = U.getOperandNo();
      MemoryUses.push_back(std::make_pair(SI, opNo));

      unsigned opNo = U.getOperandNo();
      MemoryUses.push_back(std::make_pair(RMW, opNo));

      unsigned opNo = U.getOperandNo();
      MemoryUses.push_back(std::make_pair(CmpX, opNo));

    if (CallInst *CI = dyn_cast<CallInst>(UserI)) {
      bool OptForSize = OptSize ||
      InlineAsm *IA = dyn_cast<InlineAsm>(CI->getCalledOperand());
      if (!IA)
        return true;

                          PSI, BFI, SeenInsts))

bool AddressingModeMatcher::valueAlreadyLiveAtInst(Value *Val,
                                                   Value *KnownLive1,
                                                   Value *KnownLive2) {
  if (Val == nullptr || Val == KnownLive1 || Val == KnownLive2)

  if (!isa<Instruction>(Val) && !isa<Argument>(Val))
    return true;

  if (AllocaInst *AI = dyn_cast<AllocaInst>(Val))
bool AddressingModeMatcher::
  if (IgnoreProfitability)
    return true;

  if (valueAlreadyLiveAtInst(ScaledReg, AMBefore.BaseReg, AMBefore.ScaledReg))
    ScaledReg = nullptr;

  if (!BaseReg && !ScaledReg)

  for (unsigned i = 0, e = MemoryUses.size(); i != e; ++i) {
    unsigned OpNo = MemoryUses[i].second;

    std::pair<AssertingVH<GetElementPtrInst>, int64_t> LargeOffsetGEP(nullptr,
    TypePromotionTransaction::ConstRestorationPt LastKnownGood =
        TPT.getRestorationPoint();
    AddressingModeMatcher Matcher(MatchedAddrModeInsts, TLI, TRI, LI, getDTFn,
                                  AddressAccessTy, AS, MemoryInst, Result,
                                  InsertedInsts, PromotedInsts, TPT,
                                  LargeOffsetGEP, OptSize, PSI, BFI);
    Matcher.IgnoreProfitability = true;
    bool Success = Matcher.matchAddr(Address, 0);

    TPT.rollback(LastKnownGood);

    MatchedAddrModeInsts.clear();

    return I->getParent() != BB;
                                        Type *AccessTy, unsigned AddrSpace) {
  worklist.push_back(Addr);

  bool PhiOrSelectSeen = false;

  AddressingModeCombiner AddrModes(SQ, Addr);
  TypePromotionTransaction TPT(RemovedInsts);
  TypePromotionTransaction::ConstRestorationPt LastKnownGood =
      TPT.getRestorationPoint();
  while (!worklist.empty()) {
    Value *V = worklist.back();
    worklist.pop_back();

    if (!Visited.insert(V).second)

    if (PHINode *P = dyn_cast<PHINode>(V)) {
      PhiOrSelectSeen = true;

      worklist.push_back(SI->getFalseValue());
      worklist.push_back(SI->getTrueValue());
      PhiOrSelectSeen = true;

    AddrModeInsts.clear();
    std::pair<AssertingVH<GetElementPtrInst>, int64_t> LargeOffsetGEP(nullptr,

    auto getDTFn = [MemoryInst, this]() -> const DominatorTree & {
      return this->getDT(*F);

    ExtAddrMode NewAddrMode = AddressingModeMatcher::Match(
        V, AccessTy, AddrSpace, MemoryInst, AddrModeInsts, *TLI, *LI, getDTFn,
        *TRI, InsertedInsts, PromotedInsts, TPT, LargeOffsetGEP, OptSize, PSI,

      LargeOffsetGEPMap[GEP->getPointerOperand()].push_back(LargeOffsetGEP);
      if (LargeOffsetGEPID.find(GEP) == LargeOffsetGEPID.end())
        LargeOffsetGEPID[GEP] = LargeOffsetGEPID.size();

    NewAddrMode.OriginalValue = V;
    if (!AddrModes.addNewAddrMode(NewAddrMode))

  if (!AddrModes.combineAddrModes()) {
    TPT.rollback(LastKnownGood);

  if (!PhiOrSelectSeen && none_of(AddrModeInsts, [&](Value *V) {

                      << " for " << *MemoryInst << "\n");
      SunkAddr = Builder.CreatePointerCast(SunkAddr, Addr->getType());
             SubtargetInfo->addrSinkUsingGEPs())) {
                      << " for " << *MemoryInst << "\n");
    Type *IntPtrTy = DL->getIntPtrType(Addr->getType());
    Value *ResultPtr = nullptr, *ResultIndex = nullptr;

      if (ResultPtr || AddrMode.Scale != 1)

    if (!DL->isNonIntegralPointerType(Addr->getType())) {
      if (!ResultPtr && AddrMode.BaseReg) {
      } else if (!ResultPtr && AddrMode.Scale == 1) {

      SunkAddr = Constant::getNullValue(Addr->getType());
    } else if (!ResultPtr) {
          Builder.getInt8PtrTy(Addr->getType()->getPointerAddressSpace());

          V = Builder.CreateIntCast(V, IntPtrTy, true, "sunkaddr");

        if (V->getType() == IntPtrTy) {
                     cast<IntegerType>(V->getType())->getBitWidth() &&
                 "We can't transform if ScaledReg is too narrow");
          V = Builder.CreateTrunc(V, IntPtrTy, "sunkaddr");

          ResultIndex = Builder.CreateAdd(ResultIndex, V, "sunkaddr");

        if (ResultPtr->getType() != I8PtrTy)
          ResultPtr = Builder.CreatePointerCast(ResultPtr, I8PtrTy);
              ? Builder.CreateInBoundsGEP(I8Ty, ResultPtr, ResultIndex,
              : Builder.CreateGEP(I8Ty, ResultPtr, ResultIndex, "sunkaddr");

        SunkAddr = ResultPtr;
        if (ResultPtr->getType() != I8PtrTy)
          ResultPtr = Builder.CreatePointerCast(ResultPtr, I8PtrTy);
              ? Builder.CreateInBoundsGEP(I8Ty, ResultPtr, ResultIndex,
              : Builder.CreateGEP(I8Ty, ResultPtr, ResultIndex, "sunkaddr");

      SunkAddr = Builder.CreatePointerCast(SunkAddr, Addr->getType());
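      // Illustrative IR shape of the address materialized here (a sketch, not
      // output copied from this pass): the combined addressing mode is emitted
      // as byte arithmetic over i8* and then cast back, e.g.
      //   %sunkaddr = getelementptr i8, i8* %base, i64 %offset
      //   %addr     = bitcast i8* %sunkaddr to i32*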
    PointerType *BasePtrTy = dyn_cast_or_null<PointerType>(BaseTy);
    PointerType *ScalePtrTy = dyn_cast_or_null<PointerType>(ScaleTy);
    if (DL->isNonIntegralPointerType(Addr->getType()) ||
        (BasePtrTy && DL->isNonIntegralPointerType(BasePtrTy)) ||
        (ScalePtrTy && DL->isNonIntegralPointerType(ScalePtrTy)) ||
         DL->isNonIntegralPointerType(AddrMode.BaseGV->getType())))

                      << " for " << *MemoryInst << "\n");
    Type *IntPtrTy = DL->getIntPtrType(Addr->getType());

        V = Builder.CreatePtrToInt(V, IntPtrTy, "sunkaddr");
        V = Builder.CreateIntCast(V, IntPtrTy, true, "sunkaddr");

      if (V->getType() == IntPtrTy) {
        V = Builder.CreatePtrToInt(V, IntPtrTy, "sunkaddr");
      } else if (cast<IntegerType>(IntPtrTy)->getBitWidth() <
                 cast<IntegerType>(V->getType())->getBitWidth()) {
        V = Builder.CreateTrunc(V, IntPtrTy, "sunkaddr");

    Instruction *I = dyn_cast_or_null<Instruction>(Result);
      I->eraseFromParent();