#define DEBUG_TYPE "infer-address-spaces"

using namespace llvm;

    cl::desc("The default address space is assumed to be the flat address space. "
             "This is mainly for testing purposes."));

using PredicatedAddrSpaceMapTy =

  unsigned FlatAddrSpace = 0;

  InferAddressSpaces() :

  InferAddressSpaces(unsigned AS) : FunctionPass(ID), FlatAddrSpace(AS) {}
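// Core implementation class; the pass object above only gathers analyses and
// forwards to it, so the same logic can be reused independently of the pass
// manager boilerplate.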
class InferAddressSpacesImpl {

  unsigned FlatAddrSpace = 0;

  bool updateAddressSpace(const Value &V,
                          ValueToAddrSpaceMapTy &InferredAddrSpace,
                          PredicatedAddrSpaceMapTy &PredicatedAS) const;

                          ValueToAddrSpaceMapTy &InferredAddrSpace,
                          PredicatedAddrSpaceMapTy &PredicatedAS) const;

  bool isSafeToCastConstAddrSpace(Constant *C, unsigned NewAS) const;

  Value *cloneInstructionWithNewAddressSpace(
      const PredicatedAddrSpaceMapTy &PredicatedAS,

      const ValueToAddrSpaceMapTy &InferredAddrSpace,
      const PredicatedAddrSpaceMapTy &PredicatedAS,

  void appendsFlatAddressExpressionToPostorderStack(
      Value *V, PostorderStackTy &PostorderStack,

      PostorderStackTy &PostorderStack,

  std::vector<WeakTrackingVH> collectFlatAddressExpressions(Function &F) const;

  Value *cloneValueWithNewAddressSpace(
      Value *V, unsigned NewAddrSpace,
      const PredicatedAddrSpaceMapTy &PredicatedAS,

  unsigned joinAddressSpaces(unsigned AS1, unsigned AS2) const;

  unsigned getPredicatedAddrSpace(const Value &V, Value *Opnd) const;

      : AC(AC), DT(DT), TTI(TTI), FlatAddrSpace(FlatAddrSpace) {}
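// Recognizes an inttoptr(ptrtoint X) round trip whose source and destination
// pointer types live in compatible address spaces, so that X can be treated as
// the underlying pointer instead of going through the integer casts.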
  assert(I2P->getOpcode() == Instruction::IntToPtr);
  auto *P2I = dyn_cast<Operator>(I2P->getOperand(0));
  if (!P2I || P2I->getOpcode() != Instruction::PtrToInt)

  unsigned P2IOp0AS = P2I->getOperand(0)->getType()->getPointerAddressSpace();
  unsigned I2PAS = I2P->getType()->getPointerAddressSpace();

      I2P->getOperand(0)->getType(), I2P->getType(),

      P2I->getOperand(0)->getType(), P2I->getType(),
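// Classifies whether an operator is an address expression this pass can
// rewrite: PHIs over pointers, bitcasts, addrspacecasts, GEPs, and no-op
// inttoptr(ptrtoint) pairs, among a few other pointer-typed operators.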
  switch (Op->getOpcode()) {
  case Instruction::PHI:
    assert(Op->getType()->isPointerTy());

  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
  case Instruction::GetElementPtr:

    return Op->getType()->isPointerTy();

  case Instruction::IntToPtr:
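// For a recognized address expression, returns the pointer operands feeding
// it: the incoming values of a PHI, the source operand of a cast or GEP, both
// arms of a select, or the underlying pointer of a no-op inttoptr pair.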
  switch (Op.getOpcode()) {
  case Instruction::PHI: {
    auto IncomingValues = cast<PHINode>(Op).incoming_values();
    return {IncomingValues.begin(), IncomingValues.end()};

  case Instruction::BitCast:
  case Instruction::AddrSpaceCast:
  case Instruction::GetElementPtr:
    return {Op.getOperand(0)};

    return {Op.getOperand(1), Op.getOperand(2)};

           "unexpected intrinsic call");

  case Instruction::IntToPtr: {

    auto *P2I = cast<Operator>(Op.getOperand(0));
    return {P2I->getOperand(0)};
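// Rewrites the pointer argument of intrinsics such as llvm.objectsize and
// llvm.ptrmask so the call operates directly on the pointer in its new
// address space.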
bool InferAddressSpacesImpl::rewriteIntrinsicOperands(IntrinsicInst *II,

  case Intrinsic::objectsize: {

  case Intrinsic::ptrmask:
void InferAddressSpacesImpl::collectRewritableIntrinsicOperands(

  case Intrinsic::ptrmask:
  case Intrinsic::objectsize:
    appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(0),
                                                 PostorderStack, Visited);

    for (int Idx : OpIndexes) {
      appendsFlatAddressExpressionToPostorderStack(II->getArgOperand(Idx),
                                                   PostorderStack, Visited);
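// Pushes V onto the post-order traversal stack if it is a flat address
// expression that has not been visited yet; constant-expression operands are
// pushed as well so they are rewritten along with the instructions that use
// them.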
void InferAddressSpacesImpl::appendsFlatAddressExpressionToPostorderStack(
    Value *V, PostorderStackTy &PostorderStack,

      PostorderStack.emplace_back(CE, false);

  if (Visited.insert(V).second) {
    PostorderStack.emplace_back(V, false);

    for (unsigned I = 0, E = Op->getNumOperands(); I != E; ++I) {

        PostorderStack.emplace_back(CE, false);
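// Walks the function once, seeding the stack with the pointer operands of
// memory operations (loads, stores, atomics, memory intrinsics, pointer
// compares, addrspacecasts, no-op inttoptr pairs), then performs an iterative
// DFS to produce a post-order list of flat address expressions.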
std::vector<WeakTrackingVH>
InferAddressSpacesImpl::collectFlatAddressExpressions(Function &F) const {

  PostorderStackTy PostorderStack;

  auto PushPtrOperand = [&](Value *Ptr) {
    appendsFlatAddressExpressionToPostorderStack(Ptr, PostorderStack,

    if (auto *GEP = dyn_cast<GetElementPtrInst>(&I)) {
      if (!GEP->getType()->isVectorTy())
        PushPtrOperand(GEP->getPointerOperand());
    } else if (auto *LI = dyn_cast<LoadInst>(&I))
      PushPtrOperand(LI->getPointerOperand());
    else if (auto *SI = dyn_cast<StoreInst>(&I))
      PushPtrOperand(SI->getPointerOperand());
    else if (auto *RMW = dyn_cast<AtomicRMWInst>(&I))
      PushPtrOperand(RMW->getPointerOperand());
    else if (auto *CmpX = dyn_cast<AtomicCmpXchgInst>(&I))
      PushPtrOperand(CmpX->getPointerOperand());
    else if (auto *MI = dyn_cast<MemIntrinsic>(&I)) {

      PushPtrOperand(MI->getRawDest());

      if (auto *MTI = dyn_cast<MemTransferInst>(MI))
        PushPtrOperand(MTI->getRawSource());
    } else if (auto *II = dyn_cast<IntrinsicInst>(&I))
      collectRewritableIntrinsicOperands(II, PostorderStack, Visited);
    else if (ICmpInst *Cmp = dyn_cast<ICmpInst>(&I)) {

      if (Cmp->getOperand(0)->getType()->isPointerTy()) {
        PushPtrOperand(Cmp->getOperand(0));
        PushPtrOperand(Cmp->getOperand(1));

    } else if (auto *ASC = dyn_cast<AddrSpaceCastInst>(&I)) {
      if (!ASC->getType()->isVectorTy())
        PushPtrOperand(ASC->getPointerOperand());
    } else if (auto *I2P = dyn_cast<IntToPtrInst>(&I)) {

          cast<Operator>(I2P->getOperand(0))->getOperand(0));

  std::vector<WeakTrackingVH> Postorder;
  while (!PostorderStack.empty()) {
    Value *TopVal = PostorderStack.back().getPointer();

    if (PostorderStack.back().getInt()) {

      Postorder.push_back(TopVal);
      PostorderStack.pop_back();

    PostorderStack.back().setInt(true);

      appendsFlatAddressExpressionToPostorderStack(PtrOperand, PostorderStack,
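// Produces the value to use for a pointer operand in the new address space:
// a constant is rewritten directly, a previously cloned value is reused, and
// an address space known only from a predicate gets an explicit addrspacecast
// inserted before the instruction. If none of these apply, the use is recorded
// in UndefUsesToFix so it can be patched once all values have been cloned.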
    const Use &OperandUse, unsigned NewAddrSpace,

    const PredicatedAddrSpaceMapTy &PredicatedAS,

      cast<PointerType>(Operand->getType()), NewAddrSpace);

  if (Constant *C = dyn_cast<Constant>(Operand))

  if (Value *NewOperand = ValueWithNewAddrSpace.lookup(Operand))

  auto I = PredicatedAS.find(std::make_pair(Inst, Operand));
  if (I != PredicatedAS.end()) {

    unsigned NewAS = I->second;

        cast<PointerType>(Operand->getType()), NewAS);

    NewI->insertBefore(Inst);

  UndefUsesToFix->push_back(&OperandUse);
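// Clones instruction I with its pointer operands retargeted to NewAddrSpace.
// Operands whose rewritten value is not yet available are left as placeholders
// and recorded in UndefUsesToFix for later fixup.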
Value *InferAddressSpacesImpl::cloneInstructionWithNewAddressSpace(

    const PredicatedAddrSpaceMapTy &PredicatedAS,

      cast<PointerType>(I->getType()), NewAddrSpace);

  if (I->getOpcode() == Instruction::AddrSpaceCast) {
    Value *Src = I->getOperand(0);

    assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);
    if (Src->getType() != NewPtrType)

                                               PredicatedAS, UndefUsesToFix);

    assert(Rewrite != II && "cannot modify this pointer operation in place");

          cast<PointerType>(I->getType()), AS);

      NewI->insertAfter(I);

  for (const Use &OperandUse : I->operands()) {
    if (!OperandUse.get()->getType()->isPointerTy())
      NewPointerOperands.push_back(nullptr);

          OperandUse, NewAddrSpace, ValueWithNewAddrSpace, PredicatedAS,

  switch (I->getOpcode()) {
  case Instruction::BitCast:
    return new BitCastInst(NewPointerOperands[0], NewPtrType);
  case Instruction::PHI: {
    assert(I->getType()->isPointerTy());

  case Instruction::GetElementPtr: {

        GEP->getSourceElementType(), NewPointerOperands[0],

    assert(I->getType()->isPointerTy());

                              NewPointerOperands[2], "", nullptr, I);

  case Instruction::IntToPtr: {

    Value *Src = cast<Operator>(I->getOperand(0))->getOperand(0);
    if (Src->getType() == NewPtrType)
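// Constant-expression counterpart of the cloning above: rebuilds a
// ConstantExpr over operands that have already been translated into the new
// address space, recursing into nested constant expressions where needed.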
  Type *TargetType = CE->getType()->isPointerTy()

                               cast<PointerType>(CE->getType()), NewAddrSpace)

  if (CE->getOpcode() == Instruction::AddrSpaceCast) {

    assert(CE->getOperand(0)->getType()->getPointerAddressSpace() ==

  if (CE->getOpcode() == Instruction::BitCast) {
    if (Value *NewOperand = ValueWithNewAddrSpace.lookup(CE->getOperand(0)))

  if (CE->getOpcode() == Instruction::IntToPtr) {

    Constant *Src = cast<ConstantExpr>(CE->getOperand(0))->getOperand(0);
    assert(Src->getType()->getPointerAddressSpace() == NewAddrSpace);

  for (unsigned Index = 0; Index < CE->getNumOperands(); ++Index) {
    Constant *Operand = CE->getOperand(Index);

    if (Value *NewOperand = ValueWithNewAddrSpace.lookup(Operand)) {

      NewOperands.push_back(cast<Constant>(NewOperand));

    if (auto *CExpr = dyn_cast<ConstantExpr>(Operand))

              CExpr, NewAddrSpace, ValueWithNewAddrSpace, DL, TTI)) {

        NewOperands.push_back(cast<Constant>(NewOperand));

    NewOperands.push_back(Operand);

  if (CE->getOpcode() == Instruction::GetElementPtr) {

    return CE->getWithOperands(NewOperands, TargetType, false,
                               cast<GEPOperator>(CE)->getSourceElementType());

  return CE->getWithOperands(NewOperands, TargetType);
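// Dispatches to the instruction or constant-expression cloning path; a newly
// created instruction that has no parent yet is inserted right before the
// original instruction it replaces.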
Value *InferAddressSpacesImpl::cloneValueWithNewAddressSpace(
    Value *V, unsigned NewAddrSpace,

    const PredicatedAddrSpaceMapTy &PredicatedAS,

    Value *NewV = cloneInstructionWithNewAddressSpace(
        I, NewAddrSpace, ValueWithNewAddrSpace, PredicatedAS, UndefUsesToFix);
    if (Instruction *NewI = dyn_cast_or_null<Instruction>(NewV)) {
      if (NewI->getParent() == nullptr) {
        NewI->insertBefore(I);

      cast<ConstantExpr>(V), NewAddrSpace, ValueWithNewAddrSpace, DL, TTI);
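// Lattice join of two address spaces: the flat address space acts as the top
// element, and two distinct specific address spaces also join to flat.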
unsigned InferAddressSpacesImpl::joinAddressSpaces(unsigned AS1,
                                                   unsigned AS2) const {
  if (AS1 == FlatAddrSpace || AS2 == FlatAddrSpace)
    return FlatAddrSpace;

  return (AS1 == AS2) ? AS1 : FlatAddrSpace;
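// Driver: collect the flat address expressions in post order, infer a tighter
// address space for each, then rewrite the function.
//
// Illustrative example (not taken from this file): on a target where
// addrspace(0) is the flat space and addrspace(3) is a specific space, a
// sequence such as
//   %p = addrspacecast float addrspace(3)* %a to float*
//   %v = load float, float* %p
// can be rewritten so the load uses %a in addrspace(3) directly, making the
// addrspacecast dead.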
  DL = &F.getParent()->getDataLayout();

  std::vector<WeakTrackingVH> Postorder = collectFlatAddressExpressions(F);

  ValueToAddrSpaceMapTy InferredAddrSpace;
  PredicatedAddrSpaceMapTy PredicatedAS;
  inferAddressSpaces(Postorder, InferredAddrSpace, PredicatedAS);

  return rewriteWithNewAddressSpaces(Postorder, InferredAddrSpace, PredicatedAS,
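// Fixed-point propagation: seed a worklist from the post-order list, and keep
// re-running updateAddressSpace on users whose operands changed until nothing
// changes any more.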
void InferAddressSpacesImpl::inferAddressSpaces(

    ValueToAddrSpaceMapTy &InferredAddrSpace,
    PredicatedAddrSpaceMapTy &PredicatedAS) const {

  for (Value *V : Postorder)

  while (!Worklist.empty()) {
    Value *V = Worklist.pop_back_val();

    if (!updateAddressSpace(*V, InferredAddrSpace, PredicatedAS))

      if (Worklist.count(User))

      auto Pos = InferredAddrSpace.find(User);

      if (Pos == InferredAddrSpace.end())

      if (Pos->second == FlatAddrSpace)

      Worklist.insert(User);
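// Consults llvm.assume calls known to the AssumptionCache for Opnd, letting
// the target deduce a narrower address space for the operand at this use than
// its type alone implies.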
unsigned InferAddressSpacesImpl::getPredicatedAddrSpace(const Value &V,

  for (auto &AssumeVH : AC.assumptionsFor(Opnd)) {

    CallInst *CI = cast<CallInst>(AssumeVH);
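// Recomputes the address space of V by joining the inferred address spaces of
// its pointer operands. Selects between constants get special treatment, and
// an operand with no inferred space yet may fall back to a predicated address
// space, which is remembered in PredicatedAS for the rewrite phase.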
bool InferAddressSpacesImpl::updateAddressSpace(
    const Value &V, ValueToAddrSpaceMapTy &InferredAddrSpace,
    PredicatedAddrSpaceMapTy &PredicatedAS) const {
  assert(InferredAddrSpace.count(&V));

  LLVM_DEBUG(dbgs() << "Updating the address space of\n " << V << '\n');

    Value *Src0 = Op.getOperand(1);
    Value *Src1 = Op.getOperand(2);

    auto I = InferredAddrSpace.find(Src0);
    unsigned Src0AS = (I != InferredAddrSpace.end()) ?

    auto J = InferredAddrSpace.find(Src1);
    unsigned Src1AS = (J != InferredAddrSpace.end()) ?

    auto *C0 = dyn_cast<Constant>(Src0);
    auto *C1 = dyn_cast<Constant>(Src1);

    if (C0 && isSafeToCastConstAddrSpace(C0, Src1AS))

    else if (C1 && isSafeToCastConstAddrSpace(C1, Src0AS))

      NewAS = joinAddressSpaces(Src0AS, Src1AS);

      auto I = InferredAddrSpace.find(PtrOperand);

      if (I == InferredAddrSpace.end()) {
        OperandAS = PtrOperand->getType()->getPointerAddressSpace();
        if (OperandAS == FlatAddrSpace) {

          unsigned AS = getPredicatedAddrSpace(V, PtrOperand);

                       << " deduce operand AS from the predicate addrspace "

            PredicatedAS[std::make_pair(&V, PtrOperand)] = OperandAS;

        OperandAS = I->second;

      NewAS = joinAddressSpaces(NewAS, OperandAS);
      if (NewAS == FlatAddrSpace)

  unsigned OldAS = InferredAddrSpace.lookup(&V);
  assert(OldAS != FlatAddrSpace);

  InferredAddrSpace[&V] = NewAS;
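// Checks whether a simple memory use (load, store, atomicrmw, cmpxchg) of the
// pointer may be rewritten to the new address space; volatile accesses are
// only allowed when the VolatileIsAllowed flag says the target supports them
// in that space.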
                                             Use &U, unsigned AddrSpace) {

  bool VolatileIsAllowed = false;
  if (auto *I = dyn_cast<Instruction>(Inst))

  if (auto *LI = dyn_cast<LoadInst>(Inst))

         (VolatileIsAllowed || !LI->isVolatile());

  if (auto *SI = dyn_cast<StoreInst>(Inst))

         (VolatileIsAllowed || !SI->isVolatile());

  if (auto *RMW = dyn_cast<AtomicRMWInst>(Inst))

         (VolatileIsAllowed || !RMW->isVolatile());

  if (auto *CmpX = dyn_cast<AtomicCmpXchgInst>(Inst))

         (VolatileIsAllowed || !CmpX->isVolatile());
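// Rewrites a memset/memcpy/memmove whose pointer argument now lives in a new
// address space: the intrinsic is recreated with the new pointer, the TBAA,
// alias.scope and noalias metadata are carried over, and the old call is
// erased.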
  MDNode *TBAA = MI->getMetadata(LLVMContext::MD_tbaa);
  MDNode *ScopeMD = MI->getMetadata(LLVMContext::MD_alias_scope);
  MDNode *NoAliasMD = MI->getMetadata(LLVMContext::MD_noalias);

  if (auto *MSI = dyn_cast<MemSetInst>(MI)) {
    B.CreateMemSet(NewV, MSI->getValue(), MSI->getLength(),

                   TBAA, ScopeMD, NoAliasMD);
  } else if (auto *MTI = dyn_cast<MemTransferInst>(MI)) {
    Value *Src = MTI->getRawSource();
    Value *Dest = MTI->getRawDest();

    if (isa<MemCpyInlineInst>(MTI)) {
      MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);
      B.CreateMemCpyInline(Dest, MTI->getDestAlign(), Src,
                           MTI->getSourceAlign(), MTI->getLength(),

                           TBAA, TBAAStruct, ScopeMD, NoAliasMD);
    } else if (isa<MemCpyInst>(MTI)) {
      MDNode *TBAAStruct = MTI->getMetadata(LLVMContext::MD_tbaa_struct);
      B.CreateMemCpy(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),

                     TBAA, TBAAStruct, ScopeMD, NoAliasMD);

      assert(isa<MemMoveInst>(MTI));
      B.CreateMemMove(Dest, MTI->getDestAlign(), Src, MTI->getSourceAlign(),

                      TBAA, ScopeMD, NoAliasMD);

  MI->eraseFromParent();
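// Decides whether constant C can be cast to NewAS without changing its
// meaning: trivially yes for the same space or undef, never between two
// distinct non-flat spaces, and recursively through addrspacecast and
// flat-space inttoptr constant expressions.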
bool InferAddressSpacesImpl::isSafeToCastConstAddrSpace(Constant *C,
                                                        unsigned NewAS) const {

  unsigned SrcAS = C->getType()->getPointerAddressSpace();
  if (SrcAS == NewAS || isa<UndefValue>(C))

  if (SrcAS != FlatAddrSpace && NewAS != FlatAddrSpace)

  if (isa<ConstantPointerNull>(C))

  if (auto *Op = dyn_cast<Operator>(C)) {

    if (Op->getOpcode() == Instruction::AddrSpaceCast)
      return isSafeToCastConstAddrSpace(cast<Constant>(Op->getOperand(0)),
                                        NewAS);

    if (Op->getOpcode() == Instruction::IntToPtr &&
        Op->getType()->getPointerAddressSpace() == FlatAddrSpace)
  User *CurUser = I->getUser();

  while (I != End && I->getUser() == CurUser)
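// Final rewrite: clone every value in the post-order list into its inferred
// address space, patch the placeholder operands recorded in UndefUsesToFix,
// then replace the uses of each original value where doing so is valid,
// inserting addrspacecasts where a flat pointer is still required, and queue
// dead instructions for removal.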
bool InferAddressSpacesImpl::rewriteWithNewAddressSpaces(

    const ValueToAddrSpaceMapTy &InferredAddrSpace,
    const PredicatedAddrSpaceMapTy &PredicatedAS, Function *F) const {

  for (Value *V : Postorder) {
    unsigned NewAddrSpace = InferredAddrSpace.lookup(V);

          cloneValueWithNewAddressSpace(V, NewAddrSpace, ValueWithNewAddrSpace,
                                        PredicatedAS, &UndefUsesToFix);

      ValueWithNewAddrSpace[V] = New;

  if (ValueWithNewAddrSpace.empty())

  for (const Use *UndefUse : UndefUsesToFix) {
    User *V = UndefUse->getUser();
    User *NewV = cast_or_null<User>(ValueWithNewAddrSpace.lookup(V));

    unsigned OperandNo = UndefUse->getOperandNo();

    NewV->setOperand(OperandNo, ValueWithNewAddrSpace.lookup(UndefUse->get()));

    assert(WVH && "value was unexpectedly deleted");

    if (NewV == nullptr)

    LLVM_DEBUG(dbgs() << "Replacing the uses of " << *V << "\n with\n "

    if (Constant *C = dyn_cast<Constant>(V)) {

      LLVM_DEBUG(dbgs() << "Inserting replacement const cast: " << Replace
                        << ": " << *Replace << '\n');
      C->replaceAllUsesWith(Replace);

      if (CurUser == NewV)

      if (auto *MI = dyn_cast<MemIntrinsic>(CurUser)) {

      if (auto *II = dyn_cast<IntrinsicInst>(CurUser)) {
        if (rewriteIntrinsicOperands(II, V, NewV))

      if (isa<Instruction>(CurUser)) {
        if (ICmpInst *Cmp = dyn_cast<ICmpInst>(CurUser)) {

          int OtherIdx = (SrcIdx == 0) ? 1 : 0;
          Value *OtherSrc = Cmp->getOperand(OtherIdx);

          if (Value *OtherNewV = ValueWithNewAddrSpace.lookup(OtherSrc)) {
            if (OtherNewV->getType()->getPointerAddressSpace() == NewAS) {
              Cmp->setOperand(OtherIdx, OtherNewV);
              Cmp->setOperand(SrcIdx, NewV);

          if (auto *KOtherSrc = dyn_cast<Constant>(OtherSrc)) {
            if (isSafeToCastConstAddrSpace(KOtherSrc, NewAS)) {
              Cmp->setOperand(SrcIdx, NewV);
              Cmp->setOperand(OtherIdx,

          if (ASC->getDestAddressSpace() == NewAS) {
            if (!cast<PointerType>(ASC->getType())
                     ->hasSameElementTypeAs(
                         cast<PointerType>(NewV->getType()))) {

              if (Instruction *NewVInst = dyn_cast<Instruction>(NewV))
                InsertPos = std::next(NewVInst->getIterator());
              else if (Instruction *VInst = dyn_cast<Instruction>(V))
                InsertPos = std::next(VInst->getIterator());

                InsertPos = ASC->getIterator();

                                       ASC->getType(), "", &*InsertPos);

            ASC->replaceAllUsesWith(NewV);
            DeadInstructions.push_back(ASC);

      if (Instruction *VInst = dyn_cast<Instruction>(V)) {

        if (U == V && isa<AddrSpaceCastInst>(V))

        if (Instruction *NewVInst = dyn_cast<Instruction>(NewV))
          InsertPos = std::next(NewVInst->getIterator());

          InsertPos = std::next(VInst->getIterator());

        while (isa<PHINode>(InsertPos))

      DeadInstructions.push_back(I);
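// Legacy pass entry point: honor skipFunction, then run the shared
// implementation with the AssumptionCache, the DominatorTree (if available),
// and the target's TTI.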
  if (skipFunction(F))

  auto *DTWP = getAnalysisIfAvailable<DominatorTreeWrapperPass>();

  return InferAddressSpacesImpl(
             getAnalysis<AssumptionCacheTracker>().getAssumptionCache(F), DT,
             &getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F),