#include "llvm/IR/IntrinsicsSPIRV.h"

#define GET_BuiltinGroup_DECL
#include "SPIRVGenTables.inc"
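// Overview (summary of the code below): SPIRVEmitIntrinsics is a module pass
// that walks every instruction, rewrites selected LLVM IR constructs into
// calls to llvm.spv.* intrinsics, and records the pointee/element types it
// deduces via spv_assign_type / spv_assign_ptr_type, so that later SPIR-V
// instruction selection can reconstruct typed pointers from untyped IR.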
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  bool TrackConstants = true;
  SPIRV::InstructionSet::InstructionSet InstrSet;

  enum WellKnownTypes { Event };

  Type *deduceElementType(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited,
                                bool UnknownElemTypeI8);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  void maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                          bool UnknownElemTypeI8);

  Type *deduceNestedTypeHelper(User *U, bool UnknownElemTypeI8);
  Type *deduceNestedTypeHelper(User *U, Type *Ty,
                               std::unordered_set<Value *> &Visited,
                               bool UnknownElemTypeI8);

  CallInst *buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef<Type *> Types,
                            Value *Arg, Value *Arg2, ArrayRef<Constant *> Imms,
                            IRBuilder<> &B) {
    SmallVector<Value *, 4> Args;
    Args.push_back(Arg2);
    Args.push_back(buildMD(Arg));
    for (auto *Imm : Imms)
      Args.push_back(Imm);
    return B.CreateIntrinsic(IntrID, {Types}, Args);
  }
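  // Typical use (see buildAssignType/buildAssignPtr below):
  //   buildIntrWithMD(Intrinsic::spv_assign_type, {Arg->getType()}, OfType,
  //                   Arg, {}, B);
  // emits a call whose first operand is the annotated value (Arg2) and whose
  // second operand wraps the type-carrying value (Arg) in metadata.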
  bool insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B,
                                bool UnknownElemTypeI8);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);

  Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);

  bool postprocessTypes();
static bool isConvergenceIntrinsic(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
}

char SPIRVEmitIntrinsics::ID = 0;

static bool isAssignTypeInstr(const Instruction *I) {
  return isa<IntrinsicInst>(I) &&
         cast<IntrinsicInst>(I)->getIntrinsicID() == Intrinsic::spv_assign_type;
}

static bool isMemInstrToReplace(Instruction *I) {
  return isa<StoreInst>(I) || isa<LoadInst>(I) || isa<InsertValueInst>(I) ||
         isa<ExtractValueInst>(I) || isa<AtomicCmpXchgInst>(I);
}

static bool isAggrConstForceInt32(const Value *V) {
  return isa<ConstantArray>(V) || isa<ConstantStruct>(V) ||
         isa<ConstantDataArray>(V) ||
         (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
}

static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
}

static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I) {
  B.SetCurrentDebugLocation(I->getDebugLoc());
  if (I->getType()->isVoidTy())
    B.SetInsertPoint(I->getNextNode());
  else
    B.SetInsertPoint(*I->getInsertionPointAfterDef());
}

static bool requireAssignType(Instruction *I) {
  if (auto *Intr = dyn_cast<IntrinsicInst>(I)) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}

static void reportFatalOnTokenType(const Instruction *I) {
  if (I->getType()->isTokenTy())
    report_fatal_error("A token is encountered but SPIR-V without extensions "
                       "does not support token type",
                       false);
}

static bool IsKernelArgInt8(Function *F, StoreInst *SI) {
  return SI && F->getCallingConv() == CallingConv::SPIR_KERNEL &&
         isa<PointerType>(SI->getValueOperand()->getType()) &&
         isa<Argument>(SI->getValueOperand());
}
  return cast<ConstantAsMetadata>(MD->getMetadata())->getType();

  CallInst *AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type,
                                       {Arg->getType()}, OfType, Arg, {}, B);

  if (AssignPtrTyCI == nullptr ||
      AssignPtrTyCI->getParent()->getParent() != F) {
    AssignPtrTyCI = buildIntrWithMD(
        Intrinsic::spv_assign_ptr_type, {Arg->getType()}, OfType, Arg,
        {B.getInt32(getPointerAddressSpace(Arg->getType()))}, B);
  } else {
    updateAssignType(AssignPtrTyCI, Arg, OfType);
  }
}

void SPIRVEmitIntrinsics::updateAssignType(CallInst *AssignCI, Value *Arg,
                                           Value *OfType) {
  AssignCI->setArgOperand(1, buildMD(OfType));
  if (cast<IntrinsicInst>(AssignCI)->getIntrinsicID() !=
      Intrinsic::spv_assign_ptr_type)
    return;
Type *
SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
                                      UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  Type *Ty = ValueTy;
  if (Operand) {
    if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
      if (Type *NestedTy =
              deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
        Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
    } else {
      Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited,
                                  UnknownElemTypeI8);
    }
  }
  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
    Value *Op, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {

  for (User *OpU : Op->users()) {
    if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
        return Ty;
    }
  }
  return nullptr;
}
static Type *getPointeeTypeByCallInst(StringRef DemangledName,
                                      Function *CalledF, unsigned OpIdx) {
  if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
       DemangledName.starts_with("printf(")) &&
      OpIdx == 0)
    return IntegerType::getInt8Ty(CalledF->getContext());
  return nullptr;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I,
                                                   bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeHelper(I, Visited, UnknownElemTypeI8);
}

void SPIRVEmitIntrinsics::maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                                             bool UnknownElemTypeI8) {
  if (isUntypedPointerTy(RefTy)) {
    if (!UnknownElemTypeI8)
      return;
    if (auto *I = dyn_cast<Instruction>(Op)) {
      UncompleteTypeInfo.insert(I);
      PostprocessWorklist.push_back(I);
    }
  }
  Ty = RefTy;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
    Value *I, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {

  if (Visited.find(I) != Visited.end())
    return nullptr;
  Visited.insert(I);

  Type *Ty = nullptr;
  if (auto *Ref = dyn_cast<AllocaInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getAllocatedType(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    Ty = Ref->getResultElementType();
  } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
    Ty = deduceElementTypeByValueDeep(
        Ref->getValueType(),
        Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited,
        UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Type *RefTy = deduceElementTypeHelper(Ref->getPointerOperand(), Visited,
                                          UnknownElemTypeI8);
    maybeAssignPtrType(Ty, I, RefTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
        isPointerTy(Src) && isPointerTy(Dest))
      Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited,
                                   UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (Value *Op = Ref->getNewValOperand(); isPointerTy(Op->getType()))
      Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    if (Value *Op = Ref->getValOperand(); isPointerTy(Op->getType()))
      Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited,
                                        UnknownElemTypeI8);
      if (Ty)
        break;
    }
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
      Ty = deduceElementTypeByUsersDeep(Op, Visited, UnknownElemTypeI8);
      if (Ty)
        break;
    }
  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    static StringMap<unsigned> ResTypeByArg = {
        {"__spirv_GenericCastToPtr_ToGlobal", 0},
        {"__spirv_GenericCastToPtr_ToLocal", 0},
        {"__spirv_GenericCastToPtr_ToPrivate", 0},
        {"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
    if (Function *CalledF = CI->getCalledFunction()) {
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      if (DemangledName.length() > 0)
        DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
      auto AsArgIt = ResTypeByArg.find(DemangledName);
      if (AsArgIt != ResTypeByArg.end()) {
        Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
                                     Visited, UnknownElemTypeI8);
      }
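      // Example: for a call resolving (after demangling) to
      //   __spirv_GenericCastToPtr_ToGlobal(%q, ...)
      // the table above says the pointee type comes from argument 0, so the
      // deduction simply recurses into %q. (Illustrative call shape; real
      // inputs carry mangled names that are demangled first.)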
Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceNestedTypeHelper(U, U->getType(), Visited, UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
    User *U, Type *OrigTy, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {

  if (Visited.find(U) != Visited.end())
    return OrigTy;
  Visited.insert(U);

  if (dyn_cast<StructType>(OrigTy)) {

    for (unsigned i = 0; i < U->getNumOperands(); ++i) {
      Value *Op = U->getOperand(i);
      Type *OpTy = Op->getType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      Change |= Ty != OpTy;
    }

  } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = ArrTy->getElementType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());

    }
  } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = VecTy->getElementType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
Type *SPIRVEmitIntrinsics::deduceElementType(Value *I, bool UnknownElemTypeI8) {
  if (Type *Ty = deduceElementTypeHelper(I, UnknownElemTypeI8))
    return Ty;
  if (!UnknownElemTypeI8)
    return nullptr;
  if (auto *Instr = dyn_cast<Instruction>(I)) {
    UncompleteTypeInfo.insert(Instr);
    PostprocessWorklist.push_back(Instr);
  }
  return IntegerType::getInt8Ty(I->getContext());
}
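// When UnknownElemTypeI8 is set and no pointee type can be deduced, the value
// is recorded in UncompleteTypeInfo/PostprocessWorklist and i8 is used as a
// provisional element type; postprocessTypes() revisits such values once more
// type information has been collected.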
static Type *getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I,
                             Value *PointerOperand) {

  auto *PtrTy = dyn_cast<PointerType>(I->getType());
void SPIRVEmitIntrinsics::deduceOperandElementType(Instruction *I,

  Type *KnownElemTy = nullptr;

  if (auto *Ref = dyn_cast<PHINode>(I)) {

    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {

  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Ops.push_back(std::make_pair(Ref->getPointerOperand(), 0));
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    KnownElemTy = Ref->getSourceElementType();

  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    KnownElemTy = I->getType();

  } else if (auto *Ref = dyn_cast<StoreInst>(I)) {

  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {

  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {

  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {

    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {

  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {

  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {

    if (ElemTy0) {
      KnownElemTy = ElemTy0;

    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;

  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    if (Function *CalledF = CI->getCalledFunction()) {
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      if (DemangledName.length() > 0 &&

        auto [Grp, Opcode, ExtNo] =
            SPIRV::mapBuiltinToOpcode(DemangledName, InstrSet);
        if (Opcode == SPIRV::OpGroupAsyncCopy) {
          for (unsigned i = 0, PtrCnt = 0; i < CI->arg_size() && PtrCnt < 2;
               ++i) {
            Value *Op = CI->getArgOperand(i);

            KnownElemTy = ElemTy;

          }
        } else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
          if (CI->arg_size() < 2)
            return;
          Value *Op = CI->getArgOperand(0);

          switch (Opcode) {
          case SPIRV::OpAtomicLoad:
          case SPIRV::OpAtomicCompareExchangeWeak:
          case SPIRV::OpAtomicCompareExchange:
          case SPIRV::OpAtomicExchange:
          case SPIRV::OpAtomicIAdd:
          case SPIRV::OpAtomicISub:
          case SPIRV::OpAtomicOr:
          case SPIRV::OpAtomicXor:
          case SPIRV::OpAtomicAnd:
          case SPIRV::OpAtomicUMin:
          case SPIRV::OpAtomicUMax:
          case SPIRV::OpAtomicSMin:
          case SPIRV::OpAtomicSMax: {

  if (!KnownElemTy || Ops.size() == 0)
    return;

  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    if (Op->use_empty() || (AskOp && Op != AskOp))
      continue;

    if (Ty == KnownElemTy)
      continue;

    Type *OpTy = Op->getType();

    if (AssignCI == nullptr) {

          buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal, Op,

    } else {
      updateAssignType(AssignCI, Op, OpTyVal);
    }

    if (auto *OpI = dyn_cast<Instruction>(Op)) {

      B.SetInsertPoint(*OpI->getInsertionPointAfterDef());
      B.SetCurrentDebugLocation(OpI->getDebugLoc());
    } else if (auto *OpA = dyn_cast<Argument>(Op)) {
      B.SetInsertPointPastAllocas(OpA->getParent());

      B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());

    auto *PtrCastI =
        B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
    I->setOperand(OpIt.second, PtrCastI);
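// Net effect of the loop above (schematic): when an operand's deduced pointee
// type disagrees with what was assigned earlier, the operand is routed through
// a cast of roughly this shape
//   %c = call ptr @llvm.spv.ptrcast...(ptr %op, metadata <known type>, i32 <AS>)
// and the instruction's operand is replaced with %c. Intrinsic mangling and
// operand encoding shown here are illustrative, not verbatim output.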
void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New, IRBuilder<> &B) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      B.SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
      U->eraseFromParent();
    } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) || isa<CallInst>(U)) {
      U->replaceUsesOfWith(Old, New);
    }
  }
  Old->eraseFromParent();
}
void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    bool BPrepared = false;
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {}, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}
void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    bool IsPhi = isa<PHINode>(I), BPrepared = false;

    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      Constant *AggrConst = nullptr;
      Type *ResTy = nullptr;
      if (auto *COp = dyn_cast<ConstantVector>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = COp->getType();
      } else if (auto *COp = dyn_cast<ConstantArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantStruct>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantDataArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantAggregateZero>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = Op->getType()->isVectorTy() ? COp->getType() : B.getInt32Ty();
      }
      if (AggrConst) {
        SmallVector<Value *> Args;
        if (auto *COp = dyn_cast<ConstantDataSequential>(Op))
          for (unsigned i = 0; i < COp->getNumElements(); ++i)
            Args.push_back(COp->getElementAsConstant(i));
        else
          for (auto &COp : AggrConst->operands())
            Args.push_back(COp);
        IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
              : B.SetInsertPoint(I);
        auto *CI =
            B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
        I->replaceUsesOfWith(Op, CI);
        AggrConsts[CI] = AggrConst;
        AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst, false);
      }
    }
  }
}
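// Schematic effect: a constant aggregate operand such as
//   store [2 x i32] [i32 1, i32 2], ptr %p
// is routed through an intermediate
//   %cc = call i32 @llvm.spv.const.composite(i32 1, i32 2)
// (note the forced i32 result for arrays/structs per isAggrConstForceInt32);
// the real aggregate type is remembered in AggrConstTypes and reattached via
// spv_assign_type later. Illustrative IR only; exact mangling differs.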
Instruction *SPIRVEmitIntrinsics::visitCallInst(CallInst &Call) {
  if (!Call.isInlineAsm())
    return &Call;

  for (unsigned OpIdx = 0; OpIdx < Call.arg_size(); OpIdx++)
    Args.push_back(Call.getArgOperand(OpIdx));

  B.SetInsertPoint(&Call);
  B.CreateIntrinsic(Intrinsic::spv_inline_asm, {}, {Args});
  return &Call;
}
Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  B.SetInsertPoint(&I);

  for (auto &Op : I.operands()) {
    if (Op.get()->getType()->isSized()) {
      Args.push_back(Op);
    } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
      BBCases.push_back(BB);
      Args.push_back(BlockAddress::get(BB->getParent(), BB));
    }
  }

  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  I.replaceAllUsesWith(NewI);

  B.SetInsertPoint(ParentBB);
Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  B.SetInsertPoint(&I);

  Args.push_back(B.getInt1(I.isInBounds()));
  for (auto &Op : I.operands())
    Args.push_back(Op);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  I.replaceAllUsesWith(NewI);
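  // Schematic rewrite: a GEP such as
  //   %q = getelementptr inbounds i32, ptr %p, i64 4
  // becomes roughly
  //   %q = call ptr @llvm.spv.gep...(i1 true, ptr %p, i64 4)
  // i.e. an inbounds flag followed by the original operands; the source
  // element type travels separately through the pointer-type assignments and
  // ptrcasts this pass inserts elsewhere. (Illustrative IR, not verbatim
  // mangling.)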
Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  B.SetInsertPoint(&I);

  if (isPointerTy(I.getType())) {
    I.replaceAllUsesWith(Source);
    I.eraseFromParent();
    return nullptr;
  }

  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}
void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
    TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
  Type *VTy = V->getType();

  if (auto PType = dyn_cast<TypedPointerType>(VTy))
    if (PType->getElementType() != AssignedType)
      report_fatal_error("Unexpected pointer element type!");

  CallInst *AssignCI = GR->findAssignPtrTypeInstr(V);
  if (!AssignCI) {
    buildAssignType(B, AssignedType, V);
    return;
  }

  Type *CurrentType =
      dyn_cast<ConstantAsMetadata>(
          cast<MetadataAsValue>(AssignCI->getOperand(1))->getMetadata())
          ->getType();
  if (CurrentType == AssignedType)
    return;

  report_fatal_error("Type mismatch " + CurrentType->getTargetExtName() + "/" +
                         AssignedType->getTargetExtName() + " for value " +
                         V->getName(),
                     false);
void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  // If the pointer is itself a bitcast, cast its source instead.
  while (BitCastInst *BC = dyn_cast<BitCastInst>(Pointer))
    Pointer = BC->getOperand(0);

  Type *PointerElemTy = deduceElementTypeHelper(Pointer, false);
  if (PointerElemTy == ExpectedElementType ||
      isEquivalentTypes(PointerElemTy, ExpectedElementType))
    return;

  bool FirstPtrCastOrAssignPtrType = true;

  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;

    FirstPtrCastOrAssignPtrType = false;
    if (II->getOperand(1) != VMD ||
        dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
            AddressSpace)
      continue;

    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      return;

    if (II->getParent() != I->getParent())
      continue;

    I->setOperand(OperandToReplace, II);
    return;
  }

  if (FirstPtrCastOrAssignPtrType &&
      (isa<Instruction>(Pointer) || isa<Argument>(Pointer))) {
    buildAssignPtr(B, ExpectedElementType, Pointer);
    return;
  }

  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  I->setOperand(OperandToReplace, PtrCastI);
}
void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions:
  StoreInst *SI = dyn_cast<StoreInst>(I);
  if (IsKernelArgInt8(F, SI)) {
    return replacePointerOperandWithPtrCast(
        I, SI->getValueOperand(), IntegerType::getInt8Ty(F->getContext()), 0,
        B);
  } else if (SI) {
    Value *Op = SI->getValueOperand();
    Type *OpTy = Op->getType();
    if (auto *OpI = dyn_cast<Instruction>(Op))
      OpTy = restoreMutatedType(GR, OpI, OpTy);
    if (OpTy == Op->getType())
      OpTy = deduceElementTypeByValueDeep(OpTy, Op, false);
    return replacePointerOperandWithPtrCast(I, SI->getPointerOperand(), OpTy, 1,
                                            B);
  } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    return replacePointerOperandWithPtrCast(I, LI->getPointerOperand(),
                                            LI->getType(), 0, B);
  } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    return replacePointerOperandWithPtrCast(I, GEPI->getPointerOperand(),
                                            GEPI->getSourceElementType(), 0, B);
  }
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());

  bool HaveTypes = false;
  for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {

    CalledArgTys.push_back(cast<TypedPointerType>(ArgType)->getElementType());

      if (Instruction *Inst = dyn_cast<Instruction>(U)) {
        if ((ElemTy = deduceElementTypeHelper(Inst, false)) != nullptr)
          break;
      }

    HaveTypes |= ElemTy != nullptr;
  }

  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {

    if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand)) {

    Type *ExpectedType =
        OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
    if (!ExpectedType && !DemangledName.empty())
      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
          DemangledName, OpIdx, I->getContext());
    if (!ExpectedType || ExpectedType->isVoidTy())
      continue;

    if (ExpectedType->isTargetExtTy())
      insertAssignPtrTypeTargetExt(cast<TargetExtType>(ExpectedType),
                                   ArgOperand, B);
    else
      replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};

  B.SetInsertPoint(&I);

  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  B.SetInsertPoint(&I);
  SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
                                  I.getIndexOperand()->getType()};

  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  B.SetInsertPoint(&I);

  for (auto &Op : I.operands())
    if (isa<UndefValue>(Op))
      Args.push_back(UndefValue::get(B.getInt32Ty()));
    else
      Args.push_back(Op);
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  Instruction *NewI =
      B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  B.SetInsertPoint(&I);

  for (auto &Op : I.operands())
    Args.push_back(Op);
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getLoadMemOperandFlags(I, F->getDataLayout());
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                        {I.getPointerOperand(), B.getInt16(Flags),
                         B.getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
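// Schematic result for an aggregate load: something like
//   %v = load {i32, i32}, ptr %p, align 4
// becomes roughly
//   %v = call {i32, i32} @llvm.spv.load...(ptr %p, i16 <memop flags>, i8 4)
// carrying the memory-operand flags and alignment as immediate operands.
// (Illustrative IR; the exact mangled intrinsic name differs.)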
Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getStoreMemOperandFlags(I, F->getDataLayout());
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = B.CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, B.getInt16(Flags),
       B.getInt8(I.getAlign().value())});
  I.eraseFromParent();
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  Value *ArraySize = nullptr;
  if (I.isArrayAllocation()) {
    if (!STI->canUseExtension(
            SPIRV::Extension::SPV_INTEL_variable_length_array))
      report_fatal_error(
          "array allocation: this instruction requires the following "
          "SPIR-V extension: SPV_INTEL_variable_length_array",
          false);
    ArraySize = I.getArraySize();
  }
  B.SetInsertPoint(&I);
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI =
      ArraySize ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
                                    {PtrTy, ArraySize->getType()}, {ArraySize})
                : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy}, {});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}
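// Note: a fixed-size alloca maps to @llvm.spv.alloca with no operands (the
// layout is carried by the accompanying pointer-type assignment), while a
// variable-length array allocation maps to @llvm.spv.alloca.array and is only
// legal when the SPV_INTEL_variable_length_array extension is available.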
Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  B.SetInsertPoint(&I);

  for (auto &Op : I.operands())
    Args.push_back(Op);
  Args.push_back(B.getInt32(I.getSyncScopeID()));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                 {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  B.SetInsertPoint(&I);
  B.CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
  return &I;
}
void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
                                             IRBuilder<> &B) {
  // Skip the special artificial variable llvm.global.annotations.
  if (GV.getName() == "llvm.global.annotations")
    return;

  if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
    deduceElementTypeHelper(&GV, false);

    auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,

    InitInst->setArgOperand(1, Init);
  }
  if (GV.getNumUses() == 0)
    B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}
bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
                                                   IRBuilder<> &B,
                                                   bool UnknownElemTypeI8) {
  reportFatalOnTokenType(I);
  if (!isPointerTy(I->getType()) || !requireAssignType(I) ||
      isa<BitCastInst>(I))
    return false;

  setInsertPointAfterDef(B, I);
  if (Type *ElemTy = deduceElementType(I, UnknownElemTypeI8)) {
    buildAssignPtr(B, ElemTy, I);
    return false;
  }
  return true;
}
void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
                                                IRBuilder<> &B) {
  // Known result types of a few OpenCL/SPIR-V builtins.
  static StringMap<unsigned> ResTypeWellKnown = {
      {"async_work_group_copy", WellKnownTypes::Event},
      {"async_work_group_strided_copy", WellKnownTypes::Event},
      {"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};

  bool IsKnown = false;
  if (auto *CI = dyn_cast<CallInst>(I)) {

    std::string DemangledName =
        getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());
    if (DemangledName.length() > 0)
      DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
    auto ResIt = ResTypeWellKnown.find(DemangledName);
    if (ResIt != ResTypeWellKnown.end()) {
      IsKnown = true;
      switch (ResIt->second) {
      case WellKnownTypes::Event:
        buildAssignType(B, TargetExtType::get(I->getContext(), "spirv.Event"),
                        I);
        break;
      }
    }
  }

  Type *Ty = I->getType();
  if (!IsKnown && !Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto It = AggrConstTypes.find(II);
        if (It == AggrConstTypes.end())

        TypeToAssign = It->second;
      }
    }
    buildAssignType(B, TypeToAssign, I);
  }
  for (const auto &Op : I->operands()) {
    if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
        (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
      Type *OpTy = Op->getType();
      if (isa<UndefValue>(Op) && OpTy->isAggregateType()) {
        buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
                        UndefValue::get(B.getInt32Ty()), {}, B);
      } else if (!isa<Instruction>(Op)) {
        Type *OpTy = Op->getType();
        if (auto PType = dyn_cast<TypedPointerType>(OpTy)) {
          buildAssignPtr(B, PType->getElementType(), Op);
        } else if (isPointerTy(OpTy)) {
          Type *ElemTy = GR->findDeducedElementType(Op);
          buildAssignPtr(B, ElemTy ? ElemTy : deduceElementType(Op, true), Op);
        } else {
          CallInst *AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type,
                                               {OpTy}, Op, Op, {}, B);
          GR->addAssignPtrTypeInstr(Op, AssignCI);
        }
      }
    }
  }
}
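// Schematic effect: for a non-void, non-pointer result the pass emits roughly
//   call void @llvm.spv.assign.type...(<ty> %val, metadata <ty> poison)
// and for pointer-typed values
//   call void @llvm.spv.assign.ptr.type...(ptr %val, metadata <pointee> poison, i32 <AS>)
// so the deduced (pointee) type survives until instruction selection.
// (Illustrative IR; the exact mangling and metadata encoding differ.)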
void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
                                                 IRBuilder<> &B) {
  if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
    setInsertPointAfterDef(B, I);
    B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
}
void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
                                                 IRBuilder<> &B) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  if (II && II->getIntrinsicID() == Intrinsic::spv_const_composite &&
      TrackConstants) {

    auto t = AggrConsts.find(I);

    auto *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {II->getType(), II->getType()}, t->second, I, {}, B);
    I->replaceAllUsesWith(NewOp);
    NewOp->setArgOperand(0, I);
  }
  bool IsPhi = isa<PHINode>(I), BPrepared = false;
  for (const auto &Op : I->operands()) {
    if (isa<PHINode>(I) || isa<SwitchInst>(I))
      TrackConstants = false;
    if ((isa<ConstantData>(Op) || isa<ConstantExpr>(Op)) && TrackConstants) {
      unsigned OpNo = Op.getOperandNo();
      if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
                 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
        continue;

      IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
            : B.SetInsertPoint(I);

      if (Op->getType()->isTargetExtTy())
        continue;
      auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant,

      I->setOperand(OpNo, NewOp);
    }
  }

  if (I->hasName()) {
    std::vector<Value *> Args = {I};
    addStringImm(I->getName(), B, Args);
    B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
  }
}
Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
                                                     unsigned OpIdx) {
  std::unordered_set<Function *> FVisited;
  return deduceFunParamElementType(F, OpIdx, FVisited);
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
    Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
  // maybe a cycle
  if (FVisited.find(F) != FVisited.end())
    return nullptr;
  FVisited.insert(F);

  std::unordered_set<Value *> Visited;
  SmallVector<std::pair<Function *, unsigned>> Lookup;
  // search in the function's call sites
  for (User *U : F->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (!CI || OpIdx >= CI->arg_size())
      continue;

    if (Type *Ty = deduceElementTypeHelper(OpArg, Visited, false))
      return Ty;

      if (!Inst || Inst == CI)
        continue;

      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, false))
        return Ty;

    if (FVisited.find(OuterF) != FVisited.end())
      continue;
    for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
      if (OuterF->getArg(i) == OpArg) {
        Lookup.push_back(std::make_pair(OuterF, i));
        break;
      }
    }
  }

  // search in function parameters
  for (auto &Pair : Lookup) {
    if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
      return Ty;
  }
  return nullptr;
}
void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
                                                       IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {

    buildAssignPtr(B, ElemTy, Arg);
  }
}

void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {

    if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr)
      buildAssignPtr(B, ElemTy, Arg);
  }
}
bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;

  GR = ST.getSPIRVGlobalRegistry();
  InstrSet = ST.isOpenCLEnv() ? SPIRV::InstructionSet::OpenCL_std
                              : SPIRV::InstructionSet::GLSL_std_450;

  AggrConstTypes.clear();

  processParamTypesByFunHeader(F, B);

  for (auto &I : instructions(Func)) {
    StoreInst *SI = dyn_cast<StoreInst>(&I);
    if (!SI)
      continue;
    Type *ElTy = SI->getValueOperand()->getType();

  }

  B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV, B);

  preprocessUndefs(B);
  preprocessCompositeConstants(B);

  for (auto &I : Worklist) {
    // Don't emit intrinsics for convergence intrinsics.
    if (isConvergenceIntrinsic(I))
      continue;

    bool Postpone = insertAssignPtrTypeIntrs(I, B, false);
    // if Postpone is true, we could not decide on the pointee type yet
    insertAssignTypeIntrs(I, B);
    insertPtrCastOrAssignTypeInstr(I, B);

    // if the instruction still has no pointee type assigned, force i8
    if (Postpone && !GR->findAssignPtrTypeInstr(I))
      insertAssignPtrTypeIntrs(I, B, true);
  }

  for (auto &I : instructions(Func))
    deduceOperandElementType(&I);

  for (auto *I : Worklist) {
    TrackConstants = true;
    if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
      setInsertPointAfterDef(B, I);

    // Don't emit intrinsics for convergence operations.
    if (isConvergenceIntrinsic(I))
      continue;

    processInstrAfterVisit(I, B);
  }

  return true;
}
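// Per-function order of operations, as implemented above: process formal
// parameters, then module globals, then pre-process undef and composite
// constant operands, then insert spv_assign_type / spv_assign_ptr_type and any
// needed spv_ptrcast per instruction, deduce operand element types, and
// finally apply the per-instruction visitors (visitLoadInst, visitStoreInst,
// etc.) followed by processInstrAfterVisit() for constant tracking and name
// assignment.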
bool SPIRVEmitIntrinsics::postprocessTypes() {
  bool Changed = false;

  for (auto IB = PostprocessWorklist.rbegin(), IE = PostprocessWorklist.rend();
       IB != IE; ++IB) {
    CallInst *AssignCI = GR->findAssignPtrTypeInstr(*IB);
    Type *KnownTy = GR->findDeducedElementType(*IB);
    if (!KnownTy || !AssignCI || !isa<Instruction>(AssignCI->getArgOperand(0)))
      continue;
    Instruction *I = cast<Instruction>(AssignCI->getArgOperand(0));
    for (User *U : I->users()) {
      Instruction *Inst = dyn_cast<Instruction>(U);
      if (!Inst || isa<IntrinsicInst>(Inst))
        continue;
      deduceOperandElementType(Inst, I, KnownTy, AssignCI);
      if (KnownTy != GR->findDeducedElementType(I)) {
        Changed = true;
        break;
      }
    }
  }
  return Changed;
}
bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
  bool Changed = false;

  UncompleteTypeInfo.clear();
  PostprocessWorklist.clear();

  for (auto &F : M) {
    // check if function parameter types are set
    if (!F.isDeclaration() && !F.isIntrinsic()) {
      GR = ST.getSPIRVGlobalRegistry();
      IRBuilder<> B(F.getContext());
      processParamTypes(&F, B);
    }
  }

  Changed |= postprocessTypes();

  return Changed;
}

ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}