24#include "llvm/IR/IntrinsicsSPIRV.h"
31#include <unordered_set>
54#define GET_BuiltinGroup_DECL
55#include "SPIRVGenTables.inc"
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  Function *CurrF = nullptr;
  bool TrackConstants = true;
  bool HaveFunPtrs = false;
  DenseMap<Instruction *, Constant *> AggrConsts;
  DenseMap<Instruction *, Type *> AggrConstTypes;
  DenseSet<Instruction *> AggrStores;
  std::unordered_set<Value *> Named;

  // ...
  DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;

  // ...
  bool CanTodoType = true;
  unsigned TodoTypeSz = 0;
  DenseMap<Value *, bool> TodoType;
  void insertTodoType(Value *Op) {
    // ...
    auto It = TodoType.try_emplace(Op, true);
    if (It.second)
      ++TodoTypeSz;
    // ...
  }
  void eraseTodoType(Value *Op) {
    auto It = TodoType.find(Op);
    if (It != TodoType.end() && It->second) {
      It->second = false;
      --TodoTypeSz;
    }
  }
  bool isTodoType(Value *Op) {
    // ...
    auto It = TodoType.find(Op);
    return It != TodoType.end() && It->second;
  }
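
  // Note on the bookkeeping above (added commentary): values whose pointee
  // type could not be deduced on the first pass are parked in TodoType with a
  // "still pending" flag rather than being erased, so an entry can be cheaply
  // invalidated in place; TodoTypeSz counts the pending entries and lets
  // postprocessTypes() bail out early once it reaches zero.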
  std::unordered_set<Instruction *> TypeValidated;

  // ...
  enum WellKnownTypes { Event };

  Type *deduceElementType(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited,
                                bool UnknownElemTypeI8,
                                bool IgnoreKnownType = false);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  // ...

  Type *deduceNestedTypeHelper(User *U, bool UnknownElemTypeI8);
  Type *deduceNestedTypeHelper(User *U, Type *OrigTy,
                               std::unordered_set<Value *> &Visited,
                               bool UnknownElemTypeI8);

  void deduceOperandElementType(Instruction *I,
                                SmallPtrSet<Instruction *, 4> *IncompleteRets,
                                const SmallPtrSet<Value *, 4> *AskOps = nullptr,
                                bool IsPostprocessing = false);

  // ...
  Type *reconstructType(Value *Op, bool UnknownElemTypeI8,
                        bool IsPostprocessing);

  bool insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B,
                                bool UnknownElemTypeI8);
  void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType, Value *V,
                                    IRBuilder<> &B);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);
  bool shouldTryToAddMemAliasingDecoration(Instruction *Inst);
  void insertConstantsForFPFastMathDefault(Module &M);
  void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);
  bool deduceOperandElementTypeCalledFunction(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool &Incomplete);
  void deduceOperandElementTypeFunctionPointer(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool IsPostprocessing);
  bool deduceOperandElementTypeFunctionRet(
      Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
      const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
      /* ... */);
  void replaceUsesOfWithSpvPtrcast(Value *Op, Type *ElemTy, Instruction *I,
                                   DenseMap<Function *, CallInst *> Ptrcasts);
  void propagateElemType(Value *Op, Type *ElemTy,
                         DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void
  propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                       DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                            DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
                            std::unordered_set<Value *> &Visited,
                            DenseMap<Function *, CallInst *> Ptrcasts);

  GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP);

  bool postprocessTypes(Module &M);
  bool processFunctionPointers(Module &M);
  void parseFunDeclarations(Module &M);

  void useRoundingMode(ConstrainedFPIntrinsic *FPI, IRBuilder<> &B);

  // ...
  bool walkLogicalAccessChain(
      GetElementPtrInst &GEP,
      const std::function<void(Type *PointedType, uint64_t Index)>
          &OnLiteralIndexing,
      const std::function<void(Type *PointedType, Value *Offset)>
          &OnDynamicIndexing);

  Type *getGEPType(GetElementPtrInst *GEP);
  Type *getGEPTypeLogical(GetElementPtrInst *GEP);

  Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP);
public:
  static char ID;
  SPIRVEmitIntrinsics(SPIRVTargetMachine *TM = nullptr)
      : ModulePass(ID), TM(TM) {}
  // ...
  Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
  Instruction *visitInsertElementInst(InsertElementInst &I);
  Instruction *visitExtractElementInst(ExtractElementInst &I);
  Instruction *visitExtractValueInst(ExtractValueInst &I);
  // ...
  Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
  // ...
  StringRef getPassName() const override { return "SPIRV emit intrinsics"; }

  bool runOnModule(Module &M) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    ModulePass::getAnalysisUsage(AU);
  }
};
273 return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
274 II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
275 II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
278bool expectIgnoredInIRTranslation(
const Instruction *
I) {
282 switch (
II->getIntrinsicID()) {
283 case Intrinsic::invariant_start:
284 case Intrinsic::spv_resource_handlefrombinding:
285 case Intrinsic::spv_resource_getpointer:
295 if (
II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
296 Value *V =
II->getArgOperand(0);
297 return getPointerRoot(V);
char SPIRVEmitIntrinsics::ID = 0;

// ...

static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  // ...
  B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
}

static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I) {
  B.SetCurrentDebugLocation(I->getDebugLoc());
  if (I->getType()->isVoidTy())
    B.SetInsertPoint(I->getNextNode());
  else
    B.SetInsertPoint(*I->getInsertionPointAfterDef());
}

static bool requireAssignType(Instruction *I) {
  // ...
  switch (Intr->getIntrinsicID()) {
  case Intrinsic::invariant_start:
  case Intrinsic::invariant_end:
    return false;
  // ...
  }
  // ...
}

static void reportFatalOnTokenType(const Instruction *I) {
  if (I->getType()->isTokenTy())
    report_fatal_error("A token is encountered but SPIR-V without extensions "
                       "does not support token type",
                       false);
}

static void emitAssignName(Instruction *I, IRBuilder<> &B) {
  if (!I->hasName() || I->getType()->isAggregateType() ||
      expectIgnoredInIRTranslation(I))
    return;
  // ...
  std::vector<Value *> Args = {/* ... */};
  B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
}
void SPIRVEmitIntrinsics::replaceAllUsesWith(Value *Src, Value *Dest,
                                             bool DeleteOld) {
  // ...
  if (isTodoType(Src)) {
    // ...
    insertTodoType(Dest);
  }
}

void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(IRBuilder<> &B,
                                                     Instruction *Src,
                                                     Instruction *Dest) {
  replaceAllUsesWith(Src, Dest, /* DeleteOld */ false);
  std::string Name = Src->hasName() ? Src->getName().str() : "";
  Src->eraseFromParent();
  // ...
  if (Named.insert(Dest).second)
    emitAssignName(Dest, B);
}
Type *SPIRVEmitIntrinsics::reconstructType(Value *Op, bool UnknownElemTypeI8,
                                           bool IsPostprocessing) {
  // ...
  if (UnknownElemTypeI8) {
    if (!IsPostprocessing)
      insertTodoType(Op);
    // ...
  }
  // ...
}
CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *F, Value *Op,
                                               Type *ElemTy) {
  // ...
  if (auto *OpA = dyn_cast<Argument>(Op)) {
    B.SetInsertPointPastAllocas(OpA->getParent());
    // ...
  } else {
    B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
  }
  Type *OpTy = Op->getType();
  // ...
  CallInst *PtrCasted =
      B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  // ...
  return PtrCasted;
}
void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
    Value *Op, Type *ElemTy, Instruction *I,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  Function *F = I->getParent()->getParent();
  CallInst *PtrCastedI = nullptr;
  auto It = Ptrcasts.find(F);
  if (It == Ptrcasts.end()) {
    PtrCastedI = buildSpvPtrcast(F, Op, ElemTy);
    Ptrcasts[F] = PtrCastedI;
  } else {
    PtrCastedI = It->second;
  }
  I->replaceUsesOfWith(Op, PtrCastedI);
}
void SPIRVEmitIntrinsics::propagateElemType(
    Value *Op, Type *ElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    // ...
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    // ...
    if (/* ... */ TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, ElemTy, UI, Ptrcasts);
  }
}
void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  std::unordered_set<Value *> Visited;
  DenseMap<Function *, CallInst *> Ptrcasts;
  propagateElemTypeRec(Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
                       std::move(Ptrcasts));
}

void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
    std::unordered_set<Value *> &Visited,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  if (!Visited.insert(Op).second)
    return;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    // ...
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    // ...
    if (/* ... */ TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, CastElemTy, UI, Ptrcasts);
  }
}
Type *
SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
                                      UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  // ...
  if (Type *Ty =
          deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8)) {
    // ...
  }
  // ...
}

Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
    Value *Op, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {
  // ...
  for (User *OpU : Op->users()) {
    if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
        return Ty;
    }
  }
  // ...
}

static Type *getPointeeTypeByCallInst(StringRef DemangledName,
                                      Function *CalledF, unsigned OpIdx) {
  if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
       /* ... */))
    // ...
  // ...
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I,
                                                   bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeHelper(I, Visited, UnknownElemTypeI8);
}

void SPIRVEmitIntrinsics::maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                                             bool UnknownElemTypeI8) {
  if (isUntypedPointerTy(RefTy)) {
    if (!UnknownElemTypeI8)
      return;
    insertTodoType(Op);
  }
  Ty = RefTy;
}
bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
    GetElementPtrInst &GEP,
    const std::function<void(Type *, uint64_t)> &OnLiteralIndexing,
    const std::function<void(Type *, Value *)> &OnDynamicIndexing) {
  // ...
  Value *Src = getPointerRoot(GEP.getPointerOperand());
  Type *CurType = deduceElementType(Src, true);
  // ...
  if (/* ... */) {
    auto *AT = dyn_cast<ArrayType>(CurType);
    if (AT)
      OnDynamicIndexing(AT->getElementType(), Operand);
    return AT == nullptr;
  }
  // ...
  if (auto *AT = dyn_cast<ArrayType>(CurType)) {
    uint32_t EltTypeSize = DL.getTypeSizeInBits(AT->getElementType()) / 8;
    // ...
    CurType = AT->getElementType();
    OnLiteralIndexing(CurType, Index);
  } else if (auto *ST = dyn_cast<StructType>(CurType)) {
    uint32_t StructSize = DL.getTypeSizeInBits(ST) / 8;
    // ...
    const auto &STL = DL.getStructLayout(ST);
    unsigned Element = STL->getElementContainingOffset(Offset);
    Offset -= STL->getElementOffset(Element);
    CurType = ST->getElementType(Element);
    OnLiteralIndexing(CurType, Element);
  }
  // ...
}
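
// Worked example (added commentary, not from the original source): for a
// logical-addressing GEP that indexes with a flat byte offset, e.g.
//   %v = getelementptr i8, ptr %s, i64 8  ; *%s deduced as { [4 x float], i32 }
// walkLogicalAccessChain peels the offset layer by layer: the struct layout
// places byte 8 inside element 0 ([4 x float]), then 8 / sizeof(float)
// selects array index 2, so OnLiteralIndexing fires with index 0 and then
// index 2 (assuming 4-byte floats and the default struct layout).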
Instruction *
SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP) {
  // ...
  B.SetInsertPoint(&GEP);
  // ...
  std::vector<Value *> Indices;
  Indices.push_back(ConstantInt::get(
      IntegerType::getInt32Ty(CurrF->getContext()), 0, false));
  walkLogicalAccessChain(
      GEP,
      [&Indices, &B](Type *EltType, uint64_t Index) {
        Indices.push_back(
            ConstantInt::get(B.getInt64Ty(), Index, false));
      },
      [&](Type *EltType, Value *Offset) {
        uint32_t EltTypeSize = DL.getTypeSizeInBits(EltType) / 8;
        // Divide the dynamic byte offset by the element size:
        Value *Index = B.CreateUDiv(
            Offset, ConstantInt::get(Offset->getType(), EltTypeSize, false));
        Indices.push_back(Index);
      });
  // ...
  Args.push_back(B.getInt1(GEP.isInBounds()));
  Args.push_back(GEP.getOperand(0));
  // ...
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &GEP, NewI);
  return NewI;
}
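
// Added commentary: the net effect here is to rewrite the flat i8 GEP into a
// typed @llvm.spv.gep intrinsic; for the example above the produced index
// list would be (i32 0, i64 0, i64 2), with the leading zero stepping through
// the pointer itself. The division in the dynamic-index lambda is an
// assumption consistent with the surrounding size computation.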
Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *GEP) {
  Type *CurType = GEP->getResultElementType();
  // ...
  bool Interrupted = walkLogicalAccessChain(
      *GEP, [&CurType](Type *EltType, uint64_t Index) { CurType = EltType; },
      /* ... */);
  // ...
  return Interrupted ? GEP->getResultElementType() : CurType;
}
Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *Ref) {
  if (Ref->getSourceElementType() ==
          IntegerType::getInt8Ty(CurrF->getContext()) &&
      /* ... */)
    return getGEPTypeLogical(Ref);

  Type *Ty = nullptr;
  // ...
    Ty = Ref->getSourceElementType();
  // ...
    Ty = Ref->getResultElementType();
  // ...
  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
    Value *I, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8,
    bool IgnoreKnownType) {
  // ...
  if (!IgnoreKnownType) {
    // ...
  }
  if (!Visited.insert(I).second)
    return nullptr;

  Type *Ty = nullptr;
  if (auto *Ref = dyn_cast<AllocaInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getAllocatedType(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    Ty = getGEPType(Ref);
  } else if (/* ... */) {
    // ...
    KnownTy = Op->getType();
    // ...
    maybeAssignPtrType(Ty, I, ElemTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
    Ty = deduceElementTypeByValueDeep(
        Ref->getValueType(),
        Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited,
        UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Type *RefTy = deduceElementTypeHelper(Ref->getPointerOperand(), Visited,
                                          UnknownElemTypeI8);
    maybeAssignPtrType(Ty, I, RefTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getDestTy(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<CastInst>(I)) {
    if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
        /* ... */)
      Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited,
                                   UnknownElemTypeI8);
  } else if (/* ... */) {
    Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (/* ... */) {
    Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<PHINode>(I)) {
    Type *BestTy = nullptr;
    // ...
    DenseMap<Type *, unsigned> PhiTys;
    for (int i = Ref->getNumIncomingValues() - 1; i >= 0; --i) {
      Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited,
                                        UnknownElemTypeI8);
      // ...
      if (It.first->second > MaxN) {
        MaxN = It.first->second;
        BestTy = Ty;
      }
    }
    // ...
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
      Ty = deduceElementTypeByUsersDeep(Op, Visited, UnknownElemTypeI8);
      // ...
    }
  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    static StringMap<unsigned> ResTypeByArg = {
        // ...
        {"__spirv_GenericCastToPtr_ToGlobal", 0},
        {"__spirv_GenericCastToPtr_ToLocal", 0},
        {"__spirv_GenericCastToPtr_ToPrivate", 0},
        {"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
    // ...
    auto *II = dyn_cast<IntrinsicInst>(CI);
    if (II && II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
      // ...
      if (HandleType->getTargetExtName() == "spirv.Image" ||
          HandleType->getTargetExtName() == "spirv.SignedImage") {
        for (User *U : II->users()) {
          // ...
        }
      } else if (HandleType->getTargetExtName() == "spirv.VulkanBuffer") {
        Ty = HandleType->getTypeParameter(0);
        // ...
      }
    } else if (II && II->getIntrinsicID() ==
                         Intrinsic::spv_generic_cast_to_ptr_explicit) {
      Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
                                   UnknownElemTypeI8);
    } else if (Function *CalledF = CI->getCalledFunction()) {
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      if (DemangledName.length() > 0)
        DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
      auto AsArgIt = ResTypeByArg.find(DemangledName);
      if (AsArgIt != ResTypeByArg.end())
        Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
                                     Visited, UnknownElemTypeI8);
      // ...
    }
  }

  if (Ty && !IgnoreKnownType) {
    // ...
  }

  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceNestedTypeHelper(U, U->getType(), Visited, UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
    User *U, Type *OrigTy, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  // ...
  if (!Visited.insert(U).second)
    return OrigTy;

  if (auto *StructTy = dyn_cast<StructType>(OrigTy)) {
    // ...
    for (unsigned i = 0; i < U->getNumOperands(); ++i) {
      Value *Op = U->getOperand(i);
      assert(Op && "Operands should not be null.");
      Type *OpTy = Op->getType();
      Type *Ty = OpTy;
      // ...
      if (Type *NestedTy =
              deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8)) {
        // ...
      }
      Change |= Ty != OpTy;
    }
    // ...
  } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = ArrTy->getElementType();
      // ...
      if (Type *NestedTy =
              deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8)) {
        // ...
      }
      if (Ty != OpTy) {
        Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
        // ...
      }
    }
  } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = VecTy->getElementType();
      // ...
      if (Ty != OpTy) {
        Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
        // ...
      }
    }
  }

  return OrigTy;
}
Type *SPIRVEmitIntrinsics::deduceElementType(Value *I, bool UnknownElemTypeI8) {
  if (Type *Ty = deduceElementTypeHelper(I, UnknownElemTypeI8))
    return Ty;
  if (!UnknownElemTypeI8)
    return nullptr;
  insertTodoType(I);
  return IntegerType::getInt8Ty(I->getContext());
}
static Type *getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I,
                             Value *PointerOperand) {
  // ...
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool &Incomplete) {
  Function *CalledF = CI->getCalledFunction();
  // ...
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CalledF->getName());
  if (DemangledName.length() > 0 &&
      /* ... */) {
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(*CalledF);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode == SPIRV::OpGroupAsyncCopy) {
      for (unsigned i = 0, PtrCnt = 0; i < CI->arg_size() && PtrCnt < 2; ++i) {
        // ...
        KnownElemTy = ElemTy;
        Ops.push_back(std::make_pair(Op, i));
      }
    } else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
      // ...
      switch (Opcode) {
      case SPIRV::OpAtomicFAddEXT:
      case SPIRV::OpAtomicFMinEXT:
      case SPIRV::OpAtomicFMaxEXT:
      case SPIRV::OpAtomicLoad:
      case SPIRV::OpAtomicCompareExchangeWeak:
      case SPIRV::OpAtomicCompareExchange:
      case SPIRV::OpAtomicExchange:
      case SPIRV::OpAtomicIAdd:
      case SPIRV::OpAtomicISub:
      case SPIRV::OpAtomicOr:
      case SPIRV::OpAtomicXor:
      case SPIRV::OpAtomicAnd:
      case SPIRV::OpAtomicUMin:
      case SPIRV::OpAtomicUMax:
      case SPIRV::OpAtomicSMin:
      case SPIRV::OpAtomicSMax: {
        // ...
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
        break;
      }
      case SPIRV::OpAtomicStore: {
        // ...
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
        break;
      }
      // ...
      }
    }
  }
  // ...
}
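
// Added commentary: for recognized atomic builtins, the pointee type of the
// pointer operand is derived from the atomic's value type, so a call such as
//   __spirv_AtomicIAdd(p, scope, semantics, v)   // v : i32 (illustrative)
// lets the pass record i32 as the element type of p and queue (p, 0) in Ops
// for the assign-type/ptrcast fixup performed by deduceOperandElementType.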
void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool IsPostprocessing) {
  Value *Op = CI->getCalledOperand();
  // ...
  Ops.push_back(std::make_pair(Op, std::numeric_limits<unsigned>::max()));
  FunctionType *FTy = CI->getFunctionType();
  bool IsNewFTy = false, IsIncomplete = false;
  SmallVector<Type *, 4> ArgTys;
  for (Value *Arg : CI->args()) {
    Type *ArgTy = Arg->getType();
    // ...
    if (isTodoType(Arg))
      IsIncomplete = true;
    // ...
      IsIncomplete = true;
    // ...
  }
  Type *RetTy = FTy->getReturnType();
  // ...
      IsIncomplete = true;
  // ...
      IsIncomplete = true;
  // ...
  if (!IsPostprocessing && IsIncomplete)
    insertTodoType(Op);
  KnownElemTy =
      IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
    Type *KnownElemTy, Value *Op, Function *F) {
  // ...
  DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(I, Op)};
  for (User *U : F->users()) {
    // ...
    propagateElemType(CI, PrevElemTy, VisitedSubst);
    // ...
  }
  // ...
  if (/* ... */) {
    for (Instruction *IncompleteRetI : *IncompleteRets)
      deduceOperandElementType(IncompleteRetI, nullptr, AskOps,
                               IsPostprocessing);
  } else if (IncompleteRets) {
    // ...
  }
  TypeValidated.insert(I);
  // ...
}
void SPIRVEmitIntrinsics::deduceOperandElementType(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing) {
  SmallVector<std::pair<Value *, unsigned>> Ops;
  Type *KnownElemTy = nullptr;
  bool Incomplete = false;
  if (auto *Ref = dyn_cast<PHINode>(I)) {
    // ...
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Value *Op = Ref->getIncomingValue(i);
      // ...
      Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    // ...
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getPointerOperand(), 0));
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    // ...
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getOperand(0), 0));
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    // ...
    KnownElemTy = Ref->getSourceElementType();
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 GetElementPtrInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    KnownElemTy = I->getType();
    // ...
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 LoadInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<StoreInst>(I)) {
    if (!(KnownElemTy =
              reconstructType(Ref->getValueOperand(), false, IsPostprocessing)))
      return;
    // ...
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 StoreInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    // ...
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 /* ... */));
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    // ...
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 /* ... */));
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    // ...
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
      Value *Op = Ref->getOperand(i);
      // ...
      Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
    // ...
    if (deduceOperandElementTypeFunctionRet(I, IncompleteRets, AskOps,
                                            IsPostprocessing, KnownElemTy, Op,
                                            /* ... */))
      return;
    Incomplete = isTodoType(CurrF);
    Ops.push_back(std::make_pair(Op, 0));
  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
    Value *Op0 = Ref->getOperand(0);
    Value *Op1 = Ref->getOperand(1);
    bool Incomplete0 = isTodoType(Op0);
    bool Incomplete1 = isTodoType(Op1);
    Type *ElemTy1 = GR->findDeducedElementType(Op1);
    Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
                        ? nullptr
                        : GR->findDeducedElementType(Op0);
    if (ElemTy0) {
      KnownElemTy = ElemTy0;
      Incomplete = Incomplete0;
      Ops.push_back(std::make_pair(Op1, 1));
    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;
      Incomplete = Incomplete1;
      Ops.push_back(std::make_pair(Op0, 0));
    }
  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    if (!CI->isIndirectCall())
      deduceOperandElementTypeCalledFunction(CI, Ops, KnownElemTy, Incomplete);
    else if (HaveFunPtrs)
      deduceOperandElementTypeFunctionPointer(CI, Ops, KnownElemTy,
                                              IsPostprocessing);
  }

  if (!KnownElemTy || Ops.size() == 0)
    return;

  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    // ...
    Type *AskTy = nullptr;
    CallInst *AskCI = nullptr;
    if (IsPostprocessing && AskOps) {
      // ...
    }
    // ...
    if (Ty == KnownElemTy)
      continue;
    Type *OpTy = Op->getType();
    if (Op->hasUseList() &&
        /* ... */) {
      // ...
    } else if (!IsPostprocessing) {
      // ...
    }
    if (AssignCI == nullptr) {
      // ...
    } else {
      // ...
      DenseSet<std::pair<Value *, Value *>> VisitedSubst{
          std::make_pair(I, Op)};
      propagateElemTypeRec(Op, KnownElemTy, PrevElemTy, VisitedSubst);
    }
    // ...
    CallInst *PtrCastI =
        buildSpvPtrcast(I->getParent()->getParent(), Op, KnownElemTy);
    if (OpIt.second == std::numeric_limits<unsigned>::max()) {
      // ...
    } else {
      I->setOperand(OpIt.second, PtrCastI);
    }
  }
  TypeValidated.insert(I);
}
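
// Added commentary: the function above has two halves. The first half picks
// KnownElemTy and the operands to align from the instruction kind (a store
// derives the pointee type from its value operand, a GEP from its source
// element type, an icmp from whichever side already has a deduced type); the
// second half walks Ops and either updates the existing spv_assign_ptr_type
// or inserts an spv_ptrcast so the operand is seen with the expected pointee
// type, propagating the change recursively through already-validated users.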
void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New,
                                              IRBuilder<> &B) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      B.SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      CallInst *AssignCI = B.CreateIntrinsic(Intrinsic::spv_assign_type,
                                             {New->getType()}, Args);
      // ...
      U->eraseFromParent();
    } else {
      // ...
      U->replaceUsesOfWith(Old, New);
    }
  }
  New->copyMetadata(*Old);
  // ...
}
void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  // ...
  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    // ...
    bool BPrepared = false;
    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;
      // ...
      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}
void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  // ...
  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    // ...
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      // ...
      Type *ResTy = nullptr;
      if (/* ... */) {
        ResTy = COp->getType();
        // ...
      } else if (/* ... */) {
        ResTy = Op->getType()->isVectorTy() ? COp->getType() : B.getInt32Ty();
        // ...
        for (unsigned i = 0; i < COp->getNumElements(); ++i)
          Args.push_back(COp->getElementAsConstant(i));
      }
      // ...
      IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
            : B.SetInsertPoint(I);
      auto *CI =
          B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
      // ...
      AggrConsts[CI] = AggrConst;
      AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst, false);
      // ...
    }
    // ...
  }
}
static void createDecorationIntrinsic(Instruction *I, MDNode *Node,
                                      IRBuilder<> &B) {
  // ...
  B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                    {/* ... */});
}

static void createRoundingModeDecoration(Instruction *I,
                                         unsigned RoundingModeDeco,
                                         IRBuilder<> &B) {
  // ... builds an MDNode pairing the decoration id with the mode:
  //       ConstantInt::get(Int32Ty, SPIRV::Decoration::FPRoundingMode) ...
  // ...
}

static void createSaturatedConversionDecoration(Instruction *I,
                                                IRBuilder<> &B) {
  // ...
  MDNode *SaturatedConversionNode = MDNode::get(
      Ctx, {ConstantAsMetadata::get(ConstantInt::get(
               Int32Ty, SPIRV::Decoration::SaturatedConversion))});
  // ...
}

static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B) {
  // ...
  if (Fu->isIntrinsic()) {
    const unsigned IntrinsicId = Fu->getIntrinsicID();
    switch (IntrinsicId) {
    case Intrinsic::fptosi_sat:
    case Intrinsic::fptoui_sat:
      // ...
    }
  }
  // ...
}

// (inline asm lowering; enclosing function elided)
// ...
//   MDString *ConstraintString = MDString::get(Ctx, IA->getConstraintString());
//   ...
//   B.SetInsertPoint(&Call);
//   B.CreateIntrinsic(Intrinsic::spv_inline_asm, {Args});
// ...
void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
                                          IRBuilder<> &B) {
  std::optional<RoundingMode> RM = FPI->getRoundingMode();
  if (!RM.has_value())
    return;
  unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
  switch (RM.value()) {
  default:
    // ...
    break;
  case RoundingMode::NearestTiesToEven:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
    break;
  case RoundingMode::TowardNegative:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
    break;
  case RoundingMode::TowardPositive:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
    break;
  case RoundingMode::TowardZero:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
    break;
  case RoundingMode::Dynamic:
  case RoundingMode::NearestTiesToAway:
    // ...
    break;
  }
  if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
    return;
  createRoundingModeDecoration(FPI, RoundingModeDeco, B);
}
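
// Added commentary (illustrative): a constrained intrinsic such as
//   @llvm.experimental.constrained.fadd(..., metadata !"round.towardzero", ...)
// reports RoundingMode::TowardZero here, so the instruction receives an
// FPRoundingMode RTZ decoration; Dynamic and NearestTiesToAway have no
// SPIR-V FPRoundingMode equivalent, leave the sentinel untouched, and the
// early return above skips decoration entirely.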
Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  // ...
  B.SetInsertPoint(&I);
  // ...
  for (auto &Op : I.operands()) {
    if (Op.get()->getType()->isSized()) {
      // ...
    }
  }
  // ...
  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  // ...
  I.eraseFromParent();
  // ...
  B.SetInsertPoint(ParentBB);
  IndirectBrInst *BrI = B.CreateIndirectBr(
      /* ... */);
  for (BasicBlock *BBCase : BBCases)
    BrI->addDestination(BBCase);
  return BrI;
}
Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  if (I.getSourceElementType() == IntegerType::getInt8Ty(CurrF->getContext()) &&
      /* ... */)
    return buildLogicalAccessChainFromGEP(I);
  // ...
  B.SetInsertPoint(&I);
  // ...
  Args.push_back(B.getInt1(I.isInBounds()));
  // ...
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  // ...
  B.SetInsertPoint(&I);
  // ...
  if (/* ... */) {
    // ...
    I.eraseFromParent();
    // ...
  }
  // ...
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
    TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
  Type *VTy = V->getType();
  // ...
  if (ElemTy != AssignedType) {
    // ...
  }
  // ...
  if (CurrentType == AssignedType)
    return;
  // ...
  report_fatal_error(/* ... */ " for value " + V->getName(),
                     false);
  // ...
}
void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  TypeValidated.insert(I);
  // ...
  Type *PointerElemTy = deduceElementTypeHelper(Pointer, false);
  if (PointerElemTy == ExpectedElementType ||
      /* ... */)
    return;
  // ...
  MetadataAsValue *VMD = buildMD(ExpectedElementVal);
  // ...
  bool FirstPtrCastOrAssignPtrType = true;
  // ...
  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;
    // ...
    FirstPtrCastOrAssignPtrType = false;
    if (II->getOperand(1) != VMD ||
        /* ... */)
      continue;
    // ...
    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      continue;
    // ...
    if (II->getParent() != I->getParent())
      continue;
    I->setOperand(OperandToReplace, II);
    return;
  }
  // ...
  if (FirstPtrCastOrAssignPtrType) {
    // ...
  } else if (isTodoType(Pointer)) {
    eraseTodoType(Pointer);
    // ...
    DenseSet<std::pair<Value *, Value *>> VisitedSubst{
        std::make_pair(I, Pointer)};
    propagateElemType(Pointer, PrevElemTy, VisitedSubst);
    // ...
  }
  // ...
  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  // ...
}
void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions:
  if (StoreInst *SI = dyn_cast<StoreInst>(I)) {
    if (IsKernelArgInt8(CurrF, SI))
      return replacePointerOperandWithPtrCast(
          I, SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->getContext()),
          0, B);
    // ...
    Value *Op = SI->getValueOperand();
    Type *OpTy = Op->getType();
    // ...
    if (OpTy == Op->getType())
      OpTy = deduceElementTypeByValueDeep(OpTy, Op, false);
    return replacePointerOperandWithPtrCast(I, Pointer, OpTy, 1, B);
  }
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Pointer = LI->getPointerOperand();
    Type *OpTy = LI->getType();
    // ...
    if (/* ... */) {
      Type *NewOpTy = OpTy;
      OpTy = deduceElementTypeByValueDeep(OpTy, LI, false);
      if (OpTy == NewOpTy)
        insertTodoType(Pointer);
    }
    return replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
  }
  if (auto *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    Type *OpTy = nullptr;
    // ...
    bool IsRewrittenGEP =
        GEPI->getSourceElementType() == IntegerType::getInt8Ty(I->getContext());
    if (/* ... */) {
      Value *Src = getPointerRoot(Pointer);
      // ...
    }
    // ...
      OpTy = GEPI->getSourceElementType();
    // ...
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    // ...
      insertTodoType(Pointer);
    return;
  }

  // Handle calls to builtins:
  // ...
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(/* ... */);
  // ...
  bool HaveTypes = false;
  for (/* ... */) {
    // ...
    for (User *U : CalledArg->users()) {
      if (Instruction *Inst = dyn_cast<Instruction>(U)) {
        if ((ElemTy = deduceElementTypeHelper(Inst, false)) != nullptr)
          break;
      }
    }
    // ...
    HaveTypes |= ElemTy != nullptr;
    // ...
  }
  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
    Value *ArgOperand = CI->getArgOperand(OpIdx);
    // ...
    Type *ExpectedType =
        /* ... */;
    if (!ExpectedType && !DemangledName.empty())
      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
          DemangledName, OpIdx, I->getContext());
    if (!ExpectedType || ExpectedType->isVoidTy())
      continue;
    // ...
    replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
}
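
// Added commentary (illustrative, details schematic): given
//   %v = load float, ptr %p
// with no prior type information for %p, the load branch above deduces float
// as the expected pointee type and routes the pointer through something like
//   %c = call ptr @llvm.spv.ptrcast...(ptr %p, metadata float, ...)
// so that instruction selection later sees a pointer whose SPIR-V pointee
// type matches the loaded value type; the exact intrinsic mangling and
// trailing arguments are not shown in this excerpt.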
Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  // ...
  SmallVector<Type *, 4> Types = {/* ... */ I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};
  // ...
  B.SetInsertPoint(&I);
  // ...
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *
SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  // ...
  B.SetInsertPoint(&I);
  SmallVector<Type *, 3> Types = {/* ... */
                                  I.getIndexOperand()->getType()};
  SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  // ...
  B.SetInsertPoint(&I);
  // ...
  SmallVector<Value *> Args;
  Value *AggregateOp = I.getAggregateOperand();
  // ...
  Args.push_back(AggregateOp);
  Args.push_back(I.getInsertedValueOperand());
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  if (I.getAggregateOperand()->getType()->isAggregateType()) {
    // ...
  }
  // ...
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  // ...
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  // ...
  B.SetInsertPoint(&I);
  TrackConstants = false;
  // ...
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                        {I.getPointerOperand(), B.getInt16(Flags),
                         B.getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
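
// Added commentary: only aggregate-typed loads are rewritten here; scalar and
// vector loads survive to instruction selection unchanged. The spv_load
// intrinsic carries the memory-operand flags and alignment as immediates, so
//   %a = load { i32, i32 }, ptr %p, align 8
// becomes, schematically,
//   %a = call { i32, i32 } @llvm.spv.load(ptr %p, i16 <flags>, i8 8)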
Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  // ...
  B.SetInsertPoint(&I);
  TrackConstants = false;
  // ...
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = B.CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, B.getInt16(Flags),
       B.getInt8(I.getAlign().value())});
  // ...
  I.eraseFromParent();
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  Value *ArraySize = nullptr;
  if (I.isArrayAllocation()) {
    const SPIRVSubtarget *STI = TM->getSubtargetImpl();
    if (!STI->canUseExtension(
            SPIRV::Extension::SPV_INTEL_variable_length_array))
      report_fatal_error(
          "array allocation: this instruction requires the following "
          "SPIR-V extension: SPV_INTEL_variable_length_array",
          false);
    ArraySize = I.getArraySize();
  }
  // ...
  B.SetInsertPoint(&I);
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI =
      ArraySize
          ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
                              {PtrTy, ArraySize->getType()},
                              {ArraySize, B.getInt8(I.getAlign().value())})
          : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy},
                              {B.getInt8(I.getAlign().value())});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
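
// Added commentary: fixed-size allocas map to spv_alloca, while a
// variable-length allocation such as `alloca i32, i64 %n` maps to
// spv_alloca_array and is only legal when the target advertises
// SPV_INTEL_variable_length_array; otherwise the pass aborts with the fatal
// error above rather than emit unrepresentable SPIR-V.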
Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  // ...
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  // ...
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemScope(I.getContext(), I.getSyncScopeID()))));
  Args.push_back(B.getInt32(
      /* ... success memory-semantics operand ... */));
  Args.push_back(B.getInt32(
      /* ... failure memory-semantics operand ... */));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                 {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  // ...
  B.SetInsertPoint(&I);
  B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
  return &I;
}
void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
                                             IRBuilder<> &B) {
  // ...
  static const StringSet<> ArtificialGlobals{"llvm.global.annotations",
                                             "llvm.compiler.used"};
  // ...
  deduceElementTypeHelper(&GV, false);
  if (hasInitializer(&GV)) {
    // ...
    auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
                                       /* ... */);
    InitInst->setArgOperand(1, Init);
  }
  // ...
    B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}
bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
                                                   IRBuilder<> &B,
                                                   bool UnknownElemTypeI8) {
  // ...
  if (Type *ElemTy = deduceElementType(I, UnknownElemTypeI8)) {
    // ...
  }
  // ...
}
void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
                                                IRBuilder<> &B) {
  // ...
  static StringMap<unsigned> ResTypeWellKnown = {
      {"async_work_group_copy", WellKnownTypes::Event},
      {"async_work_group_strided_copy", WellKnownTypes::Event},
      {"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};
  // ...
  bool IsKnown = false;
  if (auto *CI = dyn_cast<CallInst>(I)) {
    // ...
    std::string DemangledName =
        getOclOrSpirvBuiltinDemangledName(/* ... */);
    // ...
    if (DemangledName.length() > 0)
      DemangledName =
          SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
    auto ResIt = ResTypeWellKnown.find(DemangledName);
    if (ResIt != ResTypeWellKnown.end()) {
      // ...
      switch (ResIt->second) {
      case WellKnownTypes::Event:
        // ...
        break;
      }
    }
    // ...
    switch (DecorationId) {
    default:
      break;
    case FPDecorationId::SAT:
      // ...
      break;
    case FPDecorationId::RTE:
      createRoundingModeDecoration(
          CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE, B);
      break;
    case FPDecorationId::RTZ:
      createRoundingModeDecoration(
          CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ, B);
      break;
    case FPDecorationId::RTP:
      createRoundingModeDecoration(
          CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP, B);
      break;
    case FPDecorationId::RTN:
      createRoundingModeDecoration(
          CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN, B);
      break;
    }
  }

  Type *Ty = I->getType();
  if (/* ... */) {
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto It = AggrConstTypes.find(II);
        if (It == AggrConstTypes.end())
          report_fatal_error("Unknown composite intrinsic type");
        TypeToAssign = It->second;
      }
    }
    // ...
  }
  for (const auto &Op : I->operands()) {
    if (/* ... */) {
      Type *OpTy = Op->getType();
      // ...
      CallInst *AssignCI =
          /* ... */;
    } else if (/* ... */) {
      Type *OpTy = Op->getType();
      // ...
      CallInst *AssignCI =
          /* ... */;
    }
  }
}
bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
    Instruction *Inst) {
  const SPIRVSubtarget *STI = TM->getSubtargetImpl();
  if (!STI->canUseExtension(SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
    return false;
  // ...
  switch (/* ... */) {
  case Intrinsic::spv_load:
  case Intrinsic::spv_store:
    return true;
  // ...
  }
  // ...
  const std::string Prefix = "__spirv_Atomic";
  const bool IsAtomic = Name.find(Prefix) == 0;
  // ...
}
void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
                                                 IRBuilder<> &B) {
  if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
    // ...
    B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                      {I, /* ... */});
  }
  // ...
  auto processMemAliasingDecoration = [&](unsigned Kind) {
    if (MDNode *AliasListMD = I->getMetadata(Kind)) {
      if (shouldTryToAddMemAliasingDecoration(I)) {
        uint32_t Dec = Kind == LLVMContext::MD_alias_scope
                           ? SPIRV::Decoration::AliasScopeINTEL
                           : SPIRV::Decoration::NoAliasINTEL;
        SmallVector<Value *, 3> Args = {
            I, ConstantInt::get(B.getInt32Ty(), Dec),
            /* ... */};
        B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
                          {I->getType()}, {Args});
      }
    }
  };
  processMemAliasingDecoration(LLVMContext::MD_alias_scope);
  processMemAliasingDecoration(LLVMContext::MD_noalias);
  // ...
  if (MDNode *MD = I->getMetadata(LLVMContext::MD_fpmath)) {
    // ...
    bool AllowFPMaxError =
        /* ... */;
    if (!AllowFPMaxError)
      return;
    // ...
    B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
                      /* ... */);
  }
}
2263 &FPFastMathDefaultInfoMap,
2265 auto it = FPFastMathDefaultInfoMap.
find(
F);
2266 if (it != FPFastMathDefaultInfoMap.
end())
2274 SPIRV::FPFastMathMode::None);
2276 SPIRV::FPFastMathMode::None);
2278 SPIRV::FPFastMathMode::None);
2279 return FPFastMathDefaultInfoMap[
F] = std::move(FPFastMathDefaultInfoVec);
2285 size_t BitWidth = Ty->getScalarSizeInBits();
2289 assert(Index >= 0 && Index < 3 &&
2290 "Expected FPFastMathDefaultInfo for half, float, or double");
2291 assert(FPFastMathDefaultInfoVec.
size() == 3 &&
2292 "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
2293 return FPFastMathDefaultInfoVec[Index];
void SPIRVEmitIntrinsics::insertConstantsForFPFastMathDefault(Module &M) {
  const SPIRVSubtarget *ST = TM->getSubtargetImpl();
  if (!ST->canUseExtension(SPIRV::Extension::SPV_KHR_float_controls2))
    return;

  // ...
  auto Node = M.getNamedMetadata("spirv.ExecutionMode");
  if (!Node) {
    if (!M.getNamedMetadata("opencl.enable.FP_CONTRACT")) {
      // ...
      Constant *InitValue =
          ConstantInt::get(Type::getInt32Ty(M.getContext()), 0);
      // ...
      [[maybe_unused]] GlobalVariable *GV =
          new GlobalVariable(M,
                             Type::getInt32Ty(M.getContext()),
                             /* ... */);
      // ...
    }
    return;
  }

  // ...
  DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
      FPFastMathDefaultInfoMap;

  for (unsigned i = 0; i < Node->getNumOperands(); i++) {
    // ...
    if (EM == SPIRV::ExecutionMode::FPFastMathDefault) {
      assert(/* ... */ && "Expected 4 operands for FPFastMathDefault");
      // ...
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      SPIRV::FPFastMathDefaultInfo &Info =
          getFPFastMathDefaultInfo(FPFastMathDefaultInfoVec, T);
      // ...
      Info.FPFastMathDefault = true;
    } else if (EM == SPIRV::ExecutionMode::ContractionOff) {
      assert(/* ... */ && "Expected no operands for ContractionOff");
      // ...
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      for (SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
        Info.ContractionOff = true;
      }
    } else if (EM == SPIRV::ExecutionMode::SignedZeroInfNanPreserve) {
      assert(/* ... */ && "Expected 1 operand for SignedZeroInfNanPreserve");
      unsigned TargetWidth =
          /* ... */;
      // ...
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      // ...
      assert(Index >= 0 && Index < 3 &&
             "Expected FPFastMathDefaultInfo for half, float, or double");
      assert(FPFastMathDefaultInfoVec.size() == 3 &&
             "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
      FPFastMathDefaultInfoVec[Index].SignedZeroInfNanPreserve = true;
    }
  }

  std::unordered_map<unsigned, GlobalVariable *> GlobalVars;
  for (auto &[Func, FPFastMathDefaultInfoVec] : FPFastMathDefaultInfoMap) {
    if (FPFastMathDefaultInfoVec.empty())
      continue;
    // ...
    for (const SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
      assert(Info.Ty && "Expected target type for FPFastMathDefaultInfo");
      unsigned Flags = /* ... */;
      if (Flags == SPIRV::FPFastMathMode::None && !Info.ContractionOff &&
          !Info.SignedZeroInfNanPreserve && !Info.FPFastMathDefault)
        continue;

      if (Info.ContractionOff && (Flags & SPIRV::FPFastMathMode::AllowContract))
        report_fatal_error("Conflicting FPFastMathFlags: ContractionOff "
                           "and AllowContract");

      if (Info.SignedZeroInfNanPreserve &&
          (Flags &
           (SPIRV::FPFastMathMode::NotNaN | SPIRV::FPFastMathMode::NotInf |
            SPIRV::FPFastMathMode::NSZ))) {
        if (Info.FPFastMathDefault)
          report_fatal_error("Conflicting FPFastMathFlags: "
                             "SignedZeroInfNanPreserve but at least one of "
                             "NotNaN/NotInf/NSZ is enabled.");
      }

      if ((Flags & SPIRV::FPFastMathMode::AllowTransform) &&
          !((Flags & SPIRV::FPFastMathMode::AllowReassoc) &&
            (Flags & SPIRV::FPFastMathMode::AllowContract))) {
        report_fatal_error("Conflicting FPFastMathFlags: "
                           "AllowTransform requires AllowReassoc and "
                           "AllowContract to be set.");
      }

      auto it = GlobalVars.find(Flags);
      GlobalVariable *GV = nullptr;
      if (it != GlobalVars.end()) {
        GV = it->second;
      } else {
        Constant *InitValue =
            ConstantInt::get(Type::getInt32Ty(M.getContext()), Flags);
        // ...
        GV = new GlobalVariable(M,
                                Type::getInt32Ty(M.getContext()),
                                /* ... */);
        // ...
        GlobalVars[Flags] = GV;
      }
      // ...
    }
  }
}
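
// Added commentary: this routine materializes one i32 global per distinct
// fast-math flag mask and caches it in GlobalVars, so the FPFastMathDefault
// execution mode of SPV_KHR_float_controls2 can reference a constant value;
// conflicting combinations (e.g. ContractionOff together with AllowContract,
// or AllowTransform without both AllowReassoc and AllowContract) are rejected
// up front with report_fatal_error rather than silently reconciled.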
void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
                                                 IRBuilder<> &B) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  bool IsConstComposite =
      II && II->getIntrinsicID() == Intrinsic::spv_const_composite;
  if (IsConstComposite && TrackConstants) {
    // ...
    auto t = AggrConsts.find(I);
    assert(t != AggrConsts.end());
    auto *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {II->getType(), II->getType()}, t->second, I, {}, B);
    // ...
    NewOp->setArgOperand(0, I);
  }
  // ...
  for (const auto &Op : I->operands()) {
    // ...
    unsigned OpNo = Op.getOperandNo();
    if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
               (II->paramHasAttr(OpNo, Attribute::ImmArg))))
      continue;
    // ...
    IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
          : B.SetInsertPoint(I);
    // ...
    Type *OpTy = Op->getType();
    // ...
    auto *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {OpTy, OpTyVal->getType()}, Op, OpTyVal, {}, B);
    // ...
    if (!IsConstComposite && isPointerTy(OpTy) && OpElemTy != nullptr &&
        OpElemTy != IntegerType::getInt8Ty(I->getContext())) {
      // ...
      SmallVector<Value *, 2> Args = {
          /* ... */};
      CallInst *PtrCasted =
          B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
      // ...
    }
    I->setOperand(OpNo, NewOp);
  }
  // ...
  if (Named.insert(I).second)
    emitAssignName(I, B);
}
Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
                                                     unsigned OpIdx) {
  std::unordered_set<Function *> FVisited;
  return deduceFunParamElementType(F, OpIdx, FVisited);
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
    Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
  // ...
  if (!FVisited.insert(F).second)
    return nullptr;

  std::unordered_set<Value *> Visited;
  SmallVector<std::pair<Function *, unsigned>> Lookup;
  for (User *U : F->users()) {
    // ...
    if (Type *Ty = deduceElementTypeHelper(OpArg, Visited, false))
      return Ty;
    // ...
    for (User *OpU : OpArg->users()) {
      Instruction *Inst = dyn_cast<Instruction>(OpU);
      if (!Inst || Inst == CI)
        continue;
      // ...
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, false))
        return Ty;
    }
    // ...
    if (FVisited.find(OuterF) != FVisited.end())
      continue;
    for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
      if (OuterF->getArg(i) == OpArg) {
        Lookup.push_back(std::make_pair(OuterF, i));
        break;
      }
    }
  }

  for (auto &Pair : Lookup) {
    if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
      return Ty;
  }

  return nullptr;
}
void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
                                                       IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  // ...
  for (User *U : F->users()) {
    // ...
  }
  // ...
  for (User *U : Arg->users()) {
    if (auto *CI = dyn_cast<CallInst>(U);
        CI && /* ... */ CI->getParent()->getParent() == CurrF) {
      // ...
      deduceOperandElementTypeFunctionPointer(CI, Ops, ElemTy, false);
      // ...
    }
  }
}
void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  // ...
  if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr) {
    // ...
    DenseSet<std::pair<Value *, Value *>> VisitedSubst;
    // ...
    propagateElemType(Arg, IntegerType::getInt8Ty(F->getContext()),
                      VisitedSubst);
    // ...
  }
  // ...
}

static FunctionType *getFunctionPointerElemType(Function *F,
                                                SPIRVGlobalRegistry *GR) {
  // ...
  bool IsNewFTy = false;
  // ...
}
bool SPIRVEmitIntrinsics::processFunctionPointers(Module &M) {
  SmallVector<Function *> Worklist;
  for (auto &F : M) {
    if (F.isIntrinsic())
      continue;
    if (F.isDeclaration()) {
      for (User *U : F.users()) {
        // ...
      }
    } else {
      // ...
      for (User *U : F.users()) {
        auto *II = dyn_cast<IntrinsicInst>(U);
        if (!II || II->arg_size() != 3 || II->getOperand(0) != &F)
          continue;
        if (II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
            II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
          // ...
        }
      }
    }
  }
  if (Worklist.empty())
    return false;

  std::string ServiceFunName = SPIRV_BACKEND_SERVICE_FUN_NAME;
  if (!getVacantFunctionName(M, ServiceFunName))
    report_fatal_error(
        "cannot allocate a name for the internal service function");
  LLVMContext &Ctx = M.getContext();
  // ...
  for (Function *F : Worklist) {
    SmallVector<Value *> Args;
    for (const auto &Arg : F->args())
      Args.push_back(/* ... */);
    IRB.CreateCall(F, Args);
  }
  IRB.CreateRetVoid();
  // ...
  return true;
}
void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(IRBuilder<> &B) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  for (auto It : FDeclPtrTys) {
    Function *F = It.first;
    for (auto *U : F->users()) {
      // ...
      for (auto [Idx, ElemTy] : It.second) {
        // ...
        if (auto *Arg = dyn_cast<Argument>(Param)) {
          B.SetInsertPointPastAllocas(Arg->getParent());
          // ...
        }
        // ...
        replaceUsesOfWithSpvPtrcast(Param, normalizeType(ElemTy), CI,
                                    Ptrcasts);
        // ... B.SetInsertPoint(... .getFirstNonPHIOrDbgOrAlloca());
        // ...
      }
    }
  }
}
GetElementPtrInst *
SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP) {
  // ...
  Type *SrcTy = GEP->getSourceElementType();
  SmallVector<Value *, 8> Indices(GEP->indices());
  auto *ArrTy = dyn_cast<ArrayType>(SrcTy);
  if (ArrTy && ArrTy->getNumElements() == 0 &&
      /* ... */) {
    Indices.erase(Indices.begin());
    SrcTy = ArrTy->getElementType();
    Value *NewGEP = Builder.CreateGEP(SrcTy, GEP->getPointerOperand(), Indices,
                                      "", GEP->getNoWrapFlags());
    // ...
  }
  // ...
}
bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;

  const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
  GR = ST.getSPIRVGlobalRegistry();
  // ...
  HaveFunPtrs =
      ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);
  // ...
  AggrConstTypes.clear();
  // ...

  SmallPtrSet<Instruction *, 4> DeadInsts;
  for (/* ... */) {
    auto *Ref = dyn_cast<GetElementPtrInst>(/* ... */);
    if (!Ref || GR->findDeducedElementType(Ref))
      continue;
    // ...
    GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(Ref);
    if (NewGEP) {
      Ref->replaceAllUsesWith(NewGEP);
      // ...
    }
    if (Type *GepTy = getGEPType(Ref)) {
      // ...
    }
  }
  for (auto *I : DeadInsts) {
    assert(I->use_empty() && "Dead instruction should not have any uses left");
    I->eraseFromParent();
  }

  // ...
  processParamTypesByFunHeader(CurrF, B);

  // ...
  for (/* ... */) {
    // ...
    Type *ElTy = SI->getValueOperand()->getType();
    // ...
  }

  B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV, B);

  preprocessUndefs(B);
  preprocessCompositeConstants(B);
  // ...
  applyDemangledPtrArgTypes(B);

  for (auto &I : Worklist) {
    // ...
    if (isConvergenceIntrinsic(I))
      continue;

    bool Postpone = insertAssignPtrTypeIntrs(I, B, false);
    // ...
    insertAssignTypeIntrs(I, B);
    insertPtrCastOrAssignTypeInstr(I, B);
    // ...
    if (Postpone && !GR->findAssignPtrTypeInstr(I))
      insertAssignPtrTypeIntrs(I, B, true);

    if (auto *FPI = dyn_cast<ConstrainedFPIntrinsic>(I))
      useRoundingMode(FPI, B);
  }

  SmallPtrSet<Instruction *, 4> IncompleteRets;
  for (/* ... */)
    deduceOperandElementType(&I, &IncompleteRets);

  for (BasicBlock &BB : Func)
    for (PHINode &Phi : BB.phis())
      if (/* ... */)
        deduceOperandElementType(&Phi, nullptr);

  for (auto *I : Worklist) {
    TrackConstants = true;
    // ...
    if (isConvergenceIntrinsic(I))
      continue;
    processInstrAfterVisit(I, B);
  }

  return true;
}
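
// Added commentary: the per-function pipeline runs in a fixed order: GEP
// cleanup, parameter types from the function header, globals, undef and
// composite-constant preprocessing, then one pass per instruction inserting
// spv_assign_ptr_type/spv_assign_type plus pointer casts, followed by
// operand-type deduction and finally constant tracking. Pointer values whose
// element type is still unknown after this stay in TodoType so that
// postprocessTypes() can retry them at module scope.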
bool SPIRVEmitIntrinsics::postprocessTypes(Module &M) {
  if (!GR || TodoTypeSz == 0)
    return false;

  unsigned SzTodo = TodoTypeSz;
  DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
  for (/* ... */) {
    // ...
    CallInst *AssignCI = GR->findAssignPtrTypeInstr(Op);
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy || !AssignCI)
      continue;
    // ...
    std::unordered_set<Value *> Visited;
    if (Type *ElemTy = deduceElementTypeHelper(Op, Visited, false, true)) {
      if (ElemTy != KnownTy) {
        DenseSet<std::pair<Value *, Value *>> VisitedSubst;
        propagateElemType(CI, ElemTy, VisitedSubst);
        // ...
      }
    }
    // ...
    if (Op->hasUseList()) {
      for (User *U : Op->users()) {
        // ...
      }
    }
  }
  if (TodoTypeSz == 0)
    return true;

  // ...
  SmallPtrSet<Instruction *, 4> IncompleteRets;
  for (/* ... */) {
    auto It = ToProcess.find(&I);
    if (It == ToProcess.end())
      continue;
    It->second.remove_if([this](Value *V) { return !isTodoType(V); });
    if (It->second.size() == 0)
      continue;
    deduceOperandElementType(&I, &IncompleteRets, &It->second, true);
    if (TodoTypeSz == 0)
      break;
  }

  return SzTodo > TodoTypeSz;
}
void SPIRVEmitIntrinsics::parseFunDeclarations(Module &M) {
  for (auto &F : M) {
    if (!F.isDeclaration() || F.isIntrinsic())
      continue;
    // ...
    if (DemangledName.empty())
      continue;
    // ...
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode != SPIRV::OpGroupAsyncCopy)
      continue;
    // ...
    SmallVector<unsigned> Idxs;
    // ...
    LLVMContext &Ctx = F.getContext();
    // ...
    SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
    if (!TypeStrs.size())
      continue;
    // ...
    for (unsigned Idx : Idxs) {
      if (Idx >= TypeStrs.size())
        continue;
      if (Type *ElemTy =
              SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx)) {
        // ...
        FDeclPtrTys[&F].push_back(std::make_pair(Idx, ElemTy));
      }
    }
  }
}
bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
  bool Changed = false;

  parseFunDeclarations(M);
  insertConstantsForFPFastMathDefault(M);
  // ...
  for (auto &F : M) {
    // ...
    if (!F.isDeclaration() && !F.isIntrinsic()) {
      // ...
      processParamTypes(&F, B);
    }
  }

  CanTodoType = false;
  Changed |= postprocessTypes(M);

  Changed |= processFunctionPointers(M);
  // ...
  return Changed;
}

ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}
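
// Added commentary (usage sketch, assuming the standard legacy pass-manager
// wiring used by the SPIR-V backend): the factory above is what the target's
// pass configuration registers, along the lines of
//   // in SPIRVTargetMachine's pass configuration:
//   addPass(createSPIRVEmitIntrinsicsPass(&getTM<SPIRVTargetMachine>()));
// so the pass runs over the whole module before instruction selection,
// leaving behind the llvm.spv.* intrinsics that SPIRVGlobalRegistry and the
// ISel lowering consume.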