#include "llvm/IR/IntrinsicsSPIRV.h"
#include <unordered_set>

#define GET_BuiltinGroup_DECL
#include "SPIRVGenTables.inc"
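// SPIRVEmitIntrinsics is a module pass that prepares LLVM IR for the SPIR-V
// backend's instruction selection: constructs with no direct SPIR-V mapping
// (aggregate loads/stores, GEPs, composite constants, switch, atomics,
// inline asm, ...) are rewritten into llvm.spv.* intrinsic calls, and pointer
// element types are deduced and recorded via spv_assign_ptr_type/spv_ptrcast
// for later typed-pointer reconstruction.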
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  Function *CurrF = nullptr;
  bool TrackConstants = true;
  bool HaveFunPtrs = false;
  DenseMap<Instruction *, Constant *> AggrConsts;
  DenseMap<Instruction *, Type *> AggrConstTypes;
  DenseSet<Instruction *> AggrStores;
  std::unordered_set<Value *> Named;
  DenseMap<Function *, SmallVector<std::pair<unsigned, Type *>>> FDeclPtrTys;

  bool CanTodoType = true;
  unsigned TodoTypeSz = 0;
  DenseMap<Value *, bool> TodoType;
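  // "Todo types": values whose pointee type could only be guessed (defaulted
  // to i8) are recorded here and revisited by postprocessTypes() once the
  // whole module has been seen; TodoTypeSz counts entries still unresolved.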
  void insertTodoType(Value *Op) {
    auto It = TodoType.try_emplace(Op, true);
    if (It.second)
      ++TodoTypeSz;
  }
  void eraseTodoType(Value *Op) {
    auto It = TodoType.find(Op);
    if (It != TodoType.end() && It->second) {
      It->second = false;
      --TodoTypeSz;
    }
  }
  bool isTodoType(Value *Op) {
    auto It = TodoType.find(Op);
    return It != TodoType.end() && It->second;
  }

  std::unordered_set<Instruction *> TypeValidated;

  enum WellKnownTypes { Event };
  Type *deduceElementType(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, bool UnknownElemTypeI8);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited,
                                bool UnknownElemTypeI8,
                                bool IgnoreKnownType = false);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited,
                                     bool UnknownElemTypeI8);
  void maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                          bool UnknownElemTypeI8);

  Type *deduceNestedTypeHelper(User *U, bool UnknownElemTypeI8);
  Type *deduceNestedTypeHelper(User *U, Type *OrigTy,
                               std::unordered_set<Value *> &Visited,
                               bool UnknownElemTypeI8);

  void deduceOperandElementType(Instruction *I,
                                SmallPtrSet<Instruction *, 4> *IncompleteRets,
                                const SmallPtrSet<Value *, 4> *AskOps = nullptr,
                                bool IsPostprocessing = false);

  Type *reconstructType(Value *Op, bool UnknownElemTypeI8,
                        bool IsPostprocessing);
  bool insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B,
                                bool UnknownElemTypeI8);
  void insertAssignPtrTypeTargetExt(TargetExtType *AssignedType, Value *V,
                                    IRBuilder<> &B);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);
  bool shouldTryToAddMemAliasingDecoration(Instruction *Inst);
  void insertConstantsForFPFastMathDefault(Module &M);
  void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);
  bool deduceOperandElementTypeCalledFunction(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool &Incomplete);
  void deduceOperandElementTypeFunctionPointer(
      CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
      Type *&KnownElemTy, bool IsPostprocessing);
  bool deduceOperandElementTypeFunctionRet(
      Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
      const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
      Type *&KnownElemTy, Value *Op, Function *F);
  CallInst *buildSpvPtrcast(Function *F, Value *Op, Type *ElemTy);
  void replaceUsesOfWithSpvPtrcast(Value *Op, Type *ElemTy, Instruction *I,
                                   DenseMap<Function *, CallInst *> Ptrcasts);
  void propagateElemType(Value *Op, Type *ElemTy,
                         DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                            DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
  void propagateElemTypeRec(Value *Op, Type *PtrElemTy, Type *CastElemTy,
                            DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
                            std::unordered_set<Value *> &Visited,
                            DenseMap<Function *, CallInst *> Ptrcasts);
  GetElementPtrInst *simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP);
  bool postprocessTypes(Module &M);
  bool processFunctionPointers(Module &M);
  void parseFunDeclarations(Module &M);
  void useRoundingMode(ConstrainedFPIntrinsic *FPI, IRBuilder<> &B);
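  // The walkLogicalAccessChain / getGEPTypeLogical /
  // buildLogicalAccessChainFromGEP trio serves logical (non-physical
  // addressing) SPIR-V targets, where byte offsets produced by i8-typed GEPs
  // must be translated back into structured member/element indices.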
  bool walkLogicalAccessChain(
      GetElementPtrInst &GEP,
      const std::function<void(Type *PointedType, uint64_t Index)>
          &OnLiteralIndexing,
      const std::function<void(Type *PointedType, Value *Index)>
          &OnDynamicIndexing);
  Type *getGEPType(GetElementPtrInst *GEP);
  Type *getGEPTypeLogical(GetElementPtrInst *GEP);
  Instruction *buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP);

public:
  static char ID;
  SPIRVEmitIntrinsics(SPIRVTargetMachine *TM = nullptr)
      : ModulePass(ID), TM(TM) {}
  Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
  Instruction *visitInsertElementInst(InsertElementInst &I);
  Instruction *visitExtractElementInst(ExtractElementInst &I);
  Instruction *visitExtractValueInst(ExtractValueInst &I);
  Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);

  StringRef getPassName() const override { return "SPIRV emit intrinsics"; }

  bool runOnModule(Module &M) override;

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    ModulePass::getAnalysisUsage(AU);
  }
};
bool isConvergenceIntrinsic(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
         II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
}

bool expectIgnoredInIRTranslation(const Instruction *I) {
  const auto *II = dyn_cast<IntrinsicInst>(I);
  if (!II)
    return false;
  switch (II->getIntrinsicID()) {
  case Intrinsic::invariant_start:
  case Intrinsic::spv_resource_handlefrombinding:
  case Intrinsic::spv_resource_getpointer:
    return true;
  default:
    return false;
  }
}

static Value *getPointerRoot(Value *I) {
  // Walk through spv_ptrcast chains back to the original pointer.
  if (auto *II = dyn_cast<IntrinsicInst>(I)) {
    if (II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
      Value *V = II->getArgOperand(0);
      return getPointerRoot(V);
    }
  }
  return I;
}
char SPIRVEmitIntrinsics::ID = 0;
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  if (isa<PHINode>(I))
    B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
  else
    B.SetInsertPoint(I);
}

static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I) {
  B.SetCurrentDebugLocation(I->getDebugLoc());
  if (I->getType()->isVoidTy())
    B.SetInsertPoint(I->getNextNode());
  else
    B.SetInsertPoint(*I->getInsertionPointAfterDef());
}

static bool requireAssignType(Instruction *I) {
  if (const auto *Intr = dyn_cast<IntrinsicInst>(I)) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}

static void reportFatalOnTokenType(const Instruction *I) {
  if (I->getType()->isTokenTy())
    report_fatal_error("A token is encountered but SPIR-V without extensions "
                       "does not support token type",
                       false);
}

static void emitAssignName(Instruction *I, IRBuilder<> &B) {
  if (!I->hasName() || I->getType()->isAggregateType() ||
      expectIgnoredInIRTranslation(I))
    return;
  reportFatalOnTokenType(I);
  setInsertPointAfterDef(B, I);
  std::vector<Value *> Args = {I};
  // ... (the value's name is appended to Args as immediate operands)
  B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
}
void SPIRVEmitIntrinsics::replaceAllUsesWith(Value *Src, Value *Dest,
                                             bool DeleteOld) {
  GR->replaceAllUsesWith(Src, Dest, DeleteOld);
  // Update the incomplete-type bookkeeping, if any.
  if (isTodoType(Src)) {
    if (DeleteOld)
      eraseTodoType(Src);
    insertTodoType(Dest);
  }
}

void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(IRBuilder<> &B,
                                                     Instruction *Src,
                                                     Instruction *Dest) {
  replaceAllUsesWith(Src, Dest);
  std::string Name = Src->hasName() ? Src->getName().str() : "";
  Src->eraseFromParent();
  if (!Name.empty()) {
    Dest->setName(Name);
    if (Named.insert(Dest).second)
      emitAssignName(Dest, B);
  }
}
Type *SPIRVEmitIntrinsics::reconstructType(Value *Op, bool UnknownElemTypeI8,
                                           bool IsPostprocessing) {
  // ...
  if (UnknownElemTypeI8) {
    if (!IsPostprocessing)
      insertTodoType(Op);
    // ...
  }
  // ...
}
CallInst *SPIRVEmitIntrinsics::buildSpvPtrcast(Function *F, Value *Op,
                                               Type *ElemTy) {
  IRBuilder<> B(Op->getContext());
  if (auto *OpI = dyn_cast<Instruction>(Op)) {
    // Insert the cast right after Op's definition.
    B.SetInsertPoint(*OpI->getInsertionPointAfterDef());
    B.SetCurrentDebugLocation(OpI->getDebugLoc());
  } else if (auto *OpA = dyn_cast<Argument>(Op)) {
    B.SetInsertPointPastAllocas(OpA->getParent());
    B.SetCurrentDebugLocation(DebugLoc());
  } else {
    B.SetInsertPoint(F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
  }
  Type *OpTy = Op->getType();
  SmallVector<Type *, 2> Types = {OpTy, OpTy};
  SmallVector<Value *, 2> Args = {Op, buildMD(getNormalizedPoisonValue(ElemTy)),
                                  B.getInt32(getPointerAddressSpace(OpTy))};
  CallInst *PtrCasted =
      B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  GR->buildAssignPtr(B, ElemTy, PtrCasted);
  return PtrCasted;
}

void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
    Value *Op, Type *ElemTy, Instruction *I,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  Function *F = I->getFunction();
  CallInst *PtrCastedI = nullptr;
  auto It = Ptrcasts.find(F);
  if (It == Ptrcasts.end()) {
    PtrCastedI = buildSpvPtrcast(F, Op, ElemTy);
    Ptrcasts[F] = PtrCastedI;
  } else {
    PtrCastedI = It->second;
  }
  I->replaceUsesOfWith(Op, PtrCastedI);
}
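// Note the per-function cache above: replaceUsesOfWithSpvPtrcast() creates at
// most one spv_ptrcast of Op to ElemTy per function and reuses it for later
// users, instead of materializing a fresh cast at every use site.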
void SPIRVEmitIntrinsics::propagateElemType(
    Value *Op, Type *ElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    Instruction *UI = dyn_cast<Instruction>(U);
    if (!UI || isa<IntrinsicInst>(UI))
      continue;
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    // Keep already-validated instructions valid by casting Op for them.
    if (TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, ElemTy, UI, Ptrcasts);
  }
}

void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
  std::unordered_set<Value *> Visited;
  DenseMap<Function *, CallInst *> Ptrcasts;
  propagateElemTypeRec(Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
                       std::move(Ptrcasts));
}

void SPIRVEmitIntrinsics::propagateElemTypeRec(
    Value *Op, Type *PtrElemTy, Type *CastElemTy,
    DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
    std::unordered_set<Value *> &Visited,
    DenseMap<Function *, CallInst *> Ptrcasts) {
  if (!Visited.insert(Op).second)
    return;
  SmallVector<User *> Users(Op->users());
  for (auto *U : Users) {
    Instruction *UI = dyn_cast<Instruction>(U);
    if (!UI || isa<IntrinsicInst>(UI))
      continue;
    if (!VisitedSubst.insert(std::make_pair(U, Op)).second)
      continue;
    if (TypeValidated.find(UI) != TypeValidated.end())
      replaceUsesOfWithSpvPtrcast(Op, CastElemTy, UI, Ptrcasts);
  }
}
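// Propagation sketch: when a value's deduced pointee type is updated, users
// that were already type-validated must not silently change meaning, so they
// are switched to an spv_ptrcast of the value carrying the element type they
// were validated against, while the value itself carries the new
// spv_assign_ptr_type annotation.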
Type *
SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
                                      UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  Type *Ty = ValueTy;
  if (Operand) {
    if (isPointerTy(Ty)) {
      if (Type *NestedTy =
              deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
        Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(Ty));
    } else {
      Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited,
                                  UnknownElemTypeI8);
    }
  }
  return Ty;
}

Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
    Value *Op, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8) {
  // ...
  for (User *OpU : Op->users()) {
    if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
        return Ty;
    }
  }
  return nullptr;
}

static Type *getPointeeTypeByCallInst(StringRef DemangledName,
                                      Function *CalledF, unsigned OpIdx) {
  if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
       DemangledName.starts_with("printf(")) &&
      OpIdx == 0)
    return IntegerType::getInt8Ty(CalledF->getContext());
  return nullptr;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I,
                                                   bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeHelper(I, Visited, UnknownElemTypeI8);
}

void SPIRVEmitIntrinsics::maybeAssignPtrType(Type *&Ty, Value *Op, Type *RefTy,
                                             bool UnknownElemTypeI8) {
  if (isUntypedPointerTy(RefTy)) {
    if (!UnknownElemTypeI8)
      return;
    insertTodoType(Op);
  }
  Ty = RefTy;
}
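// UnknownElemTypeI8 controls the fallback policy: when set, a pointer whose
// pointee cannot be deduced is provisionally typed (ultimately as i8) and
// queued as a "todo type"; when unset, deduction simply reports failure.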
bool SPIRVEmitIntrinsics::walkLogicalAccessChain(
    GetElementPtrInst &GEP,
    const std::function<void(Type *, uint64_t)> &OnLiteralIndexing,
    const std::function<void(Type *, Value *)> &OnDynamicIndexing) {
  const DataLayout &DL = CurrF->getDataLayout();
  Value *Src = getPointerRoot(GEP.getPointerOperand());
  Type *CurType = deduceElementType(Src, true);
  Value *Operand = *GEP.idx_begin();
  if (!isa<ConstantInt>(Operand)) {
    // Dynamic indexing is only valid on arrays: report the element type and
    // the index operand, then stop the walk.
    auto *AT = dyn_cast<ArrayType>(CurType);
    if (AT)
      OnDynamicIndexing(AT->getElementType(), Operand);
    return AT == nullptr;
  }
  // A literal byte offset is peeled down through arrays and structs until it
  // is fully consumed.
  uint64_t Offset = cast<ConstantInt>(Operand)->getZExtValue();
  do {
    if (auto *AT = dyn_cast<ArrayType>(CurType)) {
      uint32_t EltTypeSize = DL.getTypeSizeInBits(AT->getElementType()) / 8;
      uint64_t Index = Offset / EltTypeSize;
      Offset -= Index * EltTypeSize;
      CurType = AT->getElementType();
      OnLiteralIndexing(CurType, Index);
    } else if (auto *ST = dyn_cast<StructType>(CurType)) {
      uint32_t StructSize = DL.getTypeSizeInBits(ST) / 8;
      if (Offset >= StructSize)
        return true;
      const auto &STL = DL.getStructLayout(ST);
      unsigned Element = STL->getElementContainingOffset(Offset);
      Offset -= STL->getElementOffset(Element);
      CurType = ST->getElementType(Element);
      OnLiteralIndexing(CurType, Element);
    } else {
      // Cannot descend further; leave the GEP untouched.
      return true;
    }
  } while (Offset > 0);
  return false;
}
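// Worked example for the byte-offset walk above (a sketch; offsets assume
// natural layout): with %T = type { i32, [4 x float] } and a logical-SPIR-V
// GEP "getelementptr i8, ptr %p, i64 8", getElementContainingOffset(8)
// selects member #1 (the array, at byte offset 4); the remaining offset
// 8 - 4 = 4 divided by the 4-byte element size yields array index 1, so the
// resulting structured chain is member 1, element 1.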
Instruction *
SPIRVEmitIntrinsics::buildLogicalAccessChainFromGEP(GetElementPtrInst &GEP) {
  const DataLayout &DL = CurrF->getDataLayout();
  IRBuilder<> B(GEP.getContext());
  B.SetInsertPoint(&GEP);

  std::vector<Value *> Indices;
  Indices.push_back(ConstantInt::get(
      IntegerType::getInt32Ty(CurrF->getContext()), 0, false));
  walkLogicalAccessChain(
      GEP,
      [&Indices, &B](Type *EltType, uint64_t Index) {
        Indices.push_back(
            ConstantInt::get(B.getInt64Ty(), Index, false));
      },
      [&](Type *EltType, Value *Offset) {
        // Dynamic array index: divide the byte offset by the element size.
        uint32_t EltTypeSize = DL.getTypeSizeInBits(EltType) / 8;
        Value *Index = B.CreateUDiv(
            Offset, ConstantInt::get(Offset->getType(), EltTypeSize, false));
        Indices.push_back(Index);
      });

  SmallVector<Type *, 2> Types = {GEP.getType(), GEP.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(GEP.isInBounds()));
  Args.push_back(GEP.getOperand(0));
  llvm::append_range(Args, Indices);

  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &GEP, NewI);
  return NewI;
}
Type *SPIRVEmitIntrinsics::getGEPTypeLogical(GetElementPtrInst *GEP) {
  Type *CurType = GEP->getResultElementType();

  bool Interrupted = walkLogicalAccessChain(
      *GEP, [&CurType](Type *EltType, uint64_t Index) { CurType = EltType; },
      [&CurType](Type *EltType, Value *Index) { CurType = EltType; });

  return Interrupted ? GEP->getResultElementType() : CurType;
}

Type *SPIRVEmitIntrinsics::getGEPType(GetElementPtrInst *Ref) {
  if (Ref->getSourceElementType() ==
          IntegerType::getInt8Ty(CurrF->getContext()) &&
      TM->getSubtargetImpl()->isLogicalSPIRV())
    return getGEPTypeLogical(Ref);

  Type *Ty = nullptr;
  // For a single-index GEP the source element type is the accessed type;
  // otherwise it is the result element type.
  if (Ref->getNumIndices() == 1)
    Ty = Ref->getSourceElementType();
  else
    Ty = Ref->getResultElementType();
  return Ty;
}
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
    Value *I, std::unordered_set<Value *> &Visited, bool UnknownElemTypeI8,
    bool IgnoreKnownType) {
  // Return an already-deduced type, unless the caller asks to re-deduce it.
  if (!IgnoreKnownType)
    if (Type *KnownTy = GR->findDeducedElementType(I))
      return KnownTy;

  // Break cycles.
  if (!Visited.insert(I).second)
    return nullptr;

  Type *Ty = nullptr;
  // Look for the known basic patterns of type inference.
  if (auto *Ref = dyn_cast<AllocaInst>(I)) {
    maybeAssignPtrType(Ty, I, Ref->getAllocatedType(), UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    Ty = getGEPType(Ref);
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    Value *Op = Ref->getPointerOperand();
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy)
      KnownTy = Op->getType();
    if (Type *ElemTy = getPointeeType(KnownTy))
      maybeAssignPtrType(Ty, I, ElemTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
    Ty = deduceElementTypeByValueDeep(
        Ref->getValueType(),
        Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited,
        UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Type *RefTy = deduceElementTypeHelper(Ref->getPointerOperand(), Visited,
                                          UnknownElemTypeI8);
    maybeAssignPtrType(Ty, I, RefTy, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
        isPointerTy(Src) && isPointerTy(Dest))
      Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited,
                                   UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    if (Value *Op = Ref->getNewValOperand())
      Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    if (Value *Op = Ref->getValOperand())
      Ty = deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8);
  } else if (auto *Ref = dyn_cast<PHINode>(I)) {
    // Vote among incoming values: the most frequently deduced element type
    // wins.
    Type *BestTy = nullptr;
    unsigned MaxN = 1;
    DenseMap<Type *, unsigned> PhiTys;
    for (int i = Ref->getNumIncomingValues() - 1; i >= 0; --i) {
      Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited,
                                        UnknownElemTypeI8);
      if (!Ty)
        continue;
      auto It = PhiTys.try_emplace(Ty, 1);
      if (!It.second) {
        ++It.first->second;
        if (It.first->second > MaxN) {
          MaxN = It.first->second;
          BestTy = Ty;
        }
      }
    }
    if (BestTy)
      Ty = BestTy;
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
      Ty = deduceElementTypeByUsersDeep(Op, Visited, UnknownElemTypeI8);
      if (Ty)
        break;
    }
  } else if (auto *CI = dyn_cast<CallInst>(I)) {
    static StringMap<unsigned> ResTypeByArg = {
        {"to_global", 0},
        {"to_local", 0},
        {"to_private", 0},
        {"__spirv_GenericCastToPtr_ToGlobal", 0},
        {"__spirv_GenericCastToPtr_ToLocal", 0},
        {"__spirv_GenericCastToPtr_ToPrivate", 0},
        {"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
        {"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
    auto *II = dyn_cast<IntrinsicInst>(I);
    if (II && II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
      auto *HandleType = cast<TargetExtType>(II->getOperand(0)->getType());
      if (HandleType->getTargetExtName() == "spirv.Image" ||
          HandleType->getTargetExtName() == "spirv.SignedImage") {
        for (User *U : II->users()) {
          // ... (take the type from a load/store user of the image pointer)
        }
      } else if (HandleType->getTargetExtName() == "spirv.VulkanBuffer") {
        Ty = HandleType->getTypeParameter(0);
      }
      // ...
    } else if (II && II->getIntrinsicID() ==
                         Intrinsic::spv_generic_cast_to_ptr_explicit) {
      Ty = deduceElementTypeHelper(CI->getArgOperand(0), Visited,
                                   UnknownElemTypeI8);
    } else if (Function *CalledF = CI->getCalledFunction()) {
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      if (DemangledName.length() > 0)
        DemangledName = SPIRV::lookupBuiltinNameHelper(DemangledName);
      auto AsArgIt = ResTypeByArg.find(DemangledName);
      if (AsArgIt != ResTypeByArg.end())
        Ty = deduceElementTypeHelper(CI->getArgOperand(AsArgIt->second),
                                     Visited, UnknownElemTypeI8);
      // ...
    }
  }

  // Remember the deduced element type.
  if (Ty && !IgnoreKnownType) {
    GR->addDeducedElementType(I, normalizeType(Ty));
  }
  return Ty;
}
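// Deduction example (hypothetical IR): for
//   %p = alloca i32
//   %q = addrspacecast ptr %p to ptr addrspace(4)
// the AllocaInst case yields i32 for %p, the AddrSpaceCastInst case recurses
// into the pointer operand and assigns i32 to %q as well; both results are
// memoized in the global registry so later queries are simple lookups.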
Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U,
                                                  bool UnknownElemTypeI8) {
  std::unordered_set<Value *> Visited;
  return deduceNestedTypeHelper(U, U->getType(), Visited, UnknownElemTypeI8);
}

Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
    User *U, Type *OrigTy, std::unordered_set<Value *> &Visited,
    bool UnknownElemTypeI8) {
  if (!U)
    return OrigTy;

  // Maybe already known.
  if (Type *KnownTy = GR->findDeducedCompositeType(U))
    return KnownTy;

  // Break cycles.
  if (!Visited.insert(U).second)
    return OrigTy;

  if (isa<StructType>(OrigTy)) {
    SmallVector<Type *> Tys;
    bool Change = false;
    for (unsigned i = 0; i < U->getNumOperands(); ++i) {
      Value *Op = U->getOperand(i);
      assert(Op && "Operands should not be null.");
      Type *OpTy = Op->getType();
      Type *Ty = OpTy;
      if (isPointerTy(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(OpTy));
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      Tys.push_back(Ty);
      Change |= Ty != OpTy;
    }
    if (Change) {
      Type *NewTy = StructType::create(Tys);
      GR->addDeducedCompositeType(U, NewTy);
      return NewTy;
    }
  } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = ArrTy->getElementType();
      Type *Ty = OpTy;
      if (isPointerTy(OpTy)) {
        if (Type *NestedTy =
                deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
          Ty = getTypedPointerWrapper(NestedTy, getPointerAddressSpace(OpTy));
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited,
                                    UnknownElemTypeI8);
      }
      if (Ty != OpTy) {
        Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = VecTy->getElementType();
      Type *Ty = OpTy;
      if (Type *NestedTy =
              deduceElementTypeHelper(Op, Visited, UnknownElemTypeI8))
        Ty = NestedTy;
      if (Ty != OpTy) {
        Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  }

  return OrigTy;
}
Type *SPIRVEmitIntrinsics::deduceElementType(Value *I, bool UnknownElemTypeI8) {
  if (Type *Ty = deduceElementTypeHelper(I, UnknownElemTypeI8))
    return Ty;
  if (!UnknownElemTypeI8)
    return nullptr;
  insertTodoType(I);
  return IntegerType::getInt8Ty(I->getContext());
}

static Type *getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I,
                             Value *PointerOperand) {
  // ...
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool &Incomplete) {
  Function *CalledF = CI->getCalledFunction();
  if (!CalledF)
    return false;
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CalledF->getName());
  if (DemangledName.length() > 0 &&
      !StringRef(DemangledName).starts_with("llvm.")) {
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(*CalledF);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode == SPIRV::OpGroupAsyncCopy) {
      for (unsigned i = 0, PtrCnt = 0; i < CI->arg_size() && PtrCnt < 2; ++i) {
        Value *Op = CI->getArgOperand(i);
        if (!isPointerTy(Op->getType()))
          continue;
        ++PtrCnt;
        if (Type *ElemTy = GR->findDeducedElementType(Op))
          KnownElemTy = ElemTy; // src will rewrite dest if both are defined
        Ops.push_back(std::make_pair(Op, i));
      }
    } else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
      if (CI->arg_size() < 2)
        return true;
      Value *Op = CI->getArgOperand(0);
      if (!isPointerTy(Op->getType()))
        return true;
      switch (Opcode) {
      case SPIRV::OpAtomicFAddEXT:
      case SPIRV::OpAtomicFMinEXT:
      case SPIRV::OpAtomicFMaxEXT:
      case SPIRV::OpAtomicLoad:
      case SPIRV::OpAtomicCompareExchangeWeak:
      case SPIRV::OpAtomicCompareExchange:
      case SPIRV::OpAtomicExchange:
      case SPIRV::OpAtomicIAdd:
      case SPIRV::OpAtomicISub:
      case SPIRV::OpAtomicOr:
      case SPIRV::OpAtomicXor:
      case SPIRV::OpAtomicAnd:
      case SPIRV::OpAtomicUMin:
      case SPIRV::OpAtomicUMax:
      case SPIRV::OpAtomicSMin:
      case SPIRV::OpAtomicSMax: {
        KnownElemTy = isPointerTy(CI->getType()) ? getAtomicElemTy(GR, CI, Op)
                                                 : CI->getType();
        if (!KnownElemTy)
          return true;
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
      } break;
      case SPIRV::OpAtomicStore: {
        if (CI->arg_size() < 4)
          return true;
        Value *ValOp = CI->getArgOperand(3);
        KnownElemTy = ValOp->getType();
        Incomplete = isTodoType(Op);
        Ops.push_back(std::make_pair(Op, 0));
      } break;
      }
    }
  }
  return true;
}
void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
    CallInst *CI, SmallVector<std::pair<Value *, unsigned>> &Ops,
    Type *&KnownElemTy, bool IsPostprocessing) {
  Value *Op = CI->getCalledOperand();
  if (!Op || !isPointerTy(Op->getType()))
    return;
  Ops.push_back(std::make_pair(Op, std::numeric_limits<unsigned>::max()));
  FunctionType *FTy = CI->getFunctionType();
  bool IsNewFTy = false, IsIncomplete = false;
  SmallVector<Type *, 4> ArgTys;
  for (Value *Arg : CI->args()) {
    Type *ArgTy = Arg->getType();
    if (ArgTy->isPointerTy()) {
      if (Type *ElemTy = GR->findDeducedElementType(Arg)) {
        IsNewFTy = true;
        ArgTy = getTypedPointerWrapper(ElemTy, getPointerAddressSpace(ArgTy));
        if (isTodoType(Arg))
          IsIncomplete = true;
      } else {
        IsIncomplete = true;
      }
    }
    ArgTys.push_back(ArgTy);
  }
  Type *RetTy = FTy->getReturnType();
  if (CI->getType()->isPointerTy()) {
    if (Type *ElemTy = GR->findDeducedElementType(CI)) {
      IsNewFTy = true;
      RetTy =
          getTypedPointerWrapper(ElemTy, getPointerAddressSpace(CI->getType()));
      if (isTodoType(CI))
        IsIncomplete = true;
    } else {
      IsIncomplete = true;
    }
  }
  if (!IsPostprocessing && IsIncomplete)
    insertTodoType(CI);
  KnownElemTy =
      IsNewFTy ? FunctionType::get(RetTy, ArgTys, FTy->isVarArg()) : FTy;
}
bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing,
    Type *&KnownElemTy, Value *Op, Function *F) {
  KnownElemTy = GR->findDeducedElementType(F);
  if (KnownElemTy)
    return false;
  if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
    GR->addDeducedElementType(F, normalizeType(OpElemTy));
    // Non-recursively update the types of all function uses.
    DenseSet<std::pair<Value *, Value *>> VisitedSubst{std::make_pair(I, Op)};
    for (User *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || CI->getCalledFunction() != F)
        continue;
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(CI)) {
        Type *PrevElemTy = GR->findDeducedElementType(CI);
        GR->updateAssignType(AssignCI, CI, getNormalizedPoisonValue(OpElemTy));
        propagateElemType(CI, PrevElemTy, VisitedSubst);
      }
    }
    // Process the queued-up returns that were waiting for this deduction.
    if (IncompleteRets)
      for (Instruction *IncompleteRetI : *IncompleteRets)
        deduceOperandElementType(IncompleteRetI, nullptr, AskOps,
                                 IsPostprocessing);
  } else if (IncompleteRets) {
    IncompleteRets->insert(I);
  }
  TypeValidated.insert(I);
  return true;
}
void SPIRVEmitIntrinsics::deduceOperandElementType(
    Instruction *I, SmallPtrSet<Instruction *, 4> *IncompleteRets,
    const SmallPtrSet<Value *, 4> *AskOps, bool IsPostprocessing) {
  SmallVector<std::pair<Value *, unsigned>> Ops;
  Type *KnownElemTy = nullptr;
  bool Incomplete = false;
  // Look for the known basic patterns of type inference.
  if (auto *Ref = dyn_cast<PHINode>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Value *Op = Ref->getIncomingValue(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    KnownElemTy = GR->findDeducedElementType(I);
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getPointerOperand(), 0));
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    Ops.push_back(std::make_pair(Ref->getOperand(0), 0));
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    if (GR->findDeducedElementType(Ref->getPointerOperand()))
      return;
    KnownElemTy = Ref->getSourceElementType();
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 GetElementPtrInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<LoadInst>(I)) {
    KnownElemTy = I->getType();
    if (GR->findDeducedElementType(Ref->getPointerOperand()))
      return;
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 LoadInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<StoreInst>(I)) {
    if (!(KnownElemTy =
              reconstructType(Ref->getValueOperand(), false, IsPostprocessing)))
      return;
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 StoreInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    KnownElemTy = getAtomicElemTy(GR, I, Ref->getPointerOperand());
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 AtomicCmpXchgInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    KnownElemTy = getAtomicElemTy(GR, I, Ref->getPointerOperand());
    if (!KnownElemTy)
      return;
    Incomplete = isTodoType(Ref->getPointerOperand());
    Ops.push_back(std::make_pair(Ref->getPointerOperand(),
                                 AtomicRMWInst::getPointerOperandIndex()));
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    Incomplete = isTodoType(I);
    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
      Value *Op = Ref->getOperand(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
    Value *Op = Ref->getReturnValue();
    if (!Op || !isPointerTy(Op->getType()))
      return;
    if (deduceOperandElementTypeFunctionRet(I, IncompleteRets, AskOps,
                                            IsPostprocessing, KnownElemTy, Op,
                                            CurrF))
      return;
    Incomplete = isTodoType(CurrF);
    Ops.push_back(std::make_pair(Op, 0));
  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
    if (!isPointerTy(Ref->getOperand(0)->getType()))
      return;
    Value *Op0 = Ref->getOperand(0);
    Value *Op1 = Ref->getOperand(1);
    bool Incomplete0 = isTodoType(Op0);
    bool Incomplete1 = isTodoType(Op1);
    Type *ElemTy1 = GR->findDeducedElementType(Op1);
    Type *ElemTy0 = (Incomplete0 && !Incomplete1 && ElemTy1)
                        ? nullptr
                        : GR->findDeducedElementType(Op0);
    if (ElemTy0) {
      KnownElemTy = ElemTy0;
      Incomplete = Incomplete0;
      Ops.push_back(std::make_pair(Op1, 1));
    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;
      Incomplete = Incomplete1;
      Ops.push_back(std::make_pair(Op0, 0));
    }
  } else if (CallInst *CI = dyn_cast<CallInst>(I)) {
    if (!CI->isIndirectCall())
      deduceOperandElementTypeCalledFunction(CI, Ops, KnownElemTy, Incomplete);
    else if (HaveFunPtrs)
      deduceOperandElementTypeFunctionPointer(CI, Ops, KnownElemTy,
                                              IsPostprocessing);
  }

  // There is not enough information to deduce types, or all is valid.
  if (!KnownElemTy || Ops.size() == 0)
    return;

  IRBuilder<> B(CurrF->getContext());
  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    if (AskOps && !AskOps->contains(Op))
      continue;
    Type *AskTy = nullptr;
    CallInst *AskCI = nullptr;
    if (IsPostprocessing && AskOps) {
      AskTy = GR->findDeducedElementType(Op);
      AskCI = GR->findAssignPtrTypeInstr(Op);
    }
    Type *Ty = AskTy ? AskTy : GR->findDeducedElementType(Op);
    if (Ty == KnownElemTy)
      continue;
    Type *OpTy = Op->getType();
    if (Op->hasUseList() &&
        (!Ty || AskTy || isUntypedPointerTy(Ty) || isTodoType(Op))) {
      Type *PrevElemTy = GR->findDeducedElementType(Op);
      GR->addDeducedElementType(Op, normalizeType(KnownElemTy));
      // Check whether KnownElemTy is complete.
      if (!Incomplete)
        eraseTodoType(Op);
      else if (!IsPostprocessing)
        insertTodoType(Op);
      // Reuse an existing spv_assign_ptr_type call if there is one.
      CallInst *AssignCI = AskCI ? AskCI : GR->findAssignPtrTypeInstr(Op);
      if (AssignCI == nullptr) {
        setInsertPointSkippingPhis(B, I);
        GR->buildAssignPtr(B, KnownElemTy, Op);
      } else {
        GR->updateAssignType(AssignCI, Op,
                             getNormalizedPoisonValue(KnownElemTy));
        DenseSet<std::pair<Value *, Value *>> VisitedSubst{
            std::make_pair(I, Op)};
        propagateElemTypeRec(Op, KnownElemTy, PrevElemTy, VisitedSubst);
      }
    } else {
      eraseTodoType(Op);
      CallInst *PtrCastI =
          buildSpvPtrcast(I->getParent()->getParent(), Op, KnownElemTy);
      if (OpIt.second == std::numeric_limits<unsigned>::max())
        dyn_cast<CallInst>(I)->setCalledOperand(PtrCastI);
      else
        I->setOperand(OpIt.second, PtrCastI);
    }
  }
  TypeValidated.insert(I);
}
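// Operand deduction example (hypothetical IR): for "store i32 %v, ptr %p"
// with no prior type info on %p, reconstructType() of the value operand gives
// KnownElemTy = i32, %p is pushed into Ops, and the loop above either creates
// an spv_assign_ptr_type for %p or reconciles a conflicting earlier
// assignment through spv_ptrcast.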
void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New,
                                              IRBuilder<> &B) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      B.SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      CallInst *AssignCI =
          B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()},
                            Args);
      GR->addAssignPtrTypeInstr(New, AssignCI);
      U->eraseFromParent();
    } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
               isa<CallInst>(U)) {
      U->replaceUsesOfWith(Old, New);
    } else {
      llvm_unreachable("illegal aggregate intrinsic user");
    }
  }
  New->copyMetadata(*Old);
  Old->eraseFromParent();
}
void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(CurrF))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    bool BPrepared = false;
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      if (!BPrepared) {
        setInsertPointSkippingPhis(B, I);
        BPrepared = true;
      }
      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}
void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(CurrF))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    bool IsPhi = isa<PHINode>(I), BPrepared = false;
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      Constant *AggrConst = nullptr;
      Type *ResTy = nullptr;
      if (auto *COp = dyn_cast<ConstantVector>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = COp->getType();
      } else if (auto *COp = dyn_cast<ConstantArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantStruct>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantDataArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantDataVector>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = Op->getType()->isVectorTy() ? COp->getType() : B.getInt32Ty();
      }
      if (AggrConst) {
        SmallVector<Value *> Args;
        if (auto *COp = dyn_cast<ConstantDataSequential>(Op))
          for (unsigned i = 0; i < COp->getNumElements(); ++i)
            Args.push_back(COp->getElementAsConstant(i));
        else
          for (auto &COp : AggrConst->operands())
            Args.push_back(COp);
        if (!BPrepared) {
          IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
                : B.SetInsertPoint(I);
          BPrepared = true;
        }
        auto *CI =
            B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
        Worklist.push(CI);
        I->replaceUsesOfWith(Op, CI);
        KeepInst = true;
        AggrConsts[CI] = AggrConst;
        AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst, false);
      }
    }
    if (!KeepInst)
      Worklist.pop();
  }
}
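// Composite-constant example (hypothetical IR): an operand such as
//   <4 x i32> <i32 1, i32 2, i32 3, i32 4>
// becomes roughly a call to @llvm.spv.const.composite(1, 2, 3, 4); the
// original constant and its (possibly refined) nested type are kept in
// AggrConsts/AggrConstTypes for processInstrAfterVisit().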
static void createDecorationIntrinsic(Instruction *I, MDNode *Node,
                                      IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  setInsertPointAfterDef(B, I);
  B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                    {I, MetadataAsValue::get(Ctx, MDNode::get(Ctx, {Node}))});
}

static void createRoundingModeDecoration(Instruction *I,
                                         unsigned RoundingModeDeco,
                                         IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  Type *Int32Ty = Type::getInt32Ty(Ctx);
  MDNode *RoundingModeNode = MDNode::get(
      Ctx,
      {ConstantAsMetadata::get(
           ConstantInt::get(Int32Ty, SPIRV::Decoration::FPRoundingMode)),
       ConstantAsMetadata::get(ConstantInt::get(Int32Ty, RoundingModeDeco))});
  createDecorationIntrinsic(I, RoundingModeNode, B);
}

static void createSaturatedConversionDecoration(Instruction *I,
                                                IRBuilder<> &B) {
  LLVMContext &Ctx = I->getContext();
  Type *Int32Ty = Type::getInt32Ty(Ctx);
  MDNode *SaturatedConversionNode =
      MDNode::get(Ctx, {ConstantAsMetadata::get(ConstantInt::get(
                           Int32Ty, SPIRV::Decoration::SaturatedConversion))});
  createDecorationIntrinsic(I, SaturatedConversionNode, B);
}

static void addSaturatedDecorationToIntrinsic(Instruction *I, IRBuilder<> &B) {
  if (auto *CI = dyn_cast<CallInst>(I)) {
    if (Function *Fu = CI->getCalledFunction()) {
      if (Fu->isIntrinsic()) {
        unsigned const int IntrinsicId = Fu->getIntrinsicID();
        switch (IntrinsicId) {
        case Intrinsic::fptosi_sat:
        case Intrinsic::fptoui_sat:
          createSaturatedConversionDecoration(I, B);
          break;
        default:
          break;
        }
      }
    }
  }
}

Instruction *SPIRVEmitIntrinsics::visitCallInst(CallInst &Call) {
  if (!Call.isInlineAsm())
    return &Call;

  const InlineAsm *IA = cast<InlineAsm>(Call.getCalledOperand());
  LLVMContext &Ctx = CurrF->getContext();

  Constant *TyC = UndefValue::get(IA->getFunctionType());
  MDString *ConstraintString = MDString::get(Ctx, IA->getConstraintString());
  SmallVector<Value *> Args = {
      buildMD(TyC),
      MetadataAsValue::get(Ctx, MDNode::get(Ctx, ConstraintString))};
  for (unsigned OpIdx = 0; OpIdx < Call.arg_size(); OpIdx++)
    Args.push_back(Call.getArgOperand(OpIdx));

  IRBuilder<> B(Call.getParent());
  B.SetInsertPoint(&Call);
  B.CreateIntrinsic(Intrinsic::spv_inline_asm, {Args});
  return &Call;
}
void SPIRVEmitIntrinsics::useRoundingMode(ConstrainedFPIntrinsic *FPI,
                                          IRBuilder<> &B) {
  std::optional<RoundingMode> RM = FPI->getRoundingMode();
  if (!RM.has_value())
    return;
  unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
  switch (RM.value()) {
  case RoundingMode::NearestTiesToEven:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
    break;
  case RoundingMode::TowardNegative:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
    break;
  case RoundingMode::TowardPositive:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
    break;
  case RoundingMode::TowardZero:
    RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
    break;
  case RoundingMode::Dynamic:
  case RoundingMode::NearestTiesToAway:
    // No SPIR-V mapping exists; rely on the default Op behavior.
    break;
  default:
    break;
  }
  if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
    return;
  createRoundingModeDecoration(FPI, RoundingModeDeco, B);
}
Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  BasicBlock *ParentBB = I.getParent();
  IRBuilder<> B(ParentBB);
  B.SetInsertPoint(&I);
  SmallVector<Value *, 4> Args;
  SmallVector<BasicBlock *> BBCases;
  for (auto &Op : I.operands()) {
    if (Op.get()->getType()->isSized()) {
      Args.push_back(Op);
    } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
      BBCases.push_back(BB);
      Args.push_back(BlockAddress::get(BB->getParent(), BB));
    } else {
      report_fatal_error("Unexpected switch operand");
    }
  }
  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  // Remove the switch to avoid its unwanted unwrapping into branches.
  replaceAllUsesWith(&I, NewI);
  I.eraseFromParent();
  // Insert a temporary terminator to keep the CFG valid; it is removed after
  // IR translation.
  B.SetInsertPoint(ParentBB);
  IndirectBrInst *BrI = B.CreateIndirectBr(
      Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
      BBCases.size());
  for (BasicBlock *BBCase : BBCases)
    BrI->addDestination(BBCase);
  return BrI;
}
Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  if (I.getSourceElementType() == IntegerType::getInt8Ty(CurrF->getContext()) &&
      TM->getSubtargetImpl()->isLogicalSPIRV()) {
    Instruction *Result = buildLogicalAccessChainFromGEP(I);
    if (Result)
      return Result;
  }

  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(I.isInBounds()));
  llvm::append_range(Args, I.operands());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  Value *Source = I.getOperand(0);

  // SPIR-V, contrary to opaque-pointer LLVM IR, supports bitcasts between
  // pointers of different element types. Pointer bitcasts carry no useful
  // information here, so they are skipped and handled later in
  // insertPtrCastOrAssignTypeInstr.
  if (isPointerTy(I.getType())) {
    replaceAllUsesWith(&I, Source);
    I.eraseFromParent();
    return nullptr;
  }

  SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
    TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
  Type *VTy = V->getType();

  // Sanity check: an already-known pointee type must match.
  if (Type *ElemTy = getPointeeType(VTy))
    if (ElemTy != AssignedType)
      report_fatal_error("Unexpected pointer element type!");

  CallInst *AssignCI = GR->findAssignPtrTypeInstr(V);
  if (!AssignCI) {
    GR->buildAssignType(B, AssignedType, V);
    return;
  }

  // ...
  if (CurrentType == AssignedType)
    return;

  // Builtin types cannot be redeclared or casted.
  if (CurrentType->isTargetExtTy())
    report_fatal_error(Twine("Type mismatch ") + " for value " + V->getName(),
                       false);

  // The previous guess was wrong: update the inferred type with the new, more
  // precise information.
  GR->updateAssignType(AssignCI, V, getNormalizedPoisonValue(AssignedType));
}
void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  TypeValidated.insert(I);

  // Do not emit spv_ptrcast if Pointer's element type already matches.
  Type *PointerElemTy = deduceElementTypeHelper(Pointer, false);
  if (PointerElemTy == ExpectedElementType ||
      isEquivalentTypes(PointerElemTy, ExpectedElementType))
    return;

  setInsertPointSkippingPhis(B, I);
  Value *ExpectedElementVal = getNormalizedPoisonValue(ExpectedElementType);
  MetadataAsValue *VMD = buildMD(ExpectedElementVal);
  unsigned AddressSpace = getPointerAddressSpace(Pointer->getType());
  bool FirstPtrCastOrAssignPtrType = true;

  // Do not emit a new spv_ptrcast if an equivalent one already exists, or if
  // an spv_assign_ptr_type already targets this pointer with the same type.
  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;

    // Some spv_ptrcast/spv_assign_ptr_type already targets this pointer.
    FirstPtrCastOrAssignPtrType = false;
    if (II->getOperand(1) != VMD)
      continue;

    // Same element type and address space: an assign suffices.
    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      return;

    // A matching spv_ptrcast exists, but only reuse it from the same block.
    if (II->getParent() != I->getParent())
      continue;

    I->setOperand(OperandToReplace, II);
    return;
  }

  if (isa<Instruction>(Pointer) || isa<Argument>(Pointer)) {
    if (FirstPtrCastOrAssignPtrType) {
      // The first request becomes an spv_assign_ptr_type, not a cast.
      GR->buildAssignPtr(B, ExpectedElementType, Pointer);
      return;
    } else if (isTodoType(Pointer)) {
      eraseTodoType(Pointer);
      // Existing type info was incomplete: update spv_assign_ptr_type's
      // arguments and propagate the previous element type to validated users.
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Pointer)) {
        Type *PrevElemTy = GR->findDeducedElementType(Pointer);
        DenseSet<std::pair<Value *, Value *>> VisitedSubst{
            std::make_pair(I, Pointer)};
        GR->updateAssignType(AssignCI, Pointer, ExpectedElementVal);
        propagateElemType(Pointer, PrevElemTy, VisitedSubst);
        return;
      }
    }
  }

  // Emit spv_ptrcast.
  SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
  SmallVector<Value *, 2> Args = {Pointer, VMD, B.getInt32(AddressSpace)};
  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  I->setOperand(OperandToReplace, PtrCastI);
  GR->buildAssignPtr(B, ExpectedElementType, PtrCastI);
}
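// Policy recap: the first pointee-type request on a pointer becomes an
// spv_assign_ptr_type (a definition, not a cast); only subsequent conflicting
// expectations materialize spv_ptrcast calls, and an equivalent cast already
// present in the same basic block is reused instead of duplicated.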
void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions.
  StoreInst *SI = dyn_cast<StoreInst>(I);
  if (IsKernelArgInt8(CurrF, SI)) {
    replacePointerOperandWithPtrCast(
        I, SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->getContext()),
        0, B);
  }
  if (SI) {
    Value *Op = SI->getValueOperand();
    Value *Pointer = SI->getPointerOperand();
    Type *OpTy = Op->getType();
    if (auto *OpI = dyn_cast<Instruction>(Op))
      OpTy = restoreMutatedType(GR, OpI, OpTy);
    if (OpTy == Op->getType())
      OpTy = deduceElementTypeByValueDeep(OpTy, Op, false);
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 1, B);
    return;
  }
  if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    Value *Pointer = LI->getPointerOperand();
    Type *OpTy = LI->getType();
    if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
      if (Type *ElemTy = GR->findDeducedElementType(LI)) {
        OpTy = getTypedPointerWrapper(ElemTy, PtrTy->getAddressSpace());
      } else {
        Type *NewOpTy = OpTy;
        OpTy = deduceElementTypeByValueDeep(OpTy, LI, false);
        if (OpTy == NewOpTy)
          insertTodoType(Pointer);
      }
    }
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    return;
  }
  if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    Value *Pointer = GEPI->getPointerOperand();
    Type *OpTy = nullptr;
    // An i8 source element type signals an offset-style GEP rewritten by
    // earlier passes; for logical SPIR-V the accessed type is then taken from
    // the pointer root instead.
    bool IsRewrittenGEP =
        GEPI->getSourceElementType() == IntegerType::getInt8Ty(I->getContext());
    if (IsRewrittenGEP && TM->getSubtargetImpl()->isLogicalSPIRV()) {
      Value *Src = getPointerRoot(Pointer);
      OpTy = GR->findDeducedElementType(Src);
    }
    // In all other cases, fall back to the GEP source element type.
    if (!OpTy)
      OpTy = GEPI->getSourceElementType();
    replacePointerOperandWithPtrCast(I, Pointer, OpTy, 0, B);
    if (isNestedPointer(OpTy))
      insertTodoType(Pointer);
    return;
  }

  // Handle calls to builtins (non-intrinsics).
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
      !CI->getCalledFunction() || CI->getCalledFunction()->isIntrinsic())
    return;

  // Collect information about formal parameter types.
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());
  Function *CalledF = CI->getCalledFunction();
  SmallVector<Type *, 4> CalledArgTys;
  bool HaveTypes = false;
  for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
    Argument *CalledArg = CalledF->getArg(OpIdx);
    Type *ArgType = CalledArg->getType();
    if (!isPointerTy(ArgType)) {
      CalledArgTys.push_back(nullptr);
    } else if (Type *ArgTypeElem = getPointeeType(ArgType)) {
      CalledArgTys.push_back(ArgTypeElem);
      HaveTypes = true;
    } else {
      Type *ElemTy = GR->findDeducedElementType(CalledArg);
      if (!ElemTy && hasPointeeTypeAttr(CalledArg))
        ElemTy = getPointeeTypeByAttr(CalledArg);
      if (!ElemTy) {
        // Try to deduce the element type from the argument's users.
        for (User *U : CalledArg->users()) {
          if (Instruction *Inst = dyn_cast<Instruction>(U)) {
            if ((ElemTy = deduceElementTypeHelper(Inst, false)) != nullptr)
              break;
          }
        }
      }
      HaveTypes |= ElemTy != nullptr;
      CalledArgTys.push_back(ElemTy);
    }
  }

  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
    Value *ArgOperand = CI->getArgOperand(OpIdx);
    if (!isPointerTy(ArgOperand->getType()))
      continue;
    // ...
    Type *ExpectedType =
        OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
    if (!ExpectedType && !DemangledName.empty())
      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
          DemangledName, OpIdx, I->getContext());
    if (!ExpectedType || ExpectedType->isVoidTy())
      continue;
    // ...
    replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
}
Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}

Instruction *
SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperand()->getType(),
                                  I.getIndexOperand()->getType()};
  SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
  SmallVector<Value *> Args;
  Value *AggregateOp = I.getAggregateOperand();
  if (isa<UndefValue>(AggregateOp))
    Args.push_back(UndefValue::get(B.getInt32Ty()));
  else
    Args.push_back(AggregateOp);
  Args.push_back(I.getInsertedValueOperand());
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  Instruction *NewI =
      B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  if (I.getAggregateOperand()->getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  append_range(Args, I.operands());
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getLoadMemOperandFlags(I, CurrF->getDataLayout());
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                        {I.getPointerOperand(), B.getInt16(Flags),
                         B.getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  if (!AggrStores.contains(&I))
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getStoreMemOperandFlags(I, CurrF->getDataLayout());
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = B.CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, B.getInt16(Flags),
       B.getInt8(I.getAlign().value())});
  I.eraseFromParent();
  return NewI;
}
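// Only stores registered in AggrStores (aggregate- or vector-typed value
// operands, collected in runOnFunction) take this path; scalar stores remain
// ordinary IR and are handled by instruction selection directly.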
Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  Value *ArraySize = nullptr;
  if (I.isArrayAllocation()) {
    if (!TM->getSubtargetImpl()->canUseExtension(
            SPIRV::Extension::SPV_INTEL_variable_length_array))
      report_fatal_error(
          "array allocation: this instruction requires the following "
          "SPIR-V extension: SPV_INTEL_variable_length_array",
          false);
    ArraySize = I.getArraySize();
  }
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI =
      ArraySize
          ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
                              {PtrTy, ArraySize->getType()},
                              {ArraySize, B.getInt8(I.getAlign().value())})
          : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy},
                              {B.getInt8(I.getAlign().value())});
  replaceAllUsesWithAndErase(B, &I, NewI);
  return NewI;
}
Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    Args.push_back(Op);
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemScope(I.getContext(), I.getSyncScopeID()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                 {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  B.CreateIntrinsic(Intrinsic::spv_unreachable, {});
  return &I;
}
void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
                                             IRBuilder<> &B) {
  // Skip special artificial variables.
  static const StringSet<> ArtificialGlobals{"llvm.global.annotations",
                                             "llvm.compiler.used"};
  if (ArtificialGlobals.contains(GV.getName()))
    return;
  deduceElementTypeHelper(&GV, false);
  if (hasInitializer(&GV)) {
    Constant *Init = GV.getInitializer();
    Type *Ty = isAggrConstForceInt32(Init) ? B.getInt32Ty() : Init->getType();
    Constant *Const = isAggrConstForceInt32(Init) ? B.getInt32(1) : Init;
    auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
                                       {GV.getType(), Ty}, {&GV, Const});
    InitInst->setArgOperand(1, Init);
  }
  if (!hasInitializer(&GV) && GV.getNumUses() == 0)
    B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}
bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
                                                   IRBuilder<> &B,
                                                   bool UnknownElemTypeI8) {
  reportFatalOnTokenType(I);
  if (!isPointerTy(I->getType()) || !requireAssignType(I))
    return false;

  setInsertPointAfterDef(B, I);
  if (Type *ElemTy = deduceElementType(I, UnknownElemTypeI8)) {
    GR->buildAssignPtr(B, ElemTy, I);
    return false;
  }
  return true;
}

void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
                                                IRBuilder<> &B) {
  // TODO: extend the list of functions with known result types.
  static StringMap<unsigned> ResTypeWellKnown = {
      {"async_work_group_copy", WellKnownTypes::Event},
      {"async_work_group_strided_copy", WellKnownTypes::Event},
      {"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};

  reportFatalOnTokenType(I);

  bool IsKnown = false;
  if (auto *CI = dyn_cast<CallInst>(I)) {
    if (!CI->isIndirectCall() && !CI->isInlineAsm() &&
        CI->getCalledFunction() && !CI->getCalledFunction()->isIntrinsic()) {
      Function *CalledF = CI->getCalledFunction();
      std::string DemangledName =
          getOclOrSpirvBuiltinDemangledName(CalledF->getName());
      FPDecorationId DecorationId = FPDecorationId::NONE;
      if (DemangledName.length() > 0)
        DemangledName =
            SPIRV::lookupBuiltinNameHelper(DemangledName, &DecorationId);
      auto ResIt = ResTypeWellKnown.find(DemangledName);
      if (ResIt != ResTypeWellKnown.end()) {
        IsKnown = true;
        setInsertPointAfterDef(B, I);
        switch (ResIt->second) {
        case WellKnownTypes::Event:
          GR->buildAssignType(
              B, TargetExtType::get(I->getContext(), "spirv.Event"), I);
          break;
        }
      }
      // Check if a rounding-mode or saturation decoration is encoded in the
      // builtin's name suffix.
      switch (DecorationId) {
      default:
        break;
      case FPDecorationId::SAT:
        createSaturatedConversionDecoration(CI, B);
        break;
      case FPDecorationId::RTE:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE, B);
        break;
      case FPDecorationId::RTZ:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ, B);
        break;
      case FPDecorationId::RTP:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP, B);
        break;
      case FPDecorationId::RTN:
        createRoundingModeDecoration(
            CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN, B);
        break;
      }
    }
  }

  Type *Ty = I->getType();
  if (!IsKnown && !Ty->isVoidTy()) {
    setInsertPointAfterDef(B, I);
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto It = AggrConstTypes.find(II);
        if (It == AggrConstTypes.end())
          report_fatal_error("Unknown composite intrinsic type");
        TypeToAssign = It->second;
      }
    }
    TypeToAssign = restoreMutatedType(GR, I, TypeToAssign);
    GR->buildAssignType(B, TypeToAssign, I);
  }
  for (const auto &Op : I->operands()) {
    if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
        // Check the GetElementPtrConstantExpr case.
        (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
      setInsertPointSkippingPhis(B, I);
      Type *OpTy = Op->getType();
      if (isa<UndefValue>(Op) && OpTy->isAggregateType()) {
        CallInst *AssignCI =
            buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
                            UndefValue::get(B.getInt32Ty()), {}, B);
        GR->addAssignPtrTypeInstr(Op, AssignCI);
      } else if (!isa<Instruction>(Op)) {
        Type *OpTy = Op->getType();
        Type *OpTyElem = getPointeeType(OpTy);
        if (OpTyElem) {
          GR->buildAssignPtr(B, OpTyElem, Op);
        } else if (isPointerTy(OpTy)) {
          Type *ElemTy = GR->findDeducedElementType(Op);
          GR->buildAssignPtr(B, ElemTy ? ElemTy : deduceElementType(Op, true),
                             Op);
        } else {
          CallInst *AssignCI =
              buildIntrWithMD(Intrinsic::spv_assign_type, {OpTy}, Op,
                              getNormalizedPoisonValue(OpTy), {}, B);
          GR->addAssignPtrTypeInstr(Op, AssignCI);
        }
      }
    }
  }
}
bool SPIRVEmitIntrinsics::shouldTryToAddMemAliasingDecoration(
    Instruction *Inst) {
  const SPIRVSubtarget *STI = TM->getSubtargetImpl();
  if (!STI->canUseExtension(SPIRV::Extension::SPV_INTEL_memory_access_aliasing))
    return false;
  // Add aliasing decorations to internal load/store intrinsics and atomic
  // instructions, skipping atomic stores, which have no result ID to carry
  // the decoration.
  CallInst *CI = dyn_cast<CallInst>(Inst);
  if (!CI)
    return false;
  if (Function *Fun = CI->getCalledFunction()) {
    if (Fun->isIntrinsic()) {
      switch (Fun->getIntrinsicID()) {
      case Intrinsic::spv_load:
      case Intrinsic::spv_store:
        return true;
      default:
        return false;
      }
    }
    std::string Name = getOclOrSpirvBuiltinDemangledName(Fun->getName());
    const std::string Prefix = "__spirv_Atomic";
    const bool IsAtomic = Name.find(Prefix) == 0;

    if (!Fun->getReturnType()->isVoidTy() && IsAtomic)
      return true;
  }
  return false;
}
void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
                                                 IRBuilder<> &B) {
  if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
    setInsertPointAfterDef(B, I);
    B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
  // Lower alias.scope/noalias metadata to INTEL aliasing decorations.
  auto processMemAliasingDecoration = [&](unsigned Kind) {
    if (MDNode *AliasListMD = I->getMetadata(Kind)) {
      if (shouldTryToAddMemAliasingDecoration(I)) {
        uint32_t Dec = Kind == LLVMContext::MD_alias_scope
                           ? SPIRV::Decoration::AliasScopeINTEL
                           : SPIRV::Decoration::NoAliasINTEL;
        SmallVector<Value *, 3> Args = {
            I, ConstantInt::get(B.getInt32Ty(), Dec),
            MetadataAsValue::get(I->getContext(), AliasListMD)};
        setInsertPointAfterDef(B, I);
        B.CreateIntrinsic(Intrinsic::spv_assign_aliasing_decoration,
                          {I->getType()}, {Args});
      }
    }
  };
  processMemAliasingDecoration(LLVMContext::MD_alias_scope);
  processMemAliasingDecoration(LLVMContext::MD_noalias);
  // Handle fpmath metadata.
  if (MDNode *MD = I->getMetadata(LLVMContext::MD_fpmath)) {
    const SPIRVSubtarget *STI = TM->getSubtargetImpl();
    bool AllowFPMaxError =
        STI->canUseExtension(SPIRV::Extension::SPV_INTEL_fp_max_error);
    if (!AllowFPMaxError)
      return;
    setInsertPointAfterDef(B, I);
    B.CreateIntrinsic(Intrinsic::spv_assign_fpmaxerror_decoration,
                      {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
}
static SPIRV::FPFastMathDefaultInfoVector &getOrCreateFPFastMathDefaultInfoVec(
    const Module &M,
    DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
        &FPFastMathDefaultInfoMap,
    Function *F) {
  auto it = FPFastMathDefaultInfoMap.find(F);
  if (it != FPFastMathDefaultInfoMap.end())
    return it->second;

  // No entry yet: create one initialized to the default values.
  SPIRV::FPFastMathDefaultInfoVector FPFastMathDefaultInfoVec;
  FPFastMathDefaultInfoVec.emplace_back(Type::getHalfTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  FPFastMathDefaultInfoVec.emplace_back(Type::getFloatTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  FPFastMathDefaultInfoVec.emplace_back(Type::getDoubleTy(M.getContext()),
                                        SPIRV::FPFastMathMode::None);
  return FPFastMathDefaultInfoMap[F] = std::move(FPFastMathDefaultInfoVec);
}

static SPIRV::FPFastMathDefaultInfo &getFPFastMathDefaultInfo(
    SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec,
    const Type *Ty) {
  size_t BitWidth = Ty->getScalarSizeInBits();
  int Index =
      SPIRV::FPFastMathDefaultInfoVector::computeFPFastMathDefaultInfoVecIndex(
          BitWidth);
  assert(Index >= 0 && Index < 3 &&
         "Expected FPFastMathDefaultInfo for half, float, or double");
  assert(FPFastMathDefaultInfoVec.size() == 3 &&
         "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
  return FPFastMathDefaultInfoVec[Index];
}
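// Index mapping assumed by the assertions above: the vector always holds
// exactly three entries, created for half, float, and double in that order,
// so computeFPFastMathDefaultInfoVecIndex maps bit widths 16/32/64 to
// indices 0/1/2.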
void SPIRVEmitIntrinsics::insertConstantsForFPFastMathDefault(Module &M) {
  const SPIRVSubtarget *ST = TM->getSubtargetImpl();
  if (!ST->canUseExtension(SPIRV::Extension::SPV_KHR_float_controls2))
    return;

  // FPFastMathDefault information arrives through the "spirv.ExecutionMode"
  // named metadata.
  auto Node = M.getNamedMetadata("spirv.ExecutionMode");
  if (!Node) {
    if (!M.getNamedMetadata("opencl.enable.FP_CONTRACT")) {
      // FP_CONTRACT is disabled: materialize an i32 0 constant so that
      // ContractionOff can be emitted later.
      Constant *InitValue =
          ConstantInt::get(Type::getInt32Ty(M.getContext()), 0);
      [[maybe_unused]] GlobalVariable *GV =
          new GlobalVariable(M, Type::getInt32Ty(M.getContext()),
                             true /*isConstant*/,
                             GlobalValue::InternalLinkage, InitValue);
    }
    return;
  }

  DenseMap<Function *, SPIRV::FPFastMathDefaultInfoVector>
      FPFastMathDefaultInfoMap;

  for (unsigned i = 0; i < Node->getNumOperands(); i++) {
    // Each entry names a function F, an execution mode EM, and its payload.
    // ...
    if (EM == SPIRV::ExecutionMode::FPFastMathDefault) {
      assert(MDN->getNumOperands() == 4 &&
             "Expected 4 operands for FPFastMathDefault");
      // ...
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      SPIRV::FPFastMathDefaultInfo &Info =
          getFPFastMathDefaultInfo(FPFastMathDefaultInfoVec, TargetTy);
      Info.FastMathFlags = Flags;
      Info.FPFastMathDefault = true;
    } else if (EM == SPIRV::ExecutionMode::ContractionOff) {
      assert(MDN->getNumOperands() == 2 &&
             "Expected no operands for ContractionOff");
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      for (SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
        Info.ContractionOff = true;
      }
    } else if (EM == SPIRV::ExecutionMode::SignedZeroInfNanPreserve) {
      assert(MDN->getNumOperands() == 3 &&
             "Expected 1 operand for SignedZeroInfNanPreserve");
      unsigned TargetWidth =
          cast<ConstantInt>(
              cast<ConstantAsMetadata>(MDN->getOperand(2))->getValue())
              ->getZExtValue();
      SPIRV::FPFastMathDefaultInfoVector &FPFastMathDefaultInfoVec =
          getOrCreateFPFastMathDefaultInfoVec(M, FPFastMathDefaultInfoMap, F);
      int Index = SPIRV::FPFastMathDefaultInfoVector::
          computeFPFastMathDefaultInfoVecIndex(TargetWidth);
      assert(Index >= 0 && Index < 3 &&
             "Expected FPFastMathDefaultInfo for half, float, or double");
      assert(FPFastMathDefaultInfoVec.size() == 3 &&
             "Expected FPFastMathDefaultInfoVec to have exactly 3 elements");
      FPFastMathDefaultInfoVec[Index].SignedZeroInfNanPreserve = true;
    }
  }
  std::unordered_map<unsigned, GlobalVariable *> GlobalVars;
  for (auto &[Func, FPFastMathDefaultInfoVec] : FPFastMathDefaultInfoMap) {
    if (FPFastMathDefaultInfoVec.empty())
      continue;

    for (const SPIRV::FPFastMathDefaultInfo &Info : FPFastMathDefaultInfoVec) {
      assert(Info.Ty && "Expected target type for FPFastMathDefaultInfo");
      unsigned Flags = Info.FastMathFlags;
      if (Flags == SPIRV::FPFastMathMode::None && !Info.ContractionOff &&
          !Info.SignedZeroInfNanPreserve && !Info.FPFastMathDefault)
        continue;

      // Reject conflicting mode combinations.
      if (Info.ContractionOff && (Flags & SPIRV::FPFastMathMode::AllowContract))
        report_fatal_error("Conflicting FPFastMathFlags: ContractionOff "
                           "and AllowContract");

      if (Info.SignedZeroInfNanPreserve &&
          (Flags &
           (SPIRV::FPFastMathMode::NotNaN | SPIRV::FPFastMathMode::NotInf |
            SPIRV::FPFastMathMode::NSZ))) {
        if (Info.FPFastMathDefault)
          report_fatal_error("Conflicting FPFastMathFlags: "
                             "SignedZeroInfNanPreserve but at least one of "
                             "NotNaN/NotInf/NSZ is enabled.");
      }

      if ((Flags & SPIRV::FPFastMathMode::AllowTransform) &&
          !((Flags & SPIRV::FPFastMathMode::AllowReassoc) &&
            (Flags & SPIRV::FPFastMathMode::AllowContract))) {
        report_fatal_error("Conflicting FPFastMathFlags: "
                           "AllowTransform requires AllowReassoc and "
                           "AllowContract to be set.");
      }

      // Reuse one global per distinct flags value.
      auto it = GlobalVars.find(Flags);
      GlobalVariable *GV = nullptr;
      if (it != GlobalVars.end()) {
        GV = it->second;
      } else {
        Constant *InitValue =
            ConstantInt::get(Type::getInt32Ty(M.getContext()), Flags);
        GV = new GlobalVariable(M, Type::getInt32Ty(M.getContext()),
                                true /*isConstant*/,
                                GlobalValue::InternalLinkage, InitValue);
        GlobalVars[Flags] = GV;
      }
    }
  }
}
void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
                                                 IRBuilder<> &B) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  bool IsConstComposite =
      II && II->getIntrinsicID() == Intrinsic::spv_const_composite;
  if (IsConstComposite && TrackConstants) {
    setInsertPointAfterDef(B, I);
    auto t = AggrConsts.find(I);
    assert(t != AggrConsts.end());
    auto *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {II->getType(), II->getType()}, t->second, I, {}, B);
    replaceAllUsesWith(I, NewOp, false);
    NewOp->setArgOperand(0, I);
  }
  bool IsPhi = isa<PHINode>(I), BPrepared = false;
  for (const auto &Op : I->operands()) {
    // ... (skip operands that are not trackable constants)
    unsigned OpNo = Op.getOperandNo();
    if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
               (II->paramHasAttr(OpNo, Attribute::ImmArg))))
      continue;

    if (!BPrepared) {
      IsPhi ? B.SetInsertPointPastAllocas(I->getParent()->getParent())
            : B.SetInsertPoint(I);
      BPrepared = true;
    }
    Type *OpTy = Op->getType();
    Value *OpTyVal = getNormalizedPoisonValue(OpTy);
    Value *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {OpTy, OpTyVal->getType()}, Op, OpTyVal, {}, B);
    Type *OpElemTy = GR->findDeducedElementType(Op);
    if (!IsConstComposite && isPointerTy(OpTy) && OpElemTy != nullptr &&
        OpElemTy != IntegerType::getInt8Ty(I->getContext())) {
      SmallVector<Type *, 2> Types = {OpTy, OpTy};
      SmallVector<Value *, 2> Args = {
          NewOp, buildMD(getNormalizedPoisonValue(OpElemTy)),
          B.getInt32(getPointerAddressSpace(OpTy))};
      CallInst *PtrCasted =
          B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
      GR->buildAssignPtr(B, OpElemTy, PtrCasted);
      NewOp = PtrCasted;
    }
    I->setOperand(OpNo, NewOp);
  }
  if (Named.insert(I).second)
    emitAssignName(I, B);
}
Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
                                                     unsigned OpIdx) {
  std::unordered_set<Function *> FVisited;
  return deduceFunParamElementType(F, OpIdx, FVisited);
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
    Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
  // Break cycles.
  if (!FVisited.insert(F).second)
    return nullptr;

  std::unordered_set<Value *> Visited;
  SmallVector<std::pair<Function *, unsigned>> Lookup;
  // Search the function's call sites.
  for (User *U : F->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (!CI || OpIdx >= CI->arg_size())
      continue;
    Value *OpArg = CI->getArgOperand(OpIdx);
    if (!isPointerTy(OpArg->getType()))
      continue;
    // Maybe the operand's element type is already known.
    if (Type *KnownTy = GR->findDeducedElementType(OpArg))
      return KnownTy;
    // Try to deduce from the operand itself.
    Visited.clear();
    if (Type *Ty = deduceElementTypeHelper(OpArg, Visited, false))
      return Ty;
    // Search the actual operand's users.
    for (User *OpU : OpArg->users()) {
      Instruction *Inst = dyn_cast<Instruction>(OpU);
      if (!Inst || Inst == CI)
        continue;
      Visited.clear();
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited, false))
        return Ty;
    }
    // Check whether the operand is itself a formal parameter of the caller.
    Function *OuterF = CI->getParent()->getParent();
    if (FVisited.find(OuterF) != FVisited.end())
      continue;
    for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
      if (OuterF->getArg(i) == OpArg) {
        Lookup.push_back(std::make_pair(OuterF, i));
        break;
      }
    }
  }

  // Recurse into caller parameters.
  for (auto &Pair : Lookup) {
    if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
      return Ty;
  }

  return nullptr;
}
void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
                                                       IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  // ... (assign element types known from argument attributes)
  if (!HaveFunPtrs)
    return;
  // With function pointers enabled, an argument's element type may also be
  // pinned down by indirect calls that use the argument as the callee.
  for (User *U : F->users()) {
    // ...
  }
  // ...
  for (User *U : Arg->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (CI && CI->isIndirectCall() && CI->getCalledOperand() == Arg &&
        CI->getParent()->getParent() == CurrF) {
      SmallVector<std::pair<Value *, unsigned>> Ops;
      Type *ElemTy = nullptr;
      deduceOperandElementTypeFunctionPointer(CI, Ops, ElemTy, false);
      if (ElemTy) {
        GR->buildAssignPtr(B, ElemTy, Arg);
        break;
      }
    }
  }
}

void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
    Argument *Arg = F->getArg(OpIdx);
    if (!isUntypedPointerTy(Arg->getType()))
      continue;
    Type *ElemTy = GR->findDeducedElementType(Arg);
    if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr) {
      if (CallInst *AssignCI = GR->findAssignPtrTypeInstr(Arg)) {
        DenseSet<std::pair<Value *, Value *>> VisitedSubst;
        GR->updateAssignType(AssignCI, Arg, getNormalizedPoisonValue(ElemTy));
        propagateElemType(Arg, IntegerType::getInt8Ty(F->getContext()),
                          VisitedSubst);
      } else {
        GR->buildAssignPtr(B, ElemTy, Arg);
      }
    }
  }
}

static FunctionType *getFunctionPointerElemType(Function *F,
                                                SPIRVGlobalRegistry *GR) {
  FunctionType *FTy = F->getFunctionType();
  bool IsNewFTy = false;
  // ...
}
bool SPIRVEmitIntrinsics::processFunctionPointers(Module &M) {
  SmallVector<Function *> Worklist;
  for (auto &F : M) {
    if (F.isIntrinsic())
      continue;
    if (F.isDeclaration()) {
      for (User *U : F.users()) {
        CallInst *CI = dyn_cast<CallInst>(U);
        if (!CI || CI->getCalledFunction() != &F) {
          Worklist.push_back(&F);
          break;
        }
      }
    } else {
      for (User *U : F.users()) {
        IntrinsicInst *II = dyn_cast<IntrinsicInst>(U);
        if (!II || II->arg_size() != 3 || II->getOperand(0) != &F)
          continue;
        if (II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
            II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
          Worklist.push_back(&F);
          break;
        }
      }
    }
  }
  if (Worklist.empty())
    return false;

  std::string ServiceFunName = SPIRV_BACKEND_SERVICE_FUN_NAME;
  if (!getVacantFunctionName(M, ServiceFunName))
    report_fatal_error(
        "cannot allocate a name for the internal service function");
  LLVMContext &Ctx = M.getContext();
  Function *SF =
      Function::Create(FunctionType::get(Type::getVoidTy(Ctx), {}, false),
                       GlobalValue::PrivateLinkage, ServiceFunName, M);
  IRBuilder<> IRB(BasicBlock::Create(Ctx, "entry", SF));
  for (Function *F : Worklist) {
    SmallVector<Value *> Args;
    for (const auto &Arg : F->args())
      Args.push_back(PoisonValue::get(Arg.getType()));
    IRB.CreateCall(F, Args);
  }
  IRB.CreateRetVoid();
  return true;
}
void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(IRBuilder<> &B) {
  DenseMap<Function *, CallInst *> Ptrcasts;
  for (auto It : FDeclPtrTys) {
    Function *F = It.first;
    for (auto *U : F->users()) {
      CallInst *CI = dyn_cast<CallInst>(U);
      if (!CI || CI->getCalledFunction() != F)
        continue;
      unsigned Sz = CI->arg_size();
      for (auto [Idx, ElemTy] : It.second) {
        if (Idx >= Sz)
          continue;
        Value *Param = CI->getArgOperand(Idx);
        if (GR->findDeducedElementType(Param) || isa<GlobalValue>(Param))
          continue;
        if (Argument *Arg = dyn_cast<Argument>(Param)) {
          if (!hasPointeeTypeAttr(Arg)) {
            B.SetInsertPointPastAllocas(Arg->getParent());
            B.SetCurrentDebugLocation(DebugLoc());
            GR->buildAssignPtr(B, ElemTy, Arg);
          }
        } else if (isa<GetElementPtrInst>(Param)) {
          replaceUsesOfWithSpvPtrcast(Param, normalizeType(ElemTy), CI,
                                      Ptrcasts);
        } else if (isa<Instruction>(Param)) {
          GR->addDeducedElementType(Param, normalizeType(ElemTy));
          // insertAssignTypeIntrs() will complete buildAssignPtr().
        } else {
          B.SetInsertPoint(CI->getParent()
                               ->getParent()
                               ->getEntryBlock()
                               .getFirstNonPHIOrDbgOrAlloca());
          GR->buildAssignPtr(B, ElemTy, Param);
        }
      }
    }
  }
}
GetElementPtrInst *
SPIRVEmitIntrinsics::simplifyZeroLengthArrayGepInst(GetElementPtrInst *GEP) {
  // A GEP into a zero-length array with a leading zero index is equivalent to
  // a GEP on the array's element type with that index dropped.
  assert(GEP);
  IRBuilder<> Builder(GEP);
  Type *SrcTy = GEP->getSourceElementType();
  SmallVector<Value *, 8> Indices(GEP->indices());
  auto *ArrTy = dyn_cast<ArrayType>(SrcTy);
  if (ArrTy && ArrTy->getNumElements() == 0 &&
      match(Indices[0], m_Zero())) {
    Indices.erase(Indices.begin());
    SrcTy = ArrTy->getElementType();
    Value *NewGEP = Builder.CreateGEP(SrcTy, GEP->getPointerOperand(), Indices,
                                      "", GEP->getNoWrapFlags());
    return cast<GetElementPtrInst>(NewGEP);
  }
  return nullptr;
}
bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;

  const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
  GR = ST.getSPIRVGlobalRegistry();
  HaveFunPtrs =
      ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);

  CurrF = &Func;
  IRBuilder<> B(Func.getContext());
  AggrConsts.clear();
  AggrConstTypes.clear();
  AggrStores.clear();

  // Simplify zero-length-array GEPs and pre-compute GEP result element types.
  SmallPtrSet<Instruction *, 4> DeadInsts;
  for (auto &I : instructions(Func)) {
    auto *Ref = dyn_cast<GetElementPtrInst>(&I);
    if (!Ref || GR->findDeducedElementType(Ref))
      continue;
    GetElementPtrInst *NewGEP = simplifyZeroLengthArrayGepInst(Ref);
    if (NewGEP) {
      Ref->replaceAllUsesWith(NewGEP);
      DeadInsts.insert(Ref);
      Ref = NewGEP;
    }
    if (Type *GepTy = getGEPType(Ref))
      GR->addDeducedElementType(Ref, normalizeType(GepTy));
  }
  for (auto *I : DeadInsts) {
    assert(I->use_empty() && "Dead instruction should not have any uses left");
    I->eraseFromParent();
  }

  processParamTypesByFunHeader(CurrF, B);

  // Collect stores of aggregates to rewrite them as spv_store intrinsics.
  for (auto &I : instructions(Func)) {
    StoreInst *SI = dyn_cast<StoreInst>(&I);
    if (!SI)
      continue;
    Type *ElTy = SI->getValueOperand()->getType();
    if (ElTy->isAggregateType() || ElTy->isVectorTy())
      AggrStores.insert(&I);
  }

  B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV, B);

  preprocessUndefs(B);
  preprocessCompositeConstants(B);
  SmallVector<Instruction *> Worklist(
      llvm::make_pointer_range(instructions(Func)));

  applyDemangledPtrArgTypes(B);

  // Pass forward: use operands to deduce instruction results.
  for (auto &I : Worklist) {
    // Don't emit intrinsics for convergence intrinsics.
    if (isConvergenceIntrinsic(I))
      continue;

    bool Postpone = insertAssignPtrTypeIntrs(I, B, false);
    // If Postpone is true, the pointee type cannot be decided yet.
    insertAssignTypeIntrs(I, B);
    insertPtrCastOrAssignTypeInstr(I, B);
    insertSpirvDecorations(I, B);
    // If the instruction requires a pointee type but it is still unknown,
    // force it to be i8.
    if (Postpone && !GR->findAssignPtrTypeInstr(I))
      insertAssignPtrTypeIntrs(I, B, true);

    if (auto *FPI = dyn_cast<ConstrainedFPIntrinsic>(I))
      useRoundingMode(FPI, B);
  }

  // Pass backward: use instruction results to specify/update/cast operands
  // where needed.
  SmallPtrSet<Instruction *, 4> IncompleteRets;
  for (auto &I : llvm::reverse(instructions(Func)))
    deduceOperandElementType(&I, &IncompleteRets);

  // Extra pass for PHIs: their operands may be defined after the PHI itself.
  for (BasicBlock &BB : Func)
    for (PHINode &Phi : BB.phis())
      if (isPointerTy(Phi.getType()))
        deduceOperandElementType(&Phi, nullptr);

  for (auto *I : Worklist) {
    TrackConstants = true;
    if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
      setInsertPointAfterDef(B, I);
    I = visit(*I);
    if (!I)
      continue;

    // Don't emit intrinsics for convergence operations.
    if (isConvergenceIntrinsic(I))
      continue;

    processInstrAfterVisit(I, B);
  }

  return true;
}
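// Shape of a run (hypothetical, abbreviated): for "%v = load i32, ptr %p"
// the forward pass records %p's pointee (i32) via spv_assign_ptr_type and
// %v's type via spv_assign_type; the backward pass re-examines operands from
// results (stores, returns, pointer icmp, ...) and inserts spv_ptrcast where
// deduced and expected element types disagree.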
bool SPIRVEmitIntrinsics::postprocessTypes(Module &M) {
  if (!GR || TodoTypeSz == 0)
    return false;

  unsigned SzTodo = TodoTypeSz;
  DenseMap<Value *, SmallPtrSet<Value *, 4>> ToProcess;
  for (auto [Op, Enabled] : TodoType) {
    if (!Enabled)
      continue;
    CallInst *AssignCI = GR->findAssignPtrTypeInstr(Op);
    Type *KnownTy = GR->findDeducedElementType(Op);
    if (!KnownTy || !AssignCI)
      continue;
    // Try to improve the deduced type now that all functions are processed.
    if (auto *CI = dyn_cast<Instruction>(Op)) {
      CurrF = CI->getParent()->getParent();
      std::unordered_set<Value *> Visited;
      if (Type *ElemTy = deduceElementTypeHelper(Op, Visited, false, true)) {
        if (ElemTy != KnownTy) {
          DenseSet<std::pair<Value *, Value *>> VisitedSubst;
          propagateElemType(CI, ElemTy, VisitedSubst);
          eraseTodoType(Op);
          continue;
        }
      }
    }
    if (Op->hasUseList()) {
      for (User *U : Op->users()) {
        Instruction *Inst = dyn_cast<Instruction>(U);
        if (Inst && !isa<IntrinsicInst>(Inst))
          ToProcess[Inst].insert(Op);
      }
    }
  }
  if (TodoTypeSz == 0)
    return true;

  for (auto &F : M) {
    CurrF = &F;
    SmallPtrSet<Instruction *, 4> IncompleteRets;
    for (auto &I : llvm::reverse(instructions(F))) {
      auto It = ToProcess.find(&I);
      if (It == ToProcess.end())
        continue;
      It->second.remove_if([this](Value *V) { return !isTodoType(V); });
      if (It->second.size() == 0)
        continue;
      deduceOperandElementType(&I, &IncompleteRets, &It->second, true);
      if (TodoTypeSz == 0)
        return true;
    }
  }

  return SzTodo > TodoTypeSz;
}
void SPIRVEmitIntrinsics::parseFunDeclarations(Module &M) {
  for (auto &F : M) {
    if (!F.isDeclaration() || F.isIntrinsic())
      continue;
    // Get the demangled name.
    std::string DemangledName = getOclOrSpirvBuiltinDemangledName(F.getName());
    if (DemangledName.empty())
      continue;
    // Allow only the OpGroupAsyncCopy use case at the moment.
    const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
    auto [Grp, Opcode, ExtNo] = SPIRV::mapBuiltinToOpcode(
        DemangledName, ST.getPreferredInstructionSet());
    if (Opcode != SPIRV::OpGroupAsyncCopy)
      continue;
    // Find pointer arguments.
    SmallVector<unsigned> Idxs;
    for (unsigned OpIdx = 0; OpIdx < F.arg_size(); ++OpIdx) {
      Argument *Arg = F.getArg(OpIdx);
      if (isPointerTy(Arg->getType()) && !hasPointeeTypeAttr(Arg))
        Idxs.push_back(OpIdx);
    }
    if (!Idxs.size())
      continue;
    // Parse the demangled argument type strings.
    LLVMContext &Ctx = F.getContext();
    SmallVector<StringRef, 10> TypeStrs;
    SPIRV::parseBuiltinTypeStr(TypeStrs, DemangledName, Ctx);
    if (!TypeStrs.size())
      continue;
    // Record element types for the pointer arguments.
    for (unsigned Idx : Idxs) {
      if (Idx >= TypeStrs.size())
        continue;
      if (Type *ElemTy =
              SPIRV::parseBuiltinCallArgumentType(TypeStrs[Idx].trim(), Ctx))
        if (TypedPointerType::isValidElementType(ElemTy) &&
            !ElemTy->isTargetExtTy())
          FDeclPtrTys[&F].push_back(std::make_pair(Idx, ElemTy));
    }
  }
}
bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
  bool Changed = false;

  parseFunDeclarations(M);
  insertConstantsForFPFastMathDefault(M);

  TodoType.clear();
  for (auto &F : M)
    Changed |= runOnFunction(F);

  // Specify function parameters after all functions were processed.
  for (auto &F : M) {
    if (!F.isDeclaration() && !F.isIntrinsic()) {
      CurrF = &F;
      IRBuilder<> B(F.getContext());
      processParamTypes(&F, B);
    }
  }

  CanTodoType = false;
  Changed |= postprocessTypes(M);

  if (HaveFunPtrs)
    Changed |= processFunctionPointers(M);

  return Changed;
}

ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}
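// The pass is instantiated through createSPIRVEmitIntrinsicsPass(TM) and runs
// early in the SPIR-V codegen pipeline, ahead of IR translation, which
// consumes the spv_* intrinsics and type annotations emitted here.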