//===-- SPIRVEmitIntrinsics.cpp - emit SPIRV intrinsics ---------*- C++ -*-===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// The pass emits SPIRV intrinsics keeping essential high-level information for
// the translation of LLVM IR to SPIR-V.
//
//===----------------------------------------------------------------------===//

#include "SPIRV.h"
#include "SPIRVBuiltins.h"
#include "SPIRVMetadata.h"
#include "SPIRVSubtarget.h"
#include "SPIRVTargetMachine.h"
#include "SPIRVUtils.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/InstVisitor.h"
#include "llvm/IR/IntrinsicsSPIRV.h"
#include "llvm/IR/TypedPointerType.h"

#include <queue>

// This pass performs the following transformations at the LLVM IR level that
// are required for the subsequent translation to SPIR-V:
// - replaces direct usages of aggregate constants with target-specific
//   intrinsics;
// - replaces aggregate-related instructions (extract/insert, ld/st, etc.)
//   with target-specific intrinsics;
// - emits intrinsics for the global variable initializers since IRTranslator
//   doesn't handle them and it's not very convenient to translate them
//   ourselves;
// - emits intrinsics to keep track of the string names assigned to the values;
// - emits intrinsics to keep track of constants (this is necessary to have an
//   LLVM IR constant after the IRTranslation is completed) for their further
//   deduplication;
// - emits intrinsics to keep track of the original LLVM types of the values
//   to be able to emit proper SPIR-V types eventually.
//
// TODO: consider removing spv.track.constant in favor of spv.assign.type.
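//
// For illustration only (not part of the upstream comment): given input like
//   %p = alloca i32
//   store i32 0, ptr %p
// the pass is expected to insert, among its other rewrites, something like
//   call void @llvm.spv.assign.ptr.type.p0(ptr %p, metadata i32 undef, i32 0)
// right after the alloca, so that the original pointee type of %p survives
// until instruction selection. The exact intrinsic mangling and operand shape
// shown here are schematic rather than authoritative.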

using namespace llvm;

namespace llvm {
void initializeSPIRVEmitIntrinsicsPass(PassRegistry &);
} // namespace llvm

namespace {
class SPIRVEmitIntrinsics
    : public ModulePass,
      public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
  SPIRVTargetMachine *TM = nullptr;
  SPIRVGlobalRegistry *GR = nullptr;
  Function *F = nullptr;
  bool TrackConstants = true;
  DenseMap<Instruction *, Constant *> AggrConsts;
  DenseMap<Instruction *, Type *> AggrConstTypes;
  DenseSet<Instruction *> AggrStores;

  // a registry of created Intrinsic::spv_assign_ptr_type instructions
  DenseMap<Value *, CallInst *> AssignPtrTypeInstr;

  // deduce element type of untyped pointers
  Type *deduceElementType(Value *I);
  Type *deduceElementTypeHelper(Value *I);
  Type *deduceElementTypeHelper(Value *I, std::unordered_set<Value *> &Visited);
  Type *deduceElementTypeByValueDeep(Type *ValueTy, Value *Operand,
                                     std::unordered_set<Value *> &Visited);
  Type *deduceElementTypeByUsersDeep(Value *Op,
                                     std::unordered_set<Value *> &Visited);

  // deduce nested types of composites
  Type *deduceNestedTypeHelper(User *U);
  Type *deduceNestedTypeHelper(User *U, Type *Ty,
                               std::unordered_set<Value *> &Visited);

  // deduce Types of operands of the Instruction if possible
  void deduceOperandElementType(Instruction *I);

  void preprocessCompositeConstants(IRBuilder<> &B);
  void preprocessUndefs(IRBuilder<> &B);

  CallInst *buildIntrWithMD(Intrinsic::ID IntrID, ArrayRef<Type *> Types,
                            Value *Arg, Value *Arg2, ArrayRef<Constant *> Imms,
                            IRBuilder<> &B) {
    ConstantAsMetadata *CM = ValueAsMetadata::getConstant(Arg);
    MDTuple *TyMD = MDNode::get(F->getContext(), CM);
    MetadataAsValue *VMD = MetadataAsValue::get(F->getContext(), TyMD);
    SmallVector<Value *, 4> Args;
    Args.push_back(Arg2);
    Args.push_back(VMD);
    for (auto *Imm : Imms)
      Args.push_back(Imm);
    return B.CreateIntrinsic(IntrID, {Types}, Args);
  }
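
  // Note (illustrative, not part of the upstream source): buildIntrWithMD
  // wraps Arg into a metadata operand, so a call such as
  //   buildIntrWithMD(Intrinsic::spv_assign_type, {Ty}, Const, I, {}, B)
  // should produce IR roughly of the form
  //   call void @llvm.spv.assign.type(<Ty> %I, metadata <Ty> <Const>)
  // i.e. an LLVM constant of the original type stays attached to the value
  // as metadata and survives into the machine-level translation.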

  void buildAssignPtr(IRBuilder<> &B, Type *ElemTy, Value *Arg);

  void replaceMemInstrUses(Instruction *Old, Instruction *New, IRBuilder<> &B);
  void processInstrAfterVisit(Instruction *I, IRBuilder<> &B);
  void insertAssignPtrTypeIntrs(Instruction *I, IRBuilder<> &B);
  void insertAssignTypeIntrs(Instruction *I, IRBuilder<> &B);
  void insertAssignTypeInstrForTargetExtTypes(TargetExtType *AssignedType,
                                              Value *V, IRBuilder<> &B);
  void replacePointerOperandWithPtrCast(Instruction *I, Value *Pointer,
                                        Type *ExpectedElementType,
                                        unsigned OperandToReplace,
                                        IRBuilder<> &B);
  void insertPtrCastOrAssignTypeInstr(Instruction *I, IRBuilder<> &B);
  void insertSpirvDecorations(Instruction *I, IRBuilder<> &B);
  void processGlobalValue(GlobalVariable &GV, IRBuilder<> &B);
  void processParamTypes(Function *F, IRBuilder<> &B);
  void processParamTypesByFunHeader(Function *F, IRBuilder<> &B);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx);
  Type *deduceFunParamElementType(Function *F, unsigned OpIdx,
                                  std::unordered_set<Function *> &FVisited);

public:
  static char ID;
  SPIRVEmitIntrinsics() : ModulePass(ID) {
    initializeSPIRVEmitIntrinsicsPass(*PassRegistry::getPassRegistry());
  }
  SPIRVEmitIntrinsics(SPIRVTargetMachine *_TM) : ModulePass(ID), TM(_TM) {
    initializeSPIRVEmitIntrinsicsPass(*PassRegistry::getPassRegistry());
  }
  Instruction *visitInstruction(Instruction &I) { return &I; }
  Instruction *visitSwitchInst(SwitchInst &I);
  Instruction *visitGetElementPtrInst(GetElementPtrInst &I);
  Instruction *visitBitCastInst(BitCastInst &I);
  Instruction *visitInsertElementInst(InsertElementInst &I);
  Instruction *visitExtractElementInst(ExtractElementInst &I);
  Instruction *visitInsertValueInst(InsertValueInst &I);
  Instruction *visitExtractValueInst(ExtractValueInst &I);
  Instruction *visitLoadInst(LoadInst &I);
  Instruction *visitStoreInst(StoreInst &I);
  Instruction *visitAllocaInst(AllocaInst &I);
  Instruction *visitAtomicCmpXchgInst(AtomicCmpXchgInst &I);
  Instruction *visitUnreachableInst(UnreachableInst &I);

  StringRef getPassName() const override { return "SPIRV emit intrinsics"; }

  bool runOnModule(Module &M) override;
  bool runOnFunction(Function &F);

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    ModulePass::getAnalysisUsage(AU);
  }
};
} // namespace

char SPIRVEmitIntrinsics::ID = 0;

INITIALIZE_PASS(SPIRVEmitIntrinsics, "emit-intrinsics", "SPIRV emit intrinsics",
                false, false)

static inline bool isAssignTypeInstr(const Instruction *I) {
  return isa<IntrinsicInst>(I) &&
         cast<IntrinsicInst>(I)->getIntrinsicID() == Intrinsic::spv_assign_type;
}

static bool isMemInstrToReplace(Instruction *I) {
  return isa<StoreInst>(I) || isa<LoadInst>(I) || isa<InsertValueInst>(I) ||
         isa<ExtractValueInst>(I) || isa<AtomicCmpXchgInst>(I);
}

static bool isAggrConstForceInt32(const Value *V) {
  return isa<ConstantArray>(V) || isa<ConstantStruct>(V) ||
         isa<ConstantDataArray>(V) ||
         (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
}

static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I) {
  if (isa<PHINode>(I))
    B.SetInsertPoint(I->getParent()->getFirstNonPHIOrDbgOrAlloca());
  else
    B.SetInsertPoint(I);
}

static bool requireAssignType(Instruction *I) {
  IntrinsicInst *Intr = dyn_cast<IntrinsicInst>(I);
  if (Intr) {
    switch (Intr->getIntrinsicID()) {
    case Intrinsic::invariant_start:
    case Intrinsic::invariant_end:
      return false;
    }
  }
  return true;
}

static inline void reportFatalOnTokenType(const Instruction *I) {
  if (I->getType()->isTokenTy())
    report_fatal_error("A token is encountered but SPIR-V without extensions "
                       "does not support token type",
                       false);
}

void SPIRVEmitIntrinsics::buildAssignPtr(IRBuilder<> &B, Type *ElemTy,
                                         Value *Arg) {
  CallInst *AssignPtrTyCI =
      buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {Arg->getType()},
                      Constant::getNullValue(ElemTy), Arg,
                      {B.getInt32(getPointerAddressSpace(Arg->getType()))}, B);
  GR->addDeducedElementType(AssignPtrTyCI, ElemTy);
  GR->addDeducedElementType(Arg, ElemTy);
  AssignPtrTypeInstr[Arg] = AssignPtrTyCI;
}

// Set the element pointer type to the given ValueTy, and try to specify this
// type further (recursively) from the Operand value, if needed.
Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
    Type *ValueTy, Value *Operand, std::unordered_set<Value *> &Visited) {
  Type *Ty = ValueTy;
  if (Operand) {
    if (auto *PtrTy = dyn_cast<PointerType>(Ty)) {
      if (Type *NestedTy = deduceElementTypeHelper(Operand, Visited))
        Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
    } else {
      Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited);
    }
  }
  return Ty;
}

// Traverse User instructions to deduce an element pointer type of the operand.
Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
    Value *Op, std::unordered_set<Value *> &Visited) {
  if (!Op || !isPointerTy(Op->getType()))
    return nullptr;

  if (auto PType = dyn_cast<TypedPointerType>(Op->getType()))
    return PType->getElementType();

  // maybe we already know operand's element type
  if (Type *KnownTy = GR->findDeducedElementType(Op))
    return KnownTy;

  for (User *OpU : Op->users()) {
    if (Instruction *Inst = dyn_cast<Instruction>(OpU)) {
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited))
        return Ty;
    }
  }
  return nullptr;
}

// Implements what we know in advance about intrinsics and builtin calls
// TODO: consider generalizing this particular case by encoding knowledge about
// intrinsics and builtin calls in the corresponding specification rules
static Type *getPointeeTypeByCallInst(StringRef DemangledName,
                                      Function *CalledF, unsigned OpIdx) {
  if ((DemangledName.starts_with("__spirv_ocl_printf(") ||
       DemangledName.starts_with("printf(")) &&
      OpIdx == 0)
    return IntegerType::getInt8Ty(CalledF->getContext());
  return nullptr;
}

// Deduce and return a successfully deduced Type of the Instruction,
// or nullptr otherwise.
Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(Value *I) {
  std::unordered_set<Value *> Visited;
  return deduceElementTypeHelper(I, Visited);
}

Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
    Value *I, std::unordered_set<Value *> &Visited) {
  // allow nullptr as an argument
  if (!I)
    return nullptr;

  // maybe already known
  if (Type *KnownTy = GR->findDeducedElementType(I))
    return KnownTy;

  // maybe a cycle
  if (Visited.find(I) != Visited.end())
    return nullptr;
  Visited.insert(I);

  // fallback value in case we fail to deduce a type
  Type *Ty = nullptr;
  // look for known basic patterns of type inference
  if (auto *Ref = dyn_cast<AllocaInst>(I)) {
    Ty = Ref->getAllocatedType();
  } else if (auto *Ref = dyn_cast<GetElementPtrInst>(I)) {
    Ty = Ref->getResultElementType();
  } else if (auto *Ref = dyn_cast<GlobalValue>(I)) {
    Ty = deduceElementTypeByValueDeep(
        Ref->getValueType(),
        Ref->getNumOperands() > 0 ? Ref->getOperand(0) : nullptr, Visited);
  } else if (auto *Ref = dyn_cast<AddrSpaceCastInst>(I)) {
    Ty = deduceElementTypeHelper(Ref->getPointerOperand(), Visited);
  } else if (auto *Ref = dyn_cast<BitCastInst>(I)) {
    if (Type *Src = Ref->getSrcTy(), *Dest = Ref->getDestTy();
        isPointerTy(Src) && isPointerTy(Dest))
      Ty = deduceElementTypeHelper(Ref->getOperand(0), Visited);
  } else if (auto *Ref = dyn_cast<AtomicCmpXchgInst>(I)) {
    Value *Op = Ref->getNewValOperand();
    Ty = deduceElementTypeByValueDeep(Op->getType(), Op, Visited);
  } else if (auto *Ref = dyn_cast<AtomicRMWInst>(I)) {
    Value *Op = Ref->getValOperand();
    Ty = deduceElementTypeByValueDeep(Op->getType(), Op, Visited);
  } else if (auto *Ref = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Ty = deduceElementTypeByUsersDeep(Ref->getIncomingValue(i), Visited);
      if (Ty)
        break;
    }
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    for (Value *Op : {Ref->getTrueValue(), Ref->getFalseValue()}) {
      Ty = deduceElementTypeByUsersDeep(Op, Visited);
      if (Ty)
        break;
    }
  }

  // remember the found relationship
  if (Ty) {
    // specify nested types if needed, otherwise return unchanged
    GR->addDeducedElementType(I, Ty);
  }

  return Ty;
}
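
// Example (illustrative, not from the upstream source): for
//   %a = alloca %struct.S
//   %g = getelementptr inbounds %struct.S, ptr %a, i32 0, i32 1
// deduceElementTypeHelper(%a) is expected to return %struct.S (the allocated
// type) and deduceElementTypeHelper(%g) the GEP's result element type, so the
// spv_assign_ptr_type intrinsics emitted later can carry explicit pointee
// types for both values.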

// Re-create a type of the value if it has untyped pointer fields, also nested.
// Return the original value type if no correction of untyped pointer
// information is found or needed.
Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(User *U) {
  std::unordered_set<Value *> Visited;
  return deduceNestedTypeHelper(U, U->getType(), Visited);
}

Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
    User *U, Type *OrigTy, std::unordered_set<Value *> &Visited) {
  if (!U)
    return OrigTy;

  // maybe already known
  if (Type *KnownTy = GR->findDeducedCompositeType(U))
    return KnownTy;

  // maybe a cycle
  if (Visited.find(U) != Visited.end())
    return OrigTy;
  Visited.insert(U);

  if (dyn_cast<StructType>(OrigTy)) {
    SmallVector<Type *> Tys;
    bool Change = false;
    for (unsigned i = 0; i < U->getNumOperands(); ++i) {
      Value *Op = U->getOperand(i);
      Type *OpTy = Op->getType();
      Type *Ty = OpTy;
      if (Op) {
        if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
          if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
            Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
        } else {
          Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
        }
      }
      Tys.push_back(Ty);
      Change |= Ty != OpTy;
    }
    if (Change) {
      Type *NewTy = StructType::create(Tys);
      GR->addDeducedCompositeType(U, NewTy);
      return NewTy;
    }
  } else if (auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = ArrTy->getElementType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
      }
      if (Ty != OpTy) {
        Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  } else if (auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
    if (Value *Op = U->getNumOperands() > 0 ? U->getOperand(0) : nullptr) {
      Type *OpTy = VecTy->getElementType();
      Type *Ty = OpTy;
      if (auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
        if (Type *NestedTy = deduceElementTypeHelper(Op, Visited))
          Ty = TypedPointerType::get(NestedTy, PtrTy->getAddressSpace());
      } else {
        Ty = deduceNestedTypeHelper(dyn_cast<User>(Op), OpTy, Visited);
      }
      if (Ty != OpTy) {
        Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
        GR->addDeducedCompositeType(U, NewTy);
        return NewTy;
      }
    }
  }

  return OrigTy;
}
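
// Example (illustrative, not from the upstream source): for a constant such as
//   { ptr addrspace(1), i32 } { ptr addrspace(1) @g, i32 0 }
// where @g is known to point to i32, deduceNestedTypeHelper is expected to
// rebuild the struct type with a TypedPointerType field, conceptually
//   { TypedPointerType(i32, addrspace 1), i32 }
// so that composite SPIR-V types can later be emitted with correct pointee
// information.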

Type *SPIRVEmitIntrinsics::deduceElementType(Value *I) {
  if (Type *Ty = deduceElementTypeHelper(I))
    return Ty;
  return IntegerType::getInt8Ty(I->getContext());
}

// If the Instruction has pointer operands with unresolved types, this function
// tries to deduce them. If the Instruction has pointer operands with known
// types which differ from the expected ones, this function tries to insert a
// pointer cast (spv_ptrcast) to resolve the issue.
void SPIRVEmitIntrinsics::deduceOperandElementType(Instruction *I) {
  SmallVector<std::pair<Value *, unsigned>> Ops;
  Type *KnownElemTy = nullptr;
  // look for known basic patterns of type inference
  if (auto *Ref = dyn_cast<PHINode>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    for (unsigned i = 0; i < Ref->getNumIncomingValues(); i++) {
      Value *Op = Ref->getIncomingValue(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<SelectInst>(I)) {
    if (!isPointerTy(I->getType()) ||
        !(KnownElemTy = GR->findDeducedElementType(I)))
      return;
    for (unsigned i = 0; i < Ref->getNumOperands(); i++) {
      Value *Op = Ref->getOperand(i);
      if (isPointerTy(Op->getType()))
        Ops.push_back(std::make_pair(Op, i));
    }
  } else if (auto *Ref = dyn_cast<ReturnInst>(I)) {
    Type *RetTy = F->getReturnType();
    if (!isPointerTy(RetTy))
      return;
    Value *Op = Ref->getReturnValue();
    if (!Op)
      return;
    if (!(KnownElemTy = GR->findDeducedElementType(F))) {
      if (Type *OpElemTy = GR->findDeducedElementType(Op)) {
        GR->addDeducedElementType(F, OpElemTy);
        TypedPointerType *DerivedTy =
            TypedPointerType::get(OpElemTy, getPointerAddressSpace(RetTy));
        GR->addReturnType(F, DerivedTy);
      }
      return;
    }
    Ops.push_back(std::make_pair(Op, 0));
  } else if (auto *Ref = dyn_cast<ICmpInst>(I)) {
    if (!isPointerTy(Ref->getOperand(0)->getType()))
      return;
    Value *Op0 = Ref->getOperand(0);
    Value *Op1 = Ref->getOperand(1);
    Type *ElemTy0 = GR->findDeducedElementType(Op0);
    Type *ElemTy1 = GR->findDeducedElementType(Op1);
    if (ElemTy0) {
      KnownElemTy = ElemTy0;
      Ops.push_back(std::make_pair(Op1, 1));
    } else if (ElemTy1) {
      KnownElemTy = ElemTy1;
      Ops.push_back(std::make_pair(Op0, 0));
    }
  }

  // There is not enough info to deduce types, or everything is already valid.
  if (!KnownElemTy || Ops.size() == 0)
    return;

  LLVMContext &Ctx = F->getContext();
  IRBuilder<> B(Ctx);
  for (auto &OpIt : Ops) {
    Value *Op = OpIt.first;
    if (Op->use_empty())
      continue;
    Type *Ty = GR->findDeducedElementType(Op);
    if (Ty == KnownElemTy)
      continue;
    if (Instruction *User = dyn_cast<Instruction>(Op->use_begin()->get()))
      setInsertPointSkippingPhis(B, User->getNextNode());
    else
      B.SetInsertPoint(I);
    Value *OpTyVal = Constant::getNullValue(KnownElemTy);
    Type *OpTy = Op->getType();
    if (!Ty) {
      GR->addDeducedElementType(Op, KnownElemTy);
      // check if there is an existing Intrinsic::spv_assign_ptr_type instruction
      auto It = AssignPtrTypeInstr.find(Op);
      if (It == AssignPtrTypeInstr.end()) {
        CallInst *CI =
            buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal, Op,
                            {B.getInt32(getPointerAddressSpace(OpTy))}, B);
        AssignPtrTypeInstr[Op] = CI;
      } else {
        It->second->setArgOperand(
            1, MetadataAsValue::get(
                   Ctx, MDNode::get(Ctx, ValueAsMetadata::getConstant(OpTyVal))));
      }
    } else {
      SmallVector<Type *, 2> Types = {OpTy, OpTy};
      MetadataAsValue *VMD = MetadataAsValue::get(
          Ctx, MDNode::get(Ctx, ValueAsMetadata::getConstant(OpTyVal)));
      SmallVector<Value *, 2> Args = {Op, VMD,
                                      B.getInt32(getPointerAddressSpace(OpTy))};
      CallInst *PtrCastI =
          B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
      I->setOperand(OpIt.second, PtrCastI);
    }
  }
}
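
// Example (illustrative, not from the upstream source): given
//   %sel = select i1 %c, ptr %p, ptr %q
// where the element type of %sel has already been deduced as i32 but nothing
// is known about %q yet, this routine records i32 for %q and emits an
// spv_assign_ptr_type for it (or an spv_ptrcast if %q already had a different
// deduced element type), so that both operands agree by instruction selection.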

void SPIRVEmitIntrinsics::replaceMemInstrUses(Instruction *Old,
                                              Instruction *New,
                                              IRBuilder<> &B) {
  while (!Old->user_empty()) {
    auto *U = Old->user_back();
    if (isAssignTypeInstr(U)) {
      B.SetInsertPoint(U);
      SmallVector<Value *, 2> Args = {New, U->getOperand(1)};
      B.CreateIntrinsic(Intrinsic::spv_assign_type, {New->getType()}, Args);
      U->eraseFromParent();
    } else if (isMemInstrToReplace(U) || isa<ReturnInst>(U) ||
               isa<CallInst>(U)) {
      U->replaceUsesOfWith(Old, New);
    } else {
      llvm_unreachable("illegal aggregate intrinsic user");
    }
  }
  Old->eraseFromParent();
}

void SPIRVEmitIntrinsics::preprocessUndefs(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(F))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    Instruction *I = Worklist.front();
    Worklist.pop();

    for (auto &Op : I->operands()) {
      auto *AggrUndef = dyn_cast<UndefValue>(Op);
      if (!AggrUndef || !Op->getType()->isAggregateType())
        continue;

      B.SetInsertPoint(I);
      auto *IntrUndef = B.CreateIntrinsic(Intrinsic::spv_undef, {}, {});
      Worklist.push(IntrUndef);
      I->replaceUsesOfWith(Op, IntrUndef);
      AggrConsts[IntrUndef] = AggrUndef;
      AggrConstTypes[IntrUndef] = AggrUndef->getType();
    }
  }
}

void SPIRVEmitIntrinsics::preprocessCompositeConstants(IRBuilder<> &B) {
  std::queue<Instruction *> Worklist;
  for (auto &I : instructions(F))
    Worklist.push(&I);

  while (!Worklist.empty()) {
    auto *I = Worklist.front();
    assert(I);
    bool KeepInst = false;
    for (const auto &Op : I->operands()) {
      Constant *AggrConst = nullptr;
      Type *ResTy = nullptr;
      if (auto *COp = dyn_cast<ConstantVector>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = COp->getType();
      } else if (auto *COp = dyn_cast<ConstantArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantStruct>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantDataArray>(Op)) {
        AggrConst = cast<Constant>(COp);
        ResTy = B.getInt32Ty();
      } else if (auto *COp = dyn_cast<ConstantAggregateZero>(Op)) {
        if (!Op->getType()->isVectorTy()) {
          AggrConst = cast<Constant>(COp);
          ResTy = B.getInt32Ty();
        }
      }
      if (AggrConst) {
        SmallVector<Value *> Args;
        if (auto *COp = dyn_cast<ConstantDataSequential>(Op))
          for (unsigned i = 0; i < COp->getNumElements(); ++i)
            Args.push_back(COp->getElementAsConstant(i));
        else
          for (auto &COp : AggrConst->operands())
            Args.push_back(COp);
        B.SetInsertPoint(I);
        auto *CI =
            B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {Args});
        Worklist.push(CI);
        I->replaceUsesOfWith(Op, CI);
        KeepInst = true;
        AggrConsts[CI] = AggrConst;
        AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst);
      }
    }
    if (!KeepInst)
      Worklist.pop();
  }
}

Instruction *SPIRVEmitIntrinsics::visitSwitchInst(SwitchInst &I) {
  BasicBlock *ParentBB = I.getParent();
  IRBuilder<> B(ParentBB);
  B.SetInsertPoint(&I);
  SmallVector<Value *, 4> Args;
  SmallVector<BasicBlock *> BBCases;
  for (auto &Op : I.operands()) {
    if (Op.get()->getType()->isSized()) {
      Args.push_back(Op);
    } else if (BasicBlock *BB = dyn_cast<BasicBlock>(Op.get())) {
      BBCases.push_back(BB);
      Args.push_back(BlockAddress::get(BB->getParent(), BB));
    } else {
      report_fatal_error("Unexpected switch operand");
    }
  }
  CallInst *NewI = B.CreateIntrinsic(Intrinsic::spv_switch,
                                     {I.getOperand(0)->getType()}, {Args});
  // remove the switch to avoid its unneeded and undesirable unwrapping into
  // branches and conditions
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  // insert an artificial and temporary instruction to preserve a valid CFG;
  // it will be removed after the IR translation pass
  B.SetInsertPoint(ParentBB);
  IndirectBrInst *BrI = B.CreateIndirectBr(
      Constant::getNullValue(PointerType::getUnqual(ParentBB->getContext())),
      BBCases.size());
  for (BasicBlock *BBCase : BBCases)
    BrI->addDestination(BBCase);
  return BrI;
}

Instruction *SPIRVEmitIntrinsics::visitGetElementPtrInst(GetElementPtrInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 2> Types = {I.getType(), I.getOperand(0)->getType()};
  SmallVector<Value *, 4> Args;
  Args.push_back(B.getInt1(I.isInBounds()));
  for (auto &Op : I.operands())
    Args.push_back(Op);
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_gep, {Types}, {Args});
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitBitCastInst(BitCastInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  Value *Source = I.getOperand(0);

  // SPIR-V, contrary to LLVM 17+ IR, supports bitcasts between pointers of
  // varying element types. In case of IR coming from older versions of LLVM,
  // such bitcasts do not provide sufficient information; they should simply be
  // skipped here and handled later in insertPtrCastOrAssignTypeInstr.
  if (isPointerTy(I.getType())) {
    I.replaceAllUsesWith(Source);
    I.eraseFromParent();
    return nullptr;
  }

  SmallVector<Type *, 2> Types = {I.getType(), Source->getType()};
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_bitcast, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}
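
// Example (illustrative, not from the upstream source): a value-to-value cast
// such as
//   %v = bitcast <4 x i32> %x to <2 x i64>
// is rewritten into a call to the spv_bitcast intrinsic, while a
// pointer-to-pointer bitcast (possible in IR produced by older LLVM versions)
// is folded away here, with its uses rewired to the source pointer, and the
// pointee-type bookkeeping is left to insertPtrCastOrAssignTypeInstr.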

void SPIRVEmitIntrinsics::insertAssignTypeInstrForTargetExtTypes(
    TargetExtType *AssignedType, Value *V, IRBuilder<> &B) {
  // Do not emit spv_assign_type if V is of the AssignedType already.
  if (V->getType() == AssignedType)
    return;

  // Do not emit spv_assign_type if there is one already targeting V. If the
  // found spv_assign_type assigns a type different from AssignedType, report an
  // error. Builtin types cannot be redeclared or cast.
  for (auto User : V->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II || II->getIntrinsicID() != Intrinsic::spv_assign_type)
      continue;

    MetadataAsValue *VMD = cast<MetadataAsValue>(II->getOperand(1));
    Type *BuiltinType =
        dyn_cast<ConstantAsMetadata>(VMD->getMetadata())->getType();
    if (BuiltinType != AssignedType)
      report_fatal_error("Type mismatch " + BuiltinType->getTargetExtName() +
                         "/" + AssignedType->getTargetExtName() +
                         " for value " + V->getName(),
                         false);
    return;
  }

  Constant *Const = UndefValue::get(AssignedType);
  buildIntrWithMD(Intrinsic::spv_assign_type, {V->getType()}, Const, V, {}, B);
}

void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
    Instruction *I, Value *Pointer, Type *ExpectedElementType,
    unsigned OperandToReplace, IRBuilder<> &B) {
  // If Pointer is the result of a no-op BitCastInst (ptr -> ptr), use the
  // source pointer instead. The BitCastInst should be removed later when
  // visited.
  while (BitCastInst *BC = dyn_cast<BitCastInst>(Pointer))
    Pointer = BC->getOperand(0);

  // Do not emit spv_ptrcast if Pointer's element type is ExpectedElementType
  Type *PointerElemTy = deduceElementTypeHelper(Pointer);
  if (PointerElemTy == ExpectedElementType)
    return;

  setInsertPointSkippingPhis(B, I);
  Constant *ExpectedElementTypeConst =
      Constant::getNullValue(ExpectedElementType);
  ConstantAsMetadata *CM =
      ValueAsMetadata::getConstant(ExpectedElementTypeConst);
  MDTuple *TyMD = MDNode::get(F->getContext(), CM);
  MetadataAsValue *VMD = MetadataAsValue::get(F->getContext(), TyMD);
  unsigned AddressSpace = getPointerAddressSpace(Pointer->getType());
  bool FirstPtrCastOrAssignPtrType = true;

  // Do not emit a new spv_ptrcast if an equivalent one already exists or when
  // an spv_assign_ptr_type already targets this pointer with the same element
  // type.
  for (auto User : Pointer->users()) {
    auto *II = dyn_cast<IntrinsicInst>(User);
    if (!II ||
        (II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
         II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
        II->getOperand(0) != Pointer)
      continue;

    // There is some spv_ptrcast/spv_assign_ptr_type already targeting this
    // pointer.
    FirstPtrCastOrAssignPtrType = false;
    if (II->getOperand(1) != VMD ||
        dyn_cast<ConstantInt>(II->getOperand(2))->getSExtValue() !=
            AddressSpace)
      continue;

    // The spv_ptrcast/spv_assign_ptr_type targeting this pointer is of the
    // same element type and address space.
    if (II->getIntrinsicID() != Intrinsic::spv_ptrcast)
      return;

    // This must be an spv_ptrcast; do not emit a new one if this one is in the
    // same BB as I. Otherwise, search for other spv_ptrcast/spv_assign_ptr_type.
    if (II->getParent() != I->getParent())
      continue;

    I->setOperand(OperandToReplace, II);
    return;
  }

  // // Do not emit spv_ptrcast if it would cast to the default pointer element
  // // type (i8) of the same address space.
  // if (ExpectedElementType->isIntegerTy(8))
  //   return;

  // If this would be the first spv_ptrcast, do not emit spv_ptrcast and emit
  // spv_assign_ptr_type instead.
  if (FirstPtrCastOrAssignPtrType &&
      (isa<Instruction>(Pointer) || isa<Argument>(Pointer))) {
    CallInst *CI = buildIntrWithMD(
        Intrinsic::spv_assign_ptr_type, {Pointer->getType()},
        ExpectedElementTypeConst, Pointer, {B.getInt32(AddressSpace)}, B);
    GR->addDeducedElementType(CI, ExpectedElementType);
    GR->addDeducedElementType(Pointer, ExpectedElementType);
    AssignPtrTypeInstr[Pointer] = CI;
    return;
  }

  // Emit spv_ptrcast
  SmallVector<Type *, 2> Types = {Pointer->getType(), Pointer->getType()};
  SmallVector<Value *, 2> Args = {Pointer, VMD, B.getInt32(AddressSpace)};
  auto *PtrCastI = B.CreateIntrinsic(Intrinsic::spv_ptrcast, {Types}, Args);
  I->setOperand(OperandToReplace, PtrCastI);
}
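
// Example (illustrative, not from the upstream source): for
//   store i32 0, ptr %p
// where %p was previously annotated as pointing to i8, the store's pointer
// operand is rewritten to go through something like
//   %pc = call ptr @llvm.spv.ptrcast.p0(ptr %p, metadata i32 0, i32 0)
//   store i32 0, ptr %pc
// making the pointee-type mismatch explicit; the intrinsic mangling and
// operand shape shown here are schematic rather than authoritative.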

void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(Instruction *I,
                                                         IRBuilder<> &B) {
  // Handle basic instructions:
  StoreInst *SI = dyn_cast<StoreInst>(I);
  if (SI && F->getCallingConv() == CallingConv::SPIR_KERNEL &&
      isPointerTy(SI->getValueOperand()->getType()) &&
      isa<Argument>(SI->getValueOperand())) {
    return replacePointerOperandWithPtrCast(
        I, SI->getValueOperand(), IntegerType::getInt8Ty(F->getContext()), 0,
        B);
  } else if (SI) {
    return replacePointerOperandWithPtrCast(
        I, SI->getPointerOperand(), SI->getValueOperand()->getType(), 1, B);
  } else if (LoadInst *LI = dyn_cast<LoadInst>(I)) {
    return replacePointerOperandWithPtrCast(I, LI->getPointerOperand(),
                                            LI->getType(), 0, B);
  } else if (GetElementPtrInst *GEPI = dyn_cast<GetElementPtrInst>(I)) {
    return replacePointerOperandWithPtrCast(I, GEPI->getPointerOperand(),
                                            GEPI->getSourceElementType(), 0, B);
  }

  // Handle calls to builtins (non-intrinsics):
  CallInst *CI = dyn_cast<CallInst>(I);
  if (!CI || CI->isIndirectCall() || CI->isInlineAsm() ||
      !CI->getCalledFunction() || CI->getCalledFunction()->isIntrinsic())
    return;

  // collect information about formal parameter types
  std::string DemangledName =
      getOclOrSpirvBuiltinDemangledName(CI->getCalledFunction()->getName());
  Function *CalledF = CI->getCalledFunction();
  SmallVector<Type *, 4> CalledArgTys;
  bool HaveTypes = false;
  for (unsigned OpIdx = 0; OpIdx < CalledF->arg_size(); ++OpIdx) {
    Argument *CalledArg = CalledF->getArg(OpIdx);
    Type *ArgType = CalledArg->getType();
    if (!isPointerTy(ArgType)) {
      CalledArgTys.push_back(nullptr);
    } else if (isTypedPointerTy(ArgType)) {
      CalledArgTys.push_back(cast<TypedPointerType>(ArgType)->getElementType());
      HaveTypes = true;
    } else {
      Type *ElemTy = GR->findDeducedElementType(CalledArg);
      if (!ElemTy && hasPointeeTypeAttr(CalledArg))
        ElemTy = getPointeeTypeByAttr(CalledArg);
      if (!ElemTy) {
        ElemTy = getPointeeTypeByCallInst(DemangledName, CalledF, OpIdx);
        if (ElemTy) {
          GR->addDeducedElementType(CalledArg, ElemTy);
        } else {
          for (User *U : CalledArg->users()) {
            if (Instruction *Inst = dyn_cast<Instruction>(U)) {
              if ((ElemTy = deduceElementTypeHelper(Inst)) != nullptr)
                break;
            }
          }
        }
      }
      HaveTypes |= ElemTy != nullptr;
      CalledArgTys.push_back(ElemTy);
    }
  }

  if (DemangledName.empty() && !HaveTypes)
    return;

  for (unsigned OpIdx = 0; OpIdx < CI->arg_size(); OpIdx++) {
    Value *ArgOperand = CI->getArgOperand(OpIdx);
    if (!isa<PointerType>(ArgOperand->getType()) &&
        !isa<TypedPointerType>(ArgOperand->getType()))
      continue;

    // Constants (nulls/undefs) are handled in insertAssignPtrTypeIntrs()
    if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand)) {
      // However, we may have assumptions about the formal argument's type and
      // may need to insert a ptr cast for the actual parameter of this call.
      Argument *CalledArg = CalledF->getArg(OpIdx);
      if (!GR->findDeducedElementType(CalledArg))
        continue;
    }

    Type *ExpectedType =
        OpIdx < CalledArgTys.size() ? CalledArgTys[OpIdx] : nullptr;
    if (!ExpectedType && !DemangledName.empty())
      ExpectedType = SPIRV::parseBuiltinCallArgumentBaseType(
          DemangledName, OpIdx, I->getContext());
    if (!ExpectedType)
      continue;

    if (ExpectedType->isTargetExtTy())
      insertAssignTypeInstrForTargetExtTypes(cast<TargetExtType>(ExpectedType),
                                             ArgOperand, B);
    else
      replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx, B);
  }
}

Instruction *SPIRVEmitIntrinsics::visitInsertElementInst(InsertElementInst &I) {
  SmallVector<Type *, 4> Types = {I.getType(), I.getOperand(0)->getType(),
                                  I.getOperand(1)->getType(),
                                  I.getOperand(2)->getType()};
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args(I.op_begin(), I.op_end());
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_insertelt, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *
SPIRVEmitIntrinsics::visitExtractElementInst(ExtractElementInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 3> Types = {I.getType(), I.getVectorOperandType(),
                                  I.getIndexOperand()->getType()};
  SmallVector<Value *, 2> Args = {I.getVectorOperand(), I.getIndexOperand()};
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_extractelt, {Types}, {Args});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitInsertValueInst(InsertValueInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Type *, 1> Types = {I.getInsertedValueOperand()->getType()};
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    if (isa<UndefValue>(Op))
      Args.push_back(UndefValue::get(B.getInt32Ty()));
    else
      Args.push_back(Op);
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  Instruction *NewI =
      B.CreateIntrinsic(Intrinsic::spv_insertv, {Types}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitExtractValueInst(ExtractValueInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    Args.push_back(Op);
  for (auto &Op : I.indices())
    Args.push_back(B.getInt32(Op));
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_extractv, {I.getType()}, {Args});
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitLoadInst(LoadInst &I) {
  if (!I.getType()->isAggregateType())
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getLoadMemOperandFlags(I, F->getParent()->getDataLayout());
  auto *NewI =
      B.CreateIntrinsic(Intrinsic::spv_load, {I.getOperand(0)->getType()},
                        {I.getPointerOperand(), B.getInt16(Flags),
                         B.getInt8(I.getAlign().value())});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitStoreInst(StoreInst &I) {
  if (!AggrStores.contains(&I))
    return &I;
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  const auto *TLI = TM->getSubtargetImpl()->getTargetLowering();
  MachineMemOperand::Flags Flags =
      TLI->getStoreMemOperandFlags(I, F->getParent()->getDataLayout());
  auto *PtrOp = I.getPointerOperand();
  auto *NewI = B.CreateIntrinsic(
      Intrinsic::spv_store, {I.getValueOperand()->getType(), PtrOp->getType()},
      {I.getValueOperand(), PtrOp, B.getInt16(Flags),
       B.getInt8(I.getAlign().value())});
  I.eraseFromParent();
  return NewI;
}
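
// Example (illustrative, not from the upstream source): an aggregate store
//   store %struct.S %v, ptr %p
// is replaced by a call to the spv_store intrinsic that carries the value, the
// pointer, the memory-operand flags, and the alignment. Only stores whose
// value type is an aggregate or a vector (collected into AggrStores up front
// in runOnFunction) are rewritten this way; other stores are left alone here.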

Instruction *SPIRVEmitIntrinsics::visitAllocaInst(AllocaInst &I) {
  Value *ArraySize = nullptr;
  if (I.isArrayAllocation()) {
    const SPIRVSubtarget *STI = TM->getSubtargetImpl(*I.getFunction());
    if (!STI->canUseExtension(
            SPIRV::Extension::SPV_INTEL_variable_length_array))
      report_fatal_error(
          "array allocation: this instruction requires the following "
          "SPIR-V extension: SPV_INTEL_variable_length_array",
          false);
    ArraySize = I.getArraySize();
  }
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  TrackConstants = false;
  Type *PtrTy = I.getType();
  auto *NewI =
      ArraySize ? B.CreateIntrinsic(Intrinsic::spv_alloca_array,
                                    {PtrTy, ArraySize->getType()}, {ArraySize})
                : B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy}, {});
  std::string InstName = I.hasName() ? I.getName().str() : "";
  I.replaceAllUsesWith(NewI);
  I.eraseFromParent();
  NewI->setName(InstName);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitAtomicCmpXchgInst(AtomicCmpXchgInst &I) {
  assert(I.getType()->isAggregateType() && "Aggregate result is expected");
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  SmallVector<Value *> Args;
  for (auto &Op : I.operands())
    Args.push_back(Op);
  Args.push_back(B.getInt32(I.getSyncScopeID()));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getSuccessOrdering()))));
  Args.push_back(B.getInt32(
      static_cast<uint32_t>(getMemSemantics(I.getFailureOrdering()))));
  auto *NewI = B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
                                 {I.getPointerOperand()->getType()}, {Args});
  replaceMemInstrUses(&I, NewI, B);
  return NewI;
}

Instruction *SPIRVEmitIntrinsics::visitUnreachableInst(UnreachableInst &I) {
  IRBuilder<> B(I.getParent());
  B.SetInsertPoint(&I);
  B.CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
  return &I;
}

void SPIRVEmitIntrinsics::processGlobalValue(GlobalVariable &GV,
                                             IRBuilder<> &B) {
  // Skip the special artificial variable llvm.global.annotations.
  if (GV.getName() == "llvm.global.annotations")
    return;
  if (GV.hasInitializer() && !isa<UndefValue>(GV.getInitializer())) {
    // Deduce the element type and store the result in the Global Registry.
    // The return value is ignored, because TypedPointerType is not supported
    // by general LLVM IR logic.
    deduceElementTypeHelper(&GV);
    Constant *Init = GV.getInitializer();
    Type *Ty = isAggrConstForceInt32(Init) ? B.getInt32Ty() : Init->getType();
    Constant *Const = isAggrConstForceInt32(Init) ? B.getInt32(1) : Init;
    auto *InitInst = B.CreateIntrinsic(Intrinsic::spv_init_global,
                                       {GV.getType(), Ty}, {&GV, Const});
    InitInst->setArgOperand(1, Init);
  }
  if ((!GV.hasInitializer() || isa<UndefValue>(GV.getInitializer())) &&
      GV.getNumUses() == 0)
    B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.getType(), &GV);
}
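
// Example (illustrative, not from the upstream source): for
//   @g = addrspace(1) global i32 42
// the pass emits, schematically,
//   call void @llvm.spv.init.global(ptr addrspace(1) @g, i32 42)
// so the IRTranslator does not need to handle the initializer itself, while a
// global with no (or undef) initializer and no uses gets an spv_unref_global
// call instead so it is still materialized as a SPIR-V global variable.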

void SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(Instruction *I,
                                                   IRBuilder<> &B) {
  reportFatalOnTokenType(I);
  if (!isPointerTy(I->getType()) || !requireAssignType(I) ||
      isa<BitCastInst>(I))
    return;

  setInsertPointSkippingPhis(B, I->getNextNode());

  Type *ElemTy = deduceElementType(I);
  Constant *EltTyConst = UndefValue::get(ElemTy);
  unsigned AddressSpace = getPointerAddressSpace(I->getType());
  CallInst *CI = buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {I->getType()},
                                 EltTyConst, I, {B.getInt32(AddressSpace)}, B);
  GR->addDeducedElementType(CI, ElemTy);
  AssignPtrTypeInstr[I] = CI;
}

void SPIRVEmitIntrinsics::insertAssignTypeIntrs(Instruction *I,
                                                IRBuilder<> &B) {
  reportFatalOnTokenType(I);
  Type *Ty = I->getType();
  if (!Ty->isVoidTy() && !isPointerTy(Ty) && requireAssignType(I)) {
    setInsertPointSkippingPhis(B, I->getNextNode());
    Type *TypeToAssign = Ty;
    if (auto *II = dyn_cast<IntrinsicInst>(I)) {
      if (II->getIntrinsicID() == Intrinsic::spv_const_composite ||
          II->getIntrinsicID() == Intrinsic::spv_undef) {
        auto It = AggrConstTypes.find(II);
        if (It == AggrConstTypes.end())
          report_fatal_error("Unknown composite intrinsic type");
        TypeToAssign = It->second;
      }
    }
    Constant *Const = UndefValue::get(TypeToAssign);
    buildIntrWithMD(Intrinsic::spv_assign_type, {Ty}, Const, I, {}, B);
  }
  for (const auto &Op : I->operands()) {
    if (isa<ConstantPointerNull>(Op) || isa<UndefValue>(Op) ||
        // Check GetElementPtrConstantExpr case.
        (isa<ConstantExpr>(Op) && isa<GEPOperator>(Op))) {
      setInsertPointSkippingPhis(B, I);
      if (isa<UndefValue>(Op) && Op->getType()->isAggregateType())
        buildIntrWithMD(Intrinsic::spv_assign_type, {B.getInt32Ty()}, Op,
                        UndefValue::get(B.getInt32Ty()), {}, B);
      else if (!isa<Instruction>(Op)) // TODO: This case could be removed
        buildIntrWithMD(Intrinsic::spv_assign_type, {Op->getType()}, Op, Op, {},
                        B);
    }
  }
}

void SPIRVEmitIntrinsics::insertSpirvDecorations(Instruction *I,
                                                 IRBuilder<> &B) {
  if (MDNode *MD = I->getMetadata("spirv.Decorations")) {
    B.SetInsertPoint(I->getNextNode());
    B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {I->getType()},
                      {I, MetadataAsValue::get(I->getContext(), MD)});
  }
}

void SPIRVEmitIntrinsics::processInstrAfterVisit(Instruction *I,
                                                 IRBuilder<> &B) {
  auto *II = dyn_cast<IntrinsicInst>(I);
  if (II && II->getIntrinsicID() == Intrinsic::spv_const_composite &&
      TrackConstants) {
    B.SetInsertPoint(I->getNextNode());
    auto t = AggrConsts.find(I);
    assert(t != AggrConsts.end());
    auto *NewOp =
        buildIntrWithMD(Intrinsic::spv_track_constant,
                        {II->getType(), II->getType()}, t->second, I, {}, B);
    I->replaceAllUsesWith(NewOp);
    NewOp->setArgOperand(0, I);
  }
  for (const auto &Op : I->operands()) {
    if ((isa<ConstantAggregateZero>(Op) && Op->getType()->isVectorTy()) ||
        isa<PHINode>(I) || isa<SwitchInst>(I))
      TrackConstants = false;
    if ((isa<ConstantData>(Op) || isa<ConstantExpr>(Op)) && TrackConstants) {
      unsigned OpNo = Op.getOperandNo();
      if (II && ((II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
                 (II->paramHasAttr(OpNo, Attribute::ImmArg))))
        continue;
      B.SetInsertPoint(I);
      Value *OpTyVal = Op;
      if (Op->getType()->isTargetExtTy())
        OpTyVal = Constant::getNullValue(
            IntegerType::get(I->getContext(), GR->getPointerSize()));
      auto *NewOp = buildIntrWithMD(Intrinsic::spv_track_constant,
                                    {Op->getType(), OpTyVal->getType()}, Op,
                                    OpTyVal, {}, B);
      I->setOperand(OpNo, NewOp);
    }
  }
  if (I->hasName()) {
    reportFatalOnTokenType(I);
    setInsertPointSkippingPhis(B, I->getNextNode());
    std::vector<Value *> Args = {I};
    addStringImm(I->getName(), B, Args);
    B.CreateIntrinsic(Intrinsic::spv_assign_name, {I->getType()}, Args);
  }
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(Function *F,
                                                     unsigned OpIdx) {
  std::unordered_set<Function *> FVisited;
  return deduceFunParamElementType(F, OpIdx, FVisited);
}

Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
    Function *F, unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
  // maybe a cycle
  if (FVisited.find(F) != FVisited.end())
    return nullptr;
  FVisited.insert(F);

  std::unordered_set<Value *> Visited;
  SmallVector<std::pair<Function *, unsigned>> Lookup;
  // search in function's call sites
  for (User *U : F->users()) {
    CallInst *CI = dyn_cast<CallInst>(U);
    if (!CI || OpIdx >= CI->arg_size())
      continue;
    Value *OpArg = CI->getArgOperand(OpIdx);
    if (!isPointerTy(OpArg->getType()))
      continue;
    // maybe we already know operand's element type
    if (Type *KnownTy = GR->findDeducedElementType(OpArg))
      return KnownTy;
    // try to deduce from the operand itself
    Visited.clear();
    if (Type *Ty = deduceElementTypeHelper(OpArg, Visited))
      return Ty;
    // search in actual parameter's users
    for (User *OpU : OpArg->users()) {
      Instruction *Inst = dyn_cast<Instruction>(OpU);
      if (!Inst || Inst == CI)
        continue;
      Visited.clear();
      if (Type *Ty = deduceElementTypeHelper(Inst, Visited))
        return Ty;
    }
    // check if it's a formal parameter of the outer function
    if (!CI->getParent() || !CI->getParent()->getParent())
      continue;
    Function *OuterF = CI->getParent()->getParent();
    if (FVisited.find(OuterF) != FVisited.end())
      continue;
    for (unsigned i = 0; i < OuterF->arg_size(); ++i) {
      if (OuterF->getArg(i) == OpArg) {
        Lookup.push_back(std::make_pair(OuterF, i));
        break;
      }
    }
  }

  // search in function parameters
  for (auto &Pair : Lookup) {
    if (Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
      return Ty;
  }

  return nullptr;
}
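
// Example (illustrative, not from the upstream source): if a callee
//   define void @use(ptr %p)
// is only called as @use(ptr %a) where %a is an alloca of i32 in the caller,
// deduceFunParamElementType follows the call sites (and, transitively, the
// callers' own formal parameters) to conclude that %p points to i32, so
// processParamTypes can emit spv_assign_ptr_type for the argument.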

void SPIRVEmitIntrinsics::processParamTypesByFunHeader(Function *F,
                                                       IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
    Argument *Arg = F->getArg(OpIdx);
    if (!isUntypedPointerTy(Arg->getType()))
      continue;
    Type *ElemTy = GR->findDeducedElementType(Arg);
    if (!ElemTy && hasPointeeTypeAttr(Arg) &&
        (ElemTy = getPointeeTypeByAttr(Arg)) != nullptr)
      buildAssignPtr(B, ElemTy, Arg);
  }
}

void SPIRVEmitIntrinsics::processParamTypes(Function *F, IRBuilder<> &B) {
  B.SetInsertPointPastAllocas(F);
  for (unsigned OpIdx = 0; OpIdx < F->arg_size(); ++OpIdx) {
    Argument *Arg = F->getArg(OpIdx);
    if (!isUntypedPointerTy(Arg->getType()))
      continue;
    Type *ElemTy = GR->findDeducedElementType(Arg);
    if (!ElemTy && (ElemTy = deduceFunParamElementType(F, OpIdx)) != nullptr)
      buildAssignPtr(B, ElemTy, Arg);
  }
}

bool SPIRVEmitIntrinsics::runOnFunction(Function &Func) {
  if (Func.isDeclaration())
    return false;

  const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(Func);
  GR = ST.getSPIRVGlobalRegistry();

  F = &Func;
  IRBuilder<> B(Func.getContext());
  AggrConsts.clear();
  AggrConstTypes.clear();
  AggrStores.clear();

  processParamTypesByFunHeader(F, B);

  // A StoreInst's operand type can be changed during the following
  // transformations, so we need to record it in the set. Also store already
  // transformed types.
  for (auto &I : instructions(Func)) {
    StoreInst *SI = dyn_cast<StoreInst>(&I);
    if (!SI)
      continue;
    Type *ElTy = SI->getValueOperand()->getType();
    if (ElTy->isAggregateType() || ElTy->isVectorTy())
      AggrStores.insert(&I);
  }

  B.SetInsertPoint(&Func.getEntryBlock(), Func.getEntryBlock().begin());
  for (auto &GV : Func.getParent()->globals())
    processGlobalValue(GV, B);

  preprocessUndefs(B);
  preprocessCompositeConstants(B);
  SmallVector<Instruction *> Worklist;
  for (auto &I : instructions(Func))
    Worklist.push_back(&I);

  for (auto &I : Worklist) {
    insertAssignPtrTypeIntrs(I, B);
    insertAssignTypeIntrs(I, B);
    insertPtrCastOrAssignTypeInstr(I, B);
    insertSpirvDecorations(I, B);
  }

  for (auto &I : instructions(Func))
    deduceOperandElementType(&I);

  for (auto *I : Worklist) {
    TrackConstants = true;
    if (!I->getType()->isVoidTy() || isa<StoreInst>(I))
      B.SetInsertPoint(I->getNextNode());
    // Visitors return either the original or a newly created instruction for
    // further processing, or nullptr otherwise.
    I = visit(*I);
    if (!I)
      continue;
    processInstrAfterVisit(I, B);
  }

  return true;
}

bool SPIRVEmitIntrinsics::runOnModule(Module &M) {
  bool Changed = false;

  for (auto &F : M) {
    Changed |= runOnFunction(F);
  }

  for (auto &F : M) {
    // check if function parameter types are set
    if (!F.isDeclaration() && !F.isIntrinsic()) {
      const SPIRVSubtarget &ST = TM->getSubtarget<SPIRVSubtarget>(F);
      GR = ST.getSPIRVGlobalRegistry();
      IRBuilder<> B(F.getContext());
      processParamTypes(&F, B);
    }
  }

  return Changed;
}

ModulePass *llvm::createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM) {
  return new SPIRVEmitIntrinsics(TM);
}