24#include "llvm/IR/IntrinsicsSPIRV.h"
28#include <unordered_set>
52#define GET_BuiltinGroup_DECL
53#include "SPIRVGenTables.inc"
66class SPIRVEmitIntrinsics
68 public InstVisitor<SPIRVEmitIntrinsics, Instruction *> {
72 bool TrackConstants =
true;
73 bool HaveFunPtrs =
false;
82 bool CanTodoType =
true;
83 unsigned TodoTypeSz = 0;
85 void insertTodoType(
Value *
Op) {
87 if (CanTodoType && !isa<GetElementPtrInst>(
Op)) {
94 auto It = TodoType.
find(
Op);
95 if (It != TodoType.
end() && It->second) {
101 if (isa<GetElementPtrInst>(
Op))
103 auto It = TodoType.
find(
Op);
104 return It != TodoType.
end() && It->second;
108 std::unordered_set<Instruction *> TypeValidated;
111 enum WellKnownTypes { Event };
114 Type *deduceElementType(
Value *
I,
bool UnknownElemTypeI8);
115 Type *deduceElementTypeHelper(
Value *
I,
bool UnknownElemTypeI8);
116 Type *deduceElementTypeHelper(
Value *
I, std::unordered_set<Value *> &Visited,
117 bool UnknownElemTypeI8,
118 bool IgnoreKnownType =
false);
119 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
120 bool UnknownElemTypeI8);
121 Type *deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
122 std::unordered_set<Value *> &Visited,
123 bool UnknownElemTypeI8);
125 std::unordered_set<Value *> &Visited,
126 bool UnknownElemTypeI8);
128 bool UnknownElemTypeI8);
131 Type *deduceNestedTypeHelper(
User *U,
bool UnknownElemTypeI8);
133 std::unordered_set<Value *> &Visited,
134 bool UnknownElemTypeI8);
140 bool IsPostprocessing =
false);
149 Args.push_back(Arg2);
150 Args.push_back(buildMD(Arg));
151 for (
auto *Imm : Imms)
153 return B.CreateIntrinsic(IntrID, {
Types},
Args);
156 Type *reconstructType(
Value *
Op,
bool UnknownElemTypeI8,
157 bool IsPostprocessing);
166 bool UnknownElemTypeI8);
171 Type *ExpectedElementType,
172 unsigned OperandToReplace,
179 Type *deduceFunParamElementType(
Function *
F,
unsigned OpIdx);
181 std::unordered_set<Function *> &FVisited);
183 bool deduceOperandElementTypeCalledFunction(
186 void deduceOperandElementTypeFunctionPointer(
188 Type *&KnownElemTy,
bool IsPostprocessing);
189 bool deduceOperandElementTypeFunctionRet(
198 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
201 DenseSet<std::pair<Value *, Value *>> &VisitedSubst);
202 void propagateElemTypeRec(
Value *
Op,
Type *PtrElemTy,
Type *CastElemTy,
203 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
204 std::unordered_set<Value *> &Visited,
214 bool postprocessTypes(
Module &M);
215 bool processFunctionPointers(
Module &M);
216 void parseFunDeclarations(
Module &M);
253 const auto *
II = dyn_cast<IntrinsicInst>(
I);
257 return II->getIntrinsicID() == Intrinsic::experimental_convergence_entry ||
258 II->getIntrinsicID() == Intrinsic::experimental_convergence_loop ||
259 II->getIntrinsicID() == Intrinsic::experimental_convergence_anchor;
262bool expectIgnoredInIRTranslation(
const Instruction *
I) {
263 const auto *
II = dyn_cast<IntrinsicInst>(
I);
266 switch (
II->getIntrinsicID()) {
267 case Intrinsic::invariant_start:
268 case Intrinsic::spv_resource_handlefrombinding:
269 case Intrinsic::spv_resource_getpointer:
276bool allowEmitFakeUse(
const Value *Arg) {
279 if (dyn_cast<AtomicCmpXchgInst>(Arg) || dyn_cast<InsertValueInst>(Arg) ||
280 dyn_cast<UndefValue>(Arg))
282 if (
const auto *LI = dyn_cast<LoadInst>(Arg))
283 if (LI->getType()->isAggregateType())
// Static pass-identification member required by LLVM's legacy pass manager;
// its address (not its value) uniquely identifies this pass.
290char SPIRVEmitIntrinsics::ID = 0;
296 return isa<IntrinsicInst>(
I) &&
297 cast<IntrinsicInst>(
I)->getIntrinsicID() == Intrinsic::spv_assign_type;
301 return isa<StoreInst>(
I) || isa<LoadInst>(
I) || isa<InsertValueInst>(
I) ||
302 isa<ExtractValueInst>(
I) || isa<AtomicCmpXchgInst>(
I);
306 return isa<ConstantArray>(V) || isa<ConstantStruct>(V) ||
307 isa<ConstantDataArray>(V) ||
308 (isa<ConstantAggregateZero>(V) && !V->getType()->isVectorTy());
313 B.SetInsertPoint(
I->getParent()->getFirstNonPHIOrDbgOrAlloca());
319 B.SetCurrentDebugLocation(
I->getDebugLoc());
320 if (
I->getType()->isVoidTy())
321 B.SetInsertPoint(
I->getNextNode());
323 B.SetInsertPoint(*
I->getInsertionPointAfterDef());
329 switch (
Intr->getIntrinsicID()) {
330 case Intrinsic::invariant_start:
331 case Intrinsic::invariant_end:
339 if (
I->getType()->isTokenTy())
341 "does not support token type",
346 if (!
I->hasName() ||
I->getType()->isAggregateType() ||
347 expectIgnoredInIRTranslation(
I))
351 std::vector<Value *> Args = {
I};
353 B.CreateIntrinsic(Intrinsic::spv_assign_name, {
I->getType()}, Args);
356void SPIRVEmitIntrinsics::replaceAllUsesWith(
Value *Src,
Value *Dest,
358 Src->replaceAllUsesWith(Dest);
363 if (isTodoType(Src)) {
366 insertTodoType(Dest);
370void SPIRVEmitIntrinsics::replaceAllUsesWithAndErase(
IRBuilder<> &
B,
375 std::string
Name = Src->hasName() ? Src->getName().str() :
"";
376 Src->eraseFromParent();
386 isa<Argument>(SI->getValueOperand());
403Type *SPIRVEmitIntrinsics::reconstructType(
Value *
Op,
bool UnknownElemTypeI8,
404 bool IsPostprocessing) {
406 if (
auto *OpI = dyn_cast<Instruction>(
Op))
417 return cast<ConstantAsMetadata>(MD->
getMetadata())->getType();
419 if (UnknownElemTypeI8) {
420 if (!IsPostprocessing)
433 allowEmitFakeUse(Arg)) {
438 B.CreateIntrinsic(Intrinsic::spv_value_md, {},
440 AssignCI =
B.CreateIntrinsic(Intrinsic::fake_use, {}, {Arg});
442 AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type, {Arg->
getType()},
452 if (AssignPtrTyCI ==
nullptr ||
453 AssignPtrTyCI->
getParent()->getParent() != CurrF) {
454 AssignPtrTyCI = buildIntrWithMD(
455 Intrinsic::spv_assign_ptr_type, {Arg->
getType()}, OfType, Arg,
461 updateAssignType(AssignPtrTyCI, Arg, OfType);
465void SPIRVEmitIntrinsics::updateAssignType(
CallInst *AssignCI,
Value *Arg,
469 Intrinsic::spv_assign_ptr_type)
481 if (
auto *OpI = dyn_cast<Instruction>(
Op)) {
485 }
else if (
auto *OpA = dyn_cast<Argument>(
Op)) {
486 B.SetInsertPointPastAllocas(OpA->getParent());
489 B.SetInsertPoint(
F->getEntryBlock().getFirstNonPHIOrDbgOrAlloca());
491 Type *OpTy =
Op->getType();
496 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
497 buildAssignPtr(
B, ElemTy, PtrCasted);
501void SPIRVEmitIntrinsics::replaceUsesOfWithSpvPtrcast(
506 auto It = Ptrcasts.
find(
F);
507 if (It == Ptrcasts.
end()) {
508 PtrCastedI = buildSpvPtrcast(
F,
Op, ElemTy);
509 Ptrcasts[
F] = PtrCastedI;
511 PtrCastedI = It->second;
513 I->replaceUsesOfWith(
Op, PtrCastedI);
516void SPIRVEmitIntrinsics::propagateElemType(
518 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
521 for (
auto *U :
Users) {
524 if (!VisitedSubst.insert(std::make_pair(U,
Op)).second)
529 if (isa<GetElementPtrInst>(UI) ||
530 TypeValidated.find(UI) != TypeValidated.end())
531 replaceUsesOfWithSpvPtrcast(
Op, ElemTy, UI, Ptrcasts);
535void SPIRVEmitIntrinsics::propagateElemTypeRec(
537 DenseSet<std::pair<Value *, Value *>> &VisitedSubst) {
538 std::unordered_set<Value *> Visited;
540 propagateElemTypeRec(
Op, PtrElemTy, CastElemTy, VisitedSubst, Visited,
544void SPIRVEmitIntrinsics::propagateElemTypeRec(
546 DenseSet<std::pair<Value *, Value *>> &VisitedSubst,
547 std::unordered_set<Value *> &Visited,
549 if (!Visited.insert(
Op).second)
552 for (
auto *U :
Users) {
555 if (!VisitedSubst.insert(std::make_pair(U,
Op)).second)
560 if (isa<GetElementPtrInst>(UI) ||
561 TypeValidated.find(UI) != TypeValidated.end())
562 replaceUsesOfWithSpvPtrcast(
Op, CastElemTy, UI, Ptrcasts);
570SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
Type *ValueTy,
Value *Operand,
571 bool UnknownElemTypeI8) {
572 std::unordered_set<Value *> Visited;
573 return deduceElementTypeByValueDeep(ValueTy, Operand, Visited,
577Type *SPIRVEmitIntrinsics::deduceElementTypeByValueDeep(
578 Type *ValueTy,
Value *Operand, std::unordered_set<Value *> &Visited,
579 bool UnknownElemTypeI8) {
582 if (
auto *PtrTy = dyn_cast<PointerType>(Ty)) {
584 deduceElementTypeHelper(Operand, Visited, UnknownElemTypeI8))
587 Ty = deduceNestedTypeHelper(dyn_cast<User>(Operand), Ty, Visited,
595Type *SPIRVEmitIntrinsics::deduceElementTypeByUsersDeep(
596 Value *
Op, std::unordered_set<Value *> &Visited,
bool UnknownElemTypeI8) {
608 for (
User *OpU :
Op->users()) {
609 if (
Instruction *Inst = dyn_cast<Instruction>(OpU)) {
610 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited, UnknownElemTypeI8))
622 Function *CalledF,
unsigned OpIdx) {
623 if ((DemangledName.
starts_with(
"__spirv_ocl_printf(") ||
626 return IntegerType::getInt8Ty(CalledF->
getContext());
632Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
Value *
I,
633 bool UnknownElemTypeI8) {
634 std::unordered_set<Value *> Visited;
635 return deduceElementTypeHelper(
I, Visited, UnknownElemTypeI8);
638void SPIRVEmitIntrinsics::maybeAssignPtrType(
Type *&Ty,
Value *
Op,
Type *RefTy,
639 bool UnknownElemTypeI8) {
641 if (!UnknownElemTypeI8)
648Type *SPIRVEmitIntrinsics::deduceElementTypeHelper(
649 Value *
I, std::unordered_set<Value *> &Visited,
bool UnknownElemTypeI8,
650 bool IgnoreKnownType) {
656 if (!IgnoreKnownType)
661 if (!Visited.insert(
I).second)
667 if (
auto *
Ref = dyn_cast<AllocaInst>(
I)) {
668 maybeAssignPtrType(Ty,
I,
Ref->getAllocatedType(), UnknownElemTypeI8);
669 }
else if (
auto *
Ref = dyn_cast<GetElementPtrInst>(
I)) {
673 Ty =
Ref->getSourceElementType();
677 Ty =
Ref->getResultElementType();
679 }
else if (
auto *
Ref = dyn_cast<LoadInst>(
I)) {
683 KnownTy =
Op->getType();
685 maybeAssignPtrType(Ty,
I, ElemTy, UnknownElemTypeI8);
686 }
else if (
auto *
Ref = dyn_cast<GlobalValue>(
I)) {
687 Ty = deduceElementTypeByValueDeep(
689 Ref->getNumOperands() > 0 ?
Ref->getOperand(0) :
nullptr, Visited,
691 }
else if (
auto *
Ref = dyn_cast<AddrSpaceCastInst>(
I)) {
692 Type *RefTy = deduceElementTypeHelper(
Ref->getPointerOperand(), Visited,
694 maybeAssignPtrType(Ty,
I, RefTy, UnknownElemTypeI8);
695 }
else if (
auto *
Ref = dyn_cast<BitCastInst>(
I)) {
696 if (
Type *Src =
Ref->getSrcTy(), *Dest =
Ref->getDestTy();
698 Ty = deduceElementTypeHelper(
Ref->getOperand(0), Visited,
700 }
else if (
auto *
Ref = dyn_cast<AtomicCmpXchgInst>(
I)) {
703 Ty = deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8);
704 }
else if (
auto *
Ref = dyn_cast<AtomicRMWInst>(
I)) {
707 Ty = deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8);
708 }
else if (
auto *
Ref = dyn_cast<PHINode>(
I)) {
709 Type *BestTy =
nullptr;
712 for (
int i =
Ref->getNumIncomingValues() - 1; i >= 0; --i) {
713 Ty = deduceElementTypeByUsersDeep(
Ref->getIncomingValue(i), Visited,
720 if (It.first->second > MaxN) {
721 MaxN = It.first->second;
728 }
else if (
auto *
Ref = dyn_cast<SelectInst>(
I)) {
729 for (
Value *
Op : {
Ref->getTrueValue(),
Ref->getFalseValue()}) {
730 Ty = deduceElementTypeByUsersDeep(
Op, Visited, UnknownElemTypeI8);
734 }
else if (
auto *CI = dyn_cast<CallInst>(
I)) {
739 {
"__spirv_GenericCastToPtr_ToGlobal", 0},
740 {
"__spirv_GenericCastToPtr_ToLocal", 0},
741 {
"__spirv_GenericCastToPtr_ToPrivate", 0},
742 {
"__spirv_GenericCastToPtrExplicit_ToGlobal", 0},
743 {
"__spirv_GenericCastToPtrExplicit_ToLocal", 0},
744 {
"__spirv_GenericCastToPtrExplicit_ToPrivate", 0}};
747 auto *
II = dyn_cast<IntrinsicInst>(
I);
748 if (
II &&
II->getIntrinsicID() == Intrinsic::spv_resource_getpointer) {
749 auto *ImageType = cast<TargetExtType>(
II->getOperand(0)->getType());
750 assert(ImageType->getTargetExtName() ==
"spirv.Image");
751 Ty = ImageType->getTypeParameter(0);
753 std::string DemangledName =
755 if (DemangledName.length() > 0)
757 auto AsArgIt = ResTypeByArg.
find(DemangledName);
758 if (AsArgIt != ResTypeByArg.
end())
759 Ty = deduceElementTypeHelper(CI->
getArgOperand(AsArgIt->second),
760 Visited, UnknownElemTypeI8);
767 if (Ty && !IgnoreKnownType) {
778Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
User *U,
779 bool UnknownElemTypeI8) {
780 std::unordered_set<Value *> Visited;
781 return deduceNestedTypeHelper(U,
U->getType(), Visited, UnknownElemTypeI8);
784Type *SPIRVEmitIntrinsics::deduceNestedTypeHelper(
785 User *U,
Type *OrigTy, std::unordered_set<Value *> &Visited,
786 bool UnknownElemTypeI8) {
795 if (!Visited.insert(U).second)
798 if (dyn_cast<StructType>(OrigTy)) {
801 for (
unsigned i = 0; i <
U->getNumOperands(); ++i) {
803 Type *OpTy =
Op->getType();
806 if (
auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
808 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
811 Ty = deduceNestedTypeHelper(dyn_cast<User>(
Op), OpTy, Visited,
816 Change |= Ty != OpTy;
823 }
else if (
auto *ArrTy = dyn_cast<ArrayType>(OrigTy)) {
824 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
825 Type *OpTy = ArrTy->getElementType();
827 if (
auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
829 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
832 Ty = deduceNestedTypeHelper(dyn_cast<User>(
Op), OpTy, Visited,
836 Type *NewTy = ArrayType::get(Ty, ArrTy->getNumElements());
841 }
else if (
auto *VecTy = dyn_cast<VectorType>(OrigTy)) {
842 if (
Value *
Op =
U->getNumOperands() > 0 ?
U->getOperand(0) :
nullptr) {
843 Type *OpTy = VecTy->getElementType();
845 if (
auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
847 deduceElementTypeHelper(
Op, Visited, UnknownElemTypeI8))
850 Ty = deduceNestedTypeHelper(dyn_cast<User>(
Op), OpTy, Visited,
854 Type *NewTy = VectorType::get(Ty, VecTy->getElementCount());
864Type *SPIRVEmitIntrinsics::deduceElementType(
Value *
I,
bool UnknownElemTypeI8) {
865 if (
Type *Ty = deduceElementTypeHelper(
I, UnknownElemTypeI8))
867 if (!UnknownElemTypeI8)
870 return IntegerType::getInt8Ty(
I->getContext());
874 Value *PointerOperand) {
878 auto *PtrTy = dyn_cast<PointerType>(
I->getType());
888bool SPIRVEmitIntrinsics::deduceOperandElementTypeCalledFunction(
890 Type *&KnownElemTy) {
894 std::string DemangledName =
896 if (DemangledName.length() > 0 &&
900 DemangledName,
ST.getPreferredInstructionSet());
901 if (Opcode == SPIRV::OpGroupAsyncCopy) {
902 for (
unsigned i = 0, PtrCnt = 0; i < CI->
arg_size() && PtrCnt < 2; ++i) {
908 KnownElemTy = ElemTy;
909 Ops.push_back(std::make_pair(
Op, i));
911 }
else if (Grp == SPIRV::Atomic || Grp == SPIRV::AtomicFloating) {
918 case SPIRV::OpAtomicLoad:
919 case SPIRV::OpAtomicCompareExchangeWeak:
920 case SPIRV::OpAtomicCompareExchange:
921 case SPIRV::OpAtomicExchange:
922 case SPIRV::OpAtomicIAdd:
923 case SPIRV::OpAtomicISub:
924 case SPIRV::OpAtomicOr:
925 case SPIRV::OpAtomicXor:
926 case SPIRV::OpAtomicAnd:
927 case SPIRV::OpAtomicUMin:
928 case SPIRV::OpAtomicUMax:
929 case SPIRV::OpAtomicSMin:
930 case SPIRV::OpAtomicSMax: {
934 Ops.push_back(std::make_pair(
Op, 0));
943void SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionPointer(
945 Type *&KnownElemTy,
bool IsPostprocessing) {
949 Ops.push_back(std::make_pair(
Op, std::numeric_limits<unsigned>::max()));
951 bool IsNewFTy =
false, IsUncomplete =
false;
979 if (!IsPostprocessing && IsUncomplete)
982 IsNewFTy ? FunctionType::get(
RetTy, ArgTys, FTy->isVarArg()) : FTy;
985bool SPIRVEmitIntrinsics::deduceOperandElementTypeFunctionRet(
999 for (
User *U :
F->users()) {
1000 CallInst *CI = dyn_cast<CallInst>(U);
1006 propagateElemType(CI, PrevElemTy, VisitedSubst);
1016 for (
Instruction *UncompleteRetI : *UncompleteRets)
1017 deduceOperandElementType(UncompleteRetI,
nullptr, AskOps,
1019 }
else if (UncompleteRets) {
1022 TypeValidated.insert(
I);
1030void SPIRVEmitIntrinsics::deduceOperandElementType(
1034 Type *KnownElemTy =
nullptr;
1035 bool Uncomplete =
false;
1037 if (
auto *
Ref = dyn_cast<PHINode>(
I)) {
1041 Uncomplete = isTodoType(
I);
1042 for (
unsigned i = 0; i <
Ref->getNumIncomingValues(); i++) {
1047 }
else if (
auto *
Ref = dyn_cast<AddrSpaceCastInst>(
I)) {
1051 Uncomplete = isTodoType(
I);
1052 Ops.
push_back(std::make_pair(
Ref->getPointerOperand(), 0));
1053 }
else if (
auto *
Ref = dyn_cast<BitCastInst>(
I)) {
1059 Uncomplete = isTodoType(
I);
1061 }
else if (
auto *
Ref = dyn_cast<GetElementPtrInst>(
I)) {
1064 KnownElemTy =
Ref->getSourceElementType();
1067 }
else if (
auto *
Ref = dyn_cast<LoadInst>(
I)) {
1068 KnownElemTy =
I->getType();
1076 }
else if (
auto *
Ref = dyn_cast<StoreInst>(
I)) {
1078 reconstructType(
Ref->getValueOperand(),
false, IsPostprocessing)))
1085 }
else if (
auto *
Ref = dyn_cast<AtomicCmpXchgInst>(
I)) {
1091 }
else if (
auto *
Ref = dyn_cast<AtomicRMWInst>(
I)) {
1097 }
else if (
auto *
Ref = dyn_cast<SelectInst>(
I)) {
1101 Uncomplete = isTodoType(
I);
1102 for (
unsigned i = 0; i <
Ref->getNumOperands(); i++) {
1107 }
else if (
auto *
Ref = dyn_cast<ReturnInst>(
I)) {
1113 if (deduceOperandElementTypeFunctionRet(
I, UncompleteRets, AskOps,
1114 IsPostprocessing, KnownElemTy,
Op,
1117 Uncomplete = isTodoType(CurrF);
1119 }
else if (
auto *
Ref = dyn_cast<ICmpInst>(
I)) {
1127 KnownElemTy = ElemTy0;
1128 Uncomplete = isTodoType(Op0);
1130 }
else if (ElemTy1) {
1131 KnownElemTy = ElemTy1;
1132 Uncomplete = isTodoType(Op1);
1135 }
else if (
CallInst *CI = dyn_cast<CallInst>(
I)) {
1137 deduceOperandElementTypeCalledFunction(CI, Ops, KnownElemTy);
1138 else if (HaveFunPtrs)
1139 deduceOperandElementTypeFunctionPointer(CI, Ops, KnownElemTy,
1144 if (!KnownElemTy || Ops.
size() == 0)
1149 for (
auto &OpIt : Ops) {
1151 if (
Op->use_empty())
1155 Type *AskTy =
nullptr;
1157 if (IsPostprocessing && AskOps) {
1163 if (Ty == KnownElemTy)
1166 Type *OpTy =
Op->getType();
1173 else if (!IsPostprocessing)
1177 if (AssignCI ==
nullptr) {
1181 buildIntrWithMD(Intrinsic::spv_assign_ptr_type, {OpTy}, OpTyVal,
Op,
1185 updateAssignType(AssignCI,
Op, OpTyVal);
1187 std::make_pair(
I,
Op)};
1188 propagateElemTypeRec(
Op, KnownElemTy, PrevElemTy, VisitedSubst);
1193 buildSpvPtrcast(
I->getParent()->getParent(),
Op, KnownElemTy);
1194 if (OpIt.second == std::numeric_limits<unsigned>::max())
1195 dyn_cast<CallInst>(
I)->setCalledOperand(PtrCastI);
1197 I->setOperand(OpIt.second, PtrCastI);
1200 TypeValidated.insert(
I);
1203void SPIRVEmitIntrinsics::replaceMemInstrUses(
Instruction *Old,
1208 if (isAssignTypeInstr(U)) {
1209 B.SetInsertPoint(U);
1212 B.CreateIntrinsic(Intrinsic::spv_assign_type, {
New->getType()},
Args);
1214 U->eraseFromParent();
1217 U->replaceUsesOfWith(Old, New);
1225void SPIRVEmitIntrinsics::preprocessUndefs(
IRBuilder<> &
B) {
1226 std::queue<Instruction *> Worklist;
1230 while (!Worklist.empty()) {
1232 bool BPrepared =
false;
1235 for (
auto &
Op :
I->operands()) {
1236 auto *AggrUndef = dyn_cast<UndefValue>(
Op);
1237 if (!AggrUndef || !
Op->getType()->isAggregateType())
1244 auto *IntrUndef =
B.CreateIntrinsic(Intrinsic::spv_undef, {}, {});
1245 Worklist.push(IntrUndef);
1246 I->replaceUsesOfWith(
Op, IntrUndef);
1247 AggrConsts[IntrUndef] = AggrUndef;
1248 AggrConstTypes[IntrUndef] = AggrUndef->getType();
1253void SPIRVEmitIntrinsics::preprocessCompositeConstants(
IRBuilder<> &
B) {
1254 std::queue<Instruction *> Worklist;
1258 while (!Worklist.empty()) {
1259 auto *
I = Worklist.front();
1260 bool IsPhi = isa<PHINode>(
I), BPrepared =
false;
1262 bool KeepInst =
false;
1263 for (
const auto &
Op :
I->operands()) {
1265 Type *ResTy =
nullptr;
1266 if (
auto *COp = dyn_cast<ConstantVector>(
Op)) {
1267 AggrConst = cast<Constant>(COp);
1268 ResTy = COp->getType();
1269 }
else if (
auto *COp = dyn_cast<ConstantArray>(
Op)) {
1270 AggrConst = cast<Constant>(COp);
1271 ResTy =
B.getInt32Ty();
1272 }
else if (
auto *COp = dyn_cast<ConstantStruct>(
Op)) {
1273 AggrConst = cast<Constant>(COp);
1274 ResTy =
B.getInt32Ty();
1275 }
else if (
auto *COp = dyn_cast<ConstantDataArray>(
Op)) {
1276 AggrConst = cast<Constant>(COp);
1277 ResTy =
B.getInt32Ty();
1278 }
else if (
auto *COp = dyn_cast<ConstantAggregateZero>(
Op)) {
1279 AggrConst = cast<Constant>(COp);
1280 ResTy =
Op->getType()->isVectorTy() ? COp->getType() :
B.getInt32Ty();
1284 if (
auto *COp = dyn_cast<ConstantDataSequential>(
Op))
1285 for (
unsigned i = 0; i < COp->getNumElements(); ++i)
1286 Args.push_back(COp->getElementAsConstant(i));
1288 for (
auto &COp : AggrConst->
operands())
1289 Args.push_back(COp);
1291 IsPhi ?
B.SetInsertPointPastAllocas(
I->getParent()->getParent())
1292 :
B.SetInsertPoint(
I);
1296 B.CreateIntrinsic(Intrinsic::spv_const_composite, {ResTy}, {
Args});
1298 I->replaceUsesOfWith(
Op, CI);
1300 AggrConsts[CI] = AggrConst;
1301 AggrConstTypes[CI] = deduceNestedTypeHelper(AggrConst,
false);
1313 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {
I->getType()},
1318 unsigned RoundingModeDeco,
1325 ConstantInt::get(Int32Ty, SPIRV::Decoration::FPRoundingMode)),
1334 MDNode *SaturatedConversionNode =
1336 Int32Ty, SPIRV::Decoration::SaturatedConversion))});
1341 if (!
Call.isInlineAsm())
1352 for (
unsigned OpIdx = 0; OpIdx <
Call.arg_size(); OpIdx++)
1353 Args.push_back(
Call.getArgOperand(OpIdx));
1356 B.SetInsertPoint(&Call);
1357 B.CreateIntrinsic(Intrinsic::spv_inline_asm, {}, {
Args});
1365 if (!
RM.has_value())
1367 unsigned RoundingModeDeco = std::numeric_limits<unsigned>::max();
1368 switch (
RM.value()) {
1372 case RoundingMode::NearestTiesToEven:
1373 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTE;
1375 case RoundingMode::TowardNegative:
1376 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTN;
1378 case RoundingMode::TowardPositive:
1379 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTP;
1381 case RoundingMode::TowardZero:
1382 RoundingModeDeco = SPIRV::FPRoundingMode::FPRoundingMode::RTZ;
1384 case RoundingMode::Dynamic:
1385 case RoundingMode::NearestTiesToAway:
1389 if (RoundingModeDeco == std::numeric_limits<unsigned>::max())
1398 B.SetInsertPoint(&
I);
1401 for (
auto &
Op :
I.operands()) {
1402 if (
Op.get()->getType()->isSized()) {
1404 }
else if (
BasicBlock *BB = dyn_cast<BasicBlock>(
Op.get())) {
1411 CallInst *NewI =
B.CreateIntrinsic(Intrinsic::spv_switch,
1416 I.eraseFromParent();
1419 B.SetInsertPoint(ParentBB);
1430 B.SetInsertPoint(&
I);
1433 Args.push_back(
B.getInt1(
I.isInBounds()));
1434 for (
auto &
Op :
I.operands())
1436 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_gep, {
Types}, {
Args});
1437 replaceAllUsesWithAndErase(
B, &
I, NewI);
1443 B.SetInsertPoint(&
I);
1452 I.eraseFromParent();
1458 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_bitcast, {
Types}, {
Args});
1459 replaceAllUsesWithAndErase(
B, &
I, NewI);
1463void SPIRVEmitIntrinsics::insertAssignPtrTypeTargetExt(
1465 Type *VTy =
V->getType();
1470 if (ElemTy != AssignedType)
1475 buildAssignType(
B, AssignedType, V);
1480 dyn_cast<ConstantAsMetadata>(
1481 cast<MetadataAsValue>(AssignCI->
getOperand(1))->getMetadata())
1483 if (CurrentType == AssignedType)
1490 " for value " +
V->getName(),
1498void SPIRVEmitIntrinsics::replacePointerOperandWithPtrCast(
1501 TypeValidated.insert(
I);
1504 Type *PointerElemTy = deduceElementTypeHelper(Pointer,
false);
1505 if (PointerElemTy == ExpectedElementType ||
1513 bool FirstPtrCastOrAssignPtrType =
true;
1519 auto *
II = dyn_cast<IntrinsicInst>(
User);
1521 (
II->getIntrinsicID() != Intrinsic::spv_assign_ptr_type &&
1522 II->getIntrinsicID() != Intrinsic::spv_ptrcast) ||
1523 II->getOperand(0) != Pointer)
1528 FirstPtrCastOrAssignPtrType =
false;
1529 if (
II->getOperand(1) != VMD ||
1530 dyn_cast<ConstantInt>(
II->getOperand(2))->getSExtValue() !=
1536 if (
II->getIntrinsicID() != Intrinsic::spv_ptrcast)
1541 if (
II->getParent() !=
I->getParent())
1544 I->setOperand(OperandToReplace,
II);
1548 if (isa<Instruction>(Pointer) || isa<Argument>(Pointer)) {
1549 if (FirstPtrCastOrAssignPtrType) {
1552 buildAssignPtr(
B, ExpectedElementType, Pointer);
1554 }
else if (isTodoType(Pointer)) {
1555 eraseTodoType(Pointer);
1556 if (!isa<CallInst>(Pointer) && !isa<GetElementPtrInst>(Pointer)) {
1563 std::make_pair(
I, Pointer)};
1564 updateAssignType(AssignCI, Pointer, ExpectedElementVal);
1565 propagateElemType(Pointer, PrevElemTy, VisitedSubst);
1567 buildAssignPtr(
B, ExpectedElementType, Pointer);
1577 auto *PtrCastI =
B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
1578 I->setOperand(OperandToReplace, PtrCastI);
1580 buildAssignPtr(
B, ExpectedElementType, PtrCastI);
1583void SPIRVEmitIntrinsics::insertPtrCastOrAssignTypeInstr(
Instruction *
I,
1588 replacePointerOperandWithPtrCast(
1589 I,
SI->getValueOperand(), IntegerType::getInt8Ty(CurrF->
getContext()),
1595 Type *OpTy =
Op->getType();
1596 if (
auto *OpI = dyn_cast<Instruction>(
Op))
1598 if (OpTy ==
Op->getType())
1599 OpTy = deduceElementTypeByValueDeep(OpTy,
Op,
false);
1600 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 1,
B);
1603 if (
LoadInst *LI = dyn_cast<LoadInst>(
I)) {
1605 Type *OpTy = LI->getType();
1606 if (
auto *PtrTy = dyn_cast<PointerType>(OpTy)) {
1610 Type *NewOpTy = OpTy;
1611 OpTy = deduceElementTypeByValueDeep(OpTy, LI,
false);
1612 if (OpTy == NewOpTy)
1613 insertTodoType(Pointer);
1616 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 0,
B);
1621 Type *OpTy = GEPI->getSourceElementType();
1622 replacePointerOperandWithPtrCast(
I, Pointer, OpTy, 0,
B);
1624 insertTodoType(Pointer);
1636 std::string DemangledName =
1640 bool HaveTypes =
false;
1641 for (
unsigned OpIdx = 0; OpIdx < CalledF->
arg_size(); ++OpIdx) {
1659 if (
Instruction *Inst = dyn_cast<Instruction>(U)) {
1660 if ((ElemTy = deduceElementTypeHelper(Inst,
false)) !=
nullptr)
1666 HaveTypes |= ElemTy !=
nullptr;
1671 if (DemangledName.empty() && !HaveTypes)
1674 for (
unsigned OpIdx = 0; OpIdx < CI->
arg_size(); OpIdx++) {
1680 if (!isa<Instruction>(ArgOperand) && !isa<Argument>(ArgOperand)) {
1689 Type *ExpectedType =
1690 OpIdx < CalledArgTys.
size() ? CalledArgTys[OpIdx] :
nullptr;
1691 if (!ExpectedType && !DemangledName.empty())
1693 DemangledName, OpIdx,
I->getContext());
1694 if (!ExpectedType || ExpectedType->
isVoidTy())
1699 insertAssignPtrTypeTargetExt(cast<TargetExtType>(ExpectedType),
1702 replacePointerOperandWithPtrCast(CI, ArgOperand, ExpectedType, OpIdx,
B);
1708 I.getOperand(1)->getType(),
1709 I.getOperand(2)->getType()};
1711 B.SetInsertPoint(&
I);
1713 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_insertelt, {
Types}, {
Args});
1714 replaceAllUsesWithAndErase(
B, &
I, NewI);
1721 B.SetInsertPoint(&
I);
1723 I.getIndexOperand()->getType()};
1725 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_extractelt, {
Types}, {
Args});
1726 replaceAllUsesWithAndErase(
B, &
I, NewI);
1732 B.SetInsertPoint(&
I);
1735 for (
auto &
Op :
I.operands())
1736 if (isa<UndefValue>(
Op))
1740 for (
auto &
Op :
I.indices())
1741 Args.push_back(
B.getInt32(
Op));
1743 B.CreateIntrinsic(Intrinsic::spv_insertv, {
Types}, {
Args});
1744 replaceMemInstrUses(&
I, NewI,
B);
1749 if (
I.getAggregateOperand()->getType()->isAggregateType())
1752 B.SetInsertPoint(&
I);
1754 for (
auto &
Op :
I.operands())
1756 for (
auto &
Op :
I.indices())
1757 Args.push_back(
B.getInt32(
Op));
1759 B.CreateIntrinsic(Intrinsic::spv_extractv, {
I.getType()}, {
Args});
1760 replaceAllUsesWithAndErase(
B, &
I, NewI);
1765 if (!
I.getType()->isAggregateType())
1768 B.SetInsertPoint(&
I);
1769 TrackConstants =
false;
1770 const auto *TLI =
TM->getSubtargetImpl()->getTargetLowering();
1774 B.CreateIntrinsic(Intrinsic::spv_load, {
I.getOperand(0)->
getType()},
1775 {
I.getPointerOperand(),
B.getInt16(Flags),
1776 B.getInt8(
I.getAlign().value())});
1777 replaceMemInstrUses(&
I, NewI,
B);
1785 B.SetInsertPoint(&
I);
1786 TrackConstants =
false;
1787 const auto *TLI =
TM->getSubtargetImpl()->getTargetLowering();
1790 auto *PtrOp =
I.getPointerOperand();
1791 auto *NewI =
B.CreateIntrinsic(
1792 Intrinsic::spv_store, {
I.getValueOperand()->
getType(), PtrOp->getType()},
1793 {
I.getValueOperand(), PtrOp,
B.getInt16(Flags),
1794 B.getInt8(
I.getAlign().value())});
1795 I.eraseFromParent();
1800 Value *ArraySize =
nullptr;
1801 if (
I.isArrayAllocation()) {
1804 SPIRV::Extension::SPV_INTEL_variable_length_array))
1806 "array allocation: this instruction requires the following "
1807 "SPIR-V extension: SPV_INTEL_variable_length_array",
1809 ArraySize =
I.getArraySize();
1812 B.SetInsertPoint(&
I);
1813 TrackConstants =
false;
1814 Type *PtrTy =
I.getType();
1817 ?
B.CreateIntrinsic(Intrinsic::spv_alloca_array,
1818 {PtrTy, ArraySize->
getType()},
1819 {ArraySize,
B.getInt8(
I.getAlign().value())})
1820 :
B.CreateIntrinsic(Intrinsic::spv_alloca, {PtrTy},
1821 {B.getInt8(I.getAlign().value())});
1822 replaceAllUsesWithAndErase(
B, &
I, NewI);
1827 assert(
I.getType()->isAggregateType() &&
"Aggregate result is expected");
1829 B.SetInsertPoint(&
I);
1831 for (
auto &
Op :
I.operands())
1833 Args.push_back(
B.getInt32(
1835 Args.push_back(
B.getInt32(
1837 Args.push_back(
B.getInt32(
1839 auto *NewI =
B.CreateIntrinsic(Intrinsic::spv_cmpxchg,
1841 replaceMemInstrUses(&
I, NewI,
B);
1847 B.SetInsertPoint(&
I);
1848 B.CreateIntrinsic(Intrinsic::spv_unreachable, {}, {});
1855 if (GV.
getName() ==
"llvm.global.annotations")
1862 deduceElementTypeHelper(&GV,
false);
1866 auto *InitInst =
B.CreateIntrinsic(Intrinsic::spv_init_global,
1868 InitInst->setArgOperand(1,
Init);
1871 B.CreateIntrinsic(Intrinsic::spv_unref_global, GV.
getType(), &GV);
1877bool SPIRVEmitIntrinsics::insertAssignPtrTypeIntrs(
Instruction *
I,
1879 bool UnknownElemTypeI8) {
1885 if (
Type *ElemTy = deduceElementType(
I, UnknownElemTypeI8)) {
1886 buildAssignPtr(
B, ElemTy,
I);
1892void SPIRVEmitIntrinsics::insertAssignTypeIntrs(
Instruction *
I,
1896 {
"async_work_group_copy", WellKnownTypes::Event},
1897 {
"async_work_group_strided_copy", WellKnownTypes::Event},
1898 {
"__spirv_GroupAsyncCopy", WellKnownTypes::Event}};
1902 bool IsKnown =
false;
1903 if (
auto *CI = dyn_cast<CallInst>(
I)) {
1907 std::string DemangledName =
1910 if (DemangledName.length() > 0)
1913 auto ResIt = ResTypeWellKnown.
find(DemangledName);
1914 if (ResIt != ResTypeWellKnown.
end()) {
1917 switch (ResIt->second) {
1918 case WellKnownTypes::Event:
1925 switch (DecorationId) {
1928 case FPDecorationId::SAT:
1931 case FPDecorationId::RTE:
1933 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTE,
B);
1935 case FPDecorationId::RTZ:
1937 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTZ,
B);
1939 case FPDecorationId::RTP:
1941 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTP,
B);
1943 case FPDecorationId::RTN:
1945 CI, SPIRV::FPRoundingMode::FPRoundingMode::RTN,
B);
1951 Type *Ty =
I->getType();
1954 Type *TypeToAssign = Ty;
1955 if (
auto *
II = dyn_cast<IntrinsicInst>(
I)) {
1956 if (
II->getIntrinsicID() == Intrinsic::spv_const_composite ||
1957 II->getIntrinsicID() == Intrinsic::spv_undef) {
1958 auto It = AggrConstTypes.
find(
II);
1959 if (It == AggrConstTypes.
end())
1961 TypeToAssign = It->second;
1965 buildAssignType(
B, TypeToAssign,
I);
1967 for (
const auto &
Op :
I->operands()) {
1968 if (isa<ConstantPointerNull>(
Op) || isa<UndefValue>(
Op) ||
1970 (isa<ConstantExpr>(
Op) && isa<GEPOperator>(
Op))) {
1972 Type *OpTy =
Op->getType();
1975 buildIntrWithMD(Intrinsic::spv_assign_type, {
B.getInt32Ty()},
Op,
1978 }
else if (!isa<Instruction>(
Op)) {
1979 Type *OpTy =
Op->getType();
1982 buildAssignPtr(
B, OpTyElem,
Op);
1985 buildAssignPtr(
B, ElemTy ? ElemTy : deduceElementType(
Op,
true),
Op);
1987 CallInst *AssignCI = buildIntrWithMD(Intrinsic::spv_assign_type,
1988 {OpTy},
Op,
Op, {},
B);
1996void SPIRVEmitIntrinsics::insertSpirvDecorations(
Instruction *
I,
1998 if (
MDNode *MD =
I->getMetadata(
"spirv.Decorations")) {
2000 B.CreateIntrinsic(Intrinsic::spv_assign_decoration, {
I->getType()},
2005void SPIRVEmitIntrinsics::processInstrAfterVisit(
Instruction *
I,
2007 auto *
II = dyn_cast<IntrinsicInst>(
I);
2008 bool IsConstComposite =
2009 II &&
II->getIntrinsicID() == Intrinsic::spv_const_composite;
2010 if (IsConstComposite && TrackConstants) {
2012 auto t = AggrConsts.
find(
I);
2015 buildIntrWithMD(Intrinsic::spv_track_constant,
2016 {
II->getType(),
II->getType()}, t->second,
I, {},
B);
2018 NewOp->setArgOperand(0,
I);
2020 bool IsPhi = isa<PHINode>(
I), BPrepared =
false;
2021 for (
const auto &
Op :
I->operands()) {
2022 if (isa<PHINode>(
I) || isa<SwitchInst>(
I))
2023 TrackConstants =
false;
2024 if ((isa<ConstantData>(
Op) || isa<ConstantExpr>(
Op)) && TrackConstants) {
2025 unsigned OpNo =
Op.getOperandNo();
2026 if (
II && ((
II->getIntrinsicID() == Intrinsic::spv_gep && OpNo == 0) ||
2027 (
II->paramHasAttr(OpNo, Attribute::ImmArg))))
2030 IsPhi ?
B.SetInsertPointPastAllocas(
I->getParent()->getParent())
2031 :
B.SetInsertPoint(
I);
2034 Type *OpTy =
Op->getType();
2039 buildIntrWithMD(Intrinsic::spv_track_constant,
2040 {OpTy, OpTyVal->
getType()},
Op, OpTyVal, {},
B);
2041 Type *OpElemTy =
nullptr;
2044 OpElemTy != IntegerType::getInt8Ty(
I->getContext())) {
2045 buildAssignPtr(
B, IntegerType::getInt8Ty(
I->getContext()), NewOp);
2051 B.CreateIntrinsic(Intrinsic::spv_ptrcast, {
Types},
Args);
2052 buildAssignPtr(
B, OpElemTy, PtrCasted);
2055 I->setOperand(OpNo, NewOp);
2061Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
Function *
F,
2063 std::unordered_set<Function *> FVisited;
2064 return deduceFunParamElementType(
F, OpIdx, FVisited);
2067Type *SPIRVEmitIntrinsics::deduceFunParamElementType(
2068 Function *
F,
unsigned OpIdx, std::unordered_set<Function *> &FVisited) {
2070 if (!FVisited.insert(
F).second)
2073 std::unordered_set<Value *> Visited;
2076 for (
User *U :
F->users()) {
2077 CallInst *CI = dyn_cast<CallInst>(U);
2078 if (!CI || OpIdx >= CI->
arg_size())
2088 if (
Type *Ty = deduceElementTypeHelper(OpArg, Visited,
false))
2093 if (!Inst || Inst == CI)
2096 if (
Type *Ty = deduceElementTypeHelper(Inst, Visited,
false))
2103 if (FVisited.find(OuterF) != FVisited.end())
2105 for (
unsigned i = 0; i < OuterF->
arg_size(); ++i) {
2106 if (OuterF->
getArg(i) == OpArg) {
2107 Lookup.push_back(std::make_pair(OuterF, i));
2114 for (
auto &Pair :
Lookup) {
2115 if (
Type *Ty = deduceFunParamElementType(Pair.first, Pair.second, FVisited))
2122void SPIRVEmitIntrinsics::processParamTypesByFunHeader(
Function *
F,
2124 B.SetInsertPointPastAllocas(
F);
2125 for (
unsigned OpIdx = 0; OpIdx <
F->arg_size(); ++OpIdx) {
2134 buildAssignPtr(
B, ElemTy, Arg);
2138 for (
User *U :
F->users()) {
2139 CallInst *CI = dyn_cast<CallInst>(U);
2140 if (!CI || OpIdx >= CI->
arg_size())
2150 buildAssignPtr(
B, ElemTy, Arg);
2155 CallInst *CI = dyn_cast<CallInst>(U);
2158 CI->
getParent()->getParent() == CurrF) {
2160 deduceOperandElementTypeFunctionPointer(CI, Ops, ElemTy,
false);
2162 buildAssignPtr(
B, ElemTy, Arg);
2172 B.SetInsertPointPastAllocas(
F);
2173 for (
unsigned OpIdx = 0; OpIdx <
F->arg_size(); ++OpIdx) {
2178 if (!ElemTy && (ElemTy = deduceFunParamElementType(
F, OpIdx)) !=
nullptr) {
2182 propagateElemType(Arg, IntegerType::getInt8Ty(
F->getContext()),
2185 buildAssignPtr(
B, ElemTy, Arg);
2194 bool IsNewFTy =
false;
2206 ? FunctionType::get(FTy->getReturnType(), ArgTys, FTy->isVarArg())
2210bool SPIRVEmitIntrinsics::processFunctionPointers(
Module &M) {
2213 if (
F.isIntrinsic())
2215 if (
F.isDeclaration()) {
2216 for (
User *U :
F.users()) {
2217 CallInst *CI = dyn_cast<CallInst>(U);
2229 for (
User *U :
F.users()) {
2231 if (!
II ||
II->arg_size() != 3 ||
II->getOperand(0) != &
F)
2233 if (
II->getIntrinsicID() == Intrinsic::spv_assign_ptr_type ||
2234 II->getIntrinsicID() == Intrinsic::spv_ptrcast) {
2241 if (Worklist.
empty())
2247 "cannot allocate a name for the internal service function");
2258 for (
const auto &Arg :
F->args())
2260 IRB.CreateCall(
F, Args);
2262 IRB.CreateRetVoid();
2268void SPIRVEmitIntrinsics::applyDemangledPtrArgTypes(
IRBuilder<> &
B) {
2269 for (
auto It : FDeclPtrTys) {
2271 for (
auto *U :
F->users()) {
2272 CallInst *CI = dyn_cast<CallInst>(U);
2276 for (
auto [
Idx, ElemTy] : It.second) {
2282 if (
Argument *Arg = dyn_cast<Argument>(Param)) {
2284 B.SetInsertPointPastAllocas(Arg->
getParent());
2286 buildAssignPtr(
B, ElemTy, Arg);
2288 }
else if (isa<Instruction>(Param)) {
2295 .getFirstNonPHIOrDbgOrAlloca());
2296 buildAssignPtr(
B, ElemTy, Param);
2314bool SPIRVEmitIntrinsics::runOnFunction(
Function &Func) {
2315 if (
Func.isDeclaration())
2319 GR =
ST.getSPIRVGlobalRegistry();
2323 ST.canUseExtension(SPIRV::Extension::SPV_INTEL_function_pointers);
2328 AggrConstTypes.
clear();
2331 processParamTypesByFunHeader(CurrF,
B);
2339 Type *ElTy =
SI->getValueOperand()->getType();
2344 B.SetInsertPoint(&
Func.getEntryBlock(),
Func.getEntryBlock().begin());
2345 for (
auto &GV :
Func.getParent()->globals())
2346 processGlobalValue(GV,
B);
2348 preprocessUndefs(
B);
2349 preprocessCompositeConstants(
B);
2354 applyDemangledPtrArgTypes(
B);
2357 for (
auto &
I : Worklist) {
2359 if (isConvergenceIntrinsic(
I))
2362 bool Postpone = insertAssignPtrTypeIntrs(
I,
B,
false);
2364 insertAssignTypeIntrs(
I,
B);
2365 insertPtrCastOrAssignTypeInstr(
I,
B);
2369 if (Postpone && !GR->findAssignPtrTypeInstr(
I))
2370 insertAssignPtrTypeIntrs(
I,
B,
true);
2372 if (
auto *FPI = dyn_cast<ConstrainedFPIntrinsic>(
I))
2373 useRoundingMode(FPI,
B);
2380 deduceOperandElementType(&
I, &UncompleteRets);
2387 deduceOperandElementType(&Phi,
nullptr);
2389 for (
auto *
I : Worklist) {
2390 TrackConstants =
true;
2391 if (!
I->getType()->isVoidTy() || isa<StoreInst>(
I))
2400 if (isConvergenceIntrinsic(
I))
2403 processInstrAfterVisit(
I,
B);
2410bool SPIRVEmitIntrinsics::postprocessTypes(
Module &M) {
2411 if (!GR || TodoTypeSz == 0)
2414 unsigned SzTodo = TodoTypeSz;
2418 if (!
Enabled || isa<GetElementPtrInst>(
Op))
2420 CallInst *AssignCI = GR->findAssignPtrTypeInstr(
Op);
2421 Type *KnownTy = GR->findDeducedElementType(
Op);
2422 if (!KnownTy || !AssignCI)
2426 if (
auto *CI = dyn_cast<Instruction>(
Op)) {
2428 std::unordered_set<Value *> Visited;
2429 if (
Type *ElemTy = deduceElementTypeHelper(
Op, Visited,
false,
true)) {
2430 if (ElemTy != KnownTy) {
2432 propagateElemType(CI, ElemTy, VisitedSubst);
2438 for (
User *U :
Op->users()) {
2440 if (Inst && !isa<IntrinsicInst>(Inst))
2444 if (TodoTypeSz == 0)
2451 auto It = ToProcess.
find(&
I);
2452 if (It == ToProcess.
end())
2454 It->second.remove_if([
this](
Value *V) {
return !isTodoType(V); });
2455 if (It->second.size() == 0)
2457 deduceOperandElementType(&
I, &UncompleteRets, &It->second,
true);
2458 if (TodoTypeSz == 0)
2463 return SzTodo > TodoTypeSz;
2467void SPIRVEmitIntrinsics::parseFunDeclarations(
Module &M) {
2469 if (!
F.isDeclaration() ||
F.isIntrinsic())
2473 if (DemangledName.empty())
2478 DemangledName,
ST.getPreferredInstructionSet());
2479 if (Opcode != SPIRV::OpGroupAsyncCopy)
2483 for (
unsigned OpIdx = 0; OpIdx <
F.arg_size(); ++OpIdx) {
2494 if (!TypeStrs.
size())
2497 for (
unsigned Idx : Idxs) {
2504 FDeclPtrTys[&
F].push_back(std::make_pair(
Idx, ElemTy));
2509bool SPIRVEmitIntrinsics::runOnModule(
Module &M) {
2510 bool Changed =
false;
2512 parseFunDeclarations(M);
2522 if (!
F.isDeclaration() && !
F.isIntrinsic()) {
2524 processParamTypes(&
F,
B);
2528 CanTodoType =
false;
2529 Changed |= postprocessTypes(M);
2532 Changed |= processFunctionPointers(M);
2538 return new SPIRVEmitIntrinsics(TM);
static unsigned getIntrinsicID(const SDNode *N)
Expand Atomic instructions
static GCRegistry::Add< OcamlGC > B("ocaml", "ocaml 3.10-compatible GC")
static void replaceAllUsesWith(Value *Old, Value *New, SmallSet< BasicBlock *, 32 > &FreshBBs, bool IsHuge)
Replace all old uses with new ones, and push the updated BBs into FreshBBs.
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
This file defines the DenseSet and SmallDenseSet classes.
static bool runOnFunction(Function &F, bool PostInlining)
iv Induction Variable Users
uint64_t IntrinsicInst * II
#define INITIALIZE_PASS(passName, arg, name, cfg, analysis)
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
static bool isMemInstrToReplace(Instruction *I)
static bool isAggrConstForceInt32(const Value *V)
static Type * getAtomicElemTy(SPIRVGlobalRegistry *GR, Instruction *I, Value *PointerOperand)
static void reportFatalOnTokenType(const Instruction *I)
static void setInsertPointAfterDef(IRBuilder<> &B, Instruction *I)
static void emitAssignName(Instruction *I, IRBuilder<> &B)
static Type * getPointeeTypeByCallInst(StringRef DemangledName, Function *CalledF, unsigned OpIdx)
static void createRoundingModeDecoration(Instruction *I, unsigned RoundingModeDeco, IRBuilder<> &B)
static void createDecorationIntrinsic(Instruction *I, MDNode *Node, IRBuilder<> &B)
static bool IsKernelArgInt8(Function *F, StoreInst *SI)
static void setInsertPointSkippingPhis(IRBuilder<> &B, Instruction *I)
static FunctionType * getFunctionPointerElemType(Function *F, SPIRVGlobalRegistry *GR)
static void createSaturatedConversionDecoration(Instruction *I, IRBuilder<> &B)
static Type * restoreMutatedType(SPIRVGlobalRegistry *GR, Instruction *I, Type *Ty)
static bool requireAssignType(Instruction *I)
void visit(MachineFunction &MF, MachineBasicBlock &Start, std::function< void(MachineBasicBlock *)> op)
static void insertSpirvDecorations(MachineFunction &MF, MachineIRBuilder MIB)
#define SPIRV_BACKEND_SERVICE_FUN_NAME
static SymbolRef::Type getType(const Symbol *Sym)
static int Lookup(ArrayRef< TableEntry > Table, unsigned Opcode)
an instruction to allocate memory on the stack
Represent the analysis usage information of a pass.
This class represents an incoming formal argument to a Function.
const Function * getParent() const
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
An instruction that atomically checks whether a specified value is in a memory location,...
static unsigned getPointerOperandIndex()
static unsigned getPointerOperandIndex()
LLVM Basic Block Representation.
iterator_range< const_phi_iterator > phis() const
Returns a range that iterates over the phis in the basic block.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
LLVMContext & getContext() const
Get the context in which this basic block lives.
This class represents a no-op cast from one type to another.
static BlockAddress * get(Function *F, BasicBlock *BB)
Return a BlockAddress for the specified function and basic block.
bool isInlineAsm() const
Check if this call is an inline asm statement.
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
bool isIndirectCall() const
Return true if the callsite is an indirect call.
Value * getCalledOperand() const
Value * getArgOperand(unsigned i) const
void setArgOperand(unsigned i, Value *v)
FunctionType * getFunctionType() const
iterator_range< User::op_iterator > args()
Iteration adapter for range-for loops.
unsigned arg_size() const
This class represents a function call, abstracting a target machine's calling convention.
This is an important base class in LLVM.
static Constant * getNullValue(Type *Ty)
Constructor to create a '0' constant of arbitrary type.
This is the common base class for constrained floating point intrinsics.
std::optional< RoundingMode > getRoundingMode() const
This class represents an Operation in the Expression.
iterator find(const_arg_type_t< KeyT > Val)
std::pair< iterator, bool > try_emplace(KeyT &&Key, Ts &&...Args)
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Implements a dense probed hash-table based set.
void addFnAttr(Attribute::AttrKind Kind)
Add function attributes to this function.
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
const DataLayout & getDataLayout() const
Get the data layout of the module this function belongs to.
bool isIntrinsic() const
isIntrinsic - Returns true if the function's name starts with "llvm.".
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Type * getReturnType() const
Returns the type of the ret val.
Argument * getArg(unsigned i) const
an instruction for type-safe pointer arithmetic to access elements of arrays and structs
static Type * getTypeAtIndex(Type *Ty, Value *Idx)
Return the type of the element at the given index of an indexable type.
static unsigned getPointerOperandIndex()
PointerType * getType() const
Global values are always pointers.
@ PrivateLinkage
Like Internal, but omit from symbol table.
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
This provides a uniform API for creating instructions and inserting them into a basic block: either a...
Indirect Branch Instruction.
void addDestination(BasicBlock *Dest)
Add a destination.
This instruction inserts a single (scalar) element into a VectorType value.
This instruction inserts a struct field of array element value into an aggregate value.
Base class for instruction visitors.
RetTy visitExtractElementInst(ExtractElementInst &I)
RetTy visitInsertValueInst(InsertValueInst &I)
RetTy visitUnreachableInst(UnreachableInst &I)
RetTy visitAtomicCmpXchgInst(AtomicCmpXchgInst &I)
RetTy visitBitCastInst(BitCastInst &I)
RetTy visitSwitchInst(SwitchInst &I)
RetTy visitExtractValueInst(ExtractValueInst &I)
RetTy visitStoreInst(StoreInst &I)
RetTy visitInsertElementInst(InsertElementInst &I)
RetTy visitAllocaInst(AllocaInst &I)
RetTy visitCallInst(CallInst &I)
RetTy visitGetElementPtrInst(GetElementPtrInst &I)
void visitInstruction(Instruction &I)
RetTy visitLoadInst(LoadInst &I)
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
Instruction * user_back()
Specialize the methods defined in Value, as we know that an instruction can only be used by other ins...
A wrapper class for inspecting calls to intrinsic functions.
This is an important class for using LLVM in a threaded context.
An instruction for reading from memory.
static unsigned getPointerOperandIndex()
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
static MDString * get(LLVMContext &Context, StringRef Str)
static MDTuple * get(LLVMContext &Context, ArrayRef< Metadata * > MDs)
Flags
Flags values. These may be or'd together.
ModulePass class - This class is used to implement unstructured interprocedural optimizations and ana...
virtual bool runOnModule(Module &M)=0
runOnModule - Virtual method overriden by subclasses to process the module being operated on.
A Module instance is used to store all the information related to an LLVM module.
PassRegistry - This class manages the registration and intitialization of the pass subsystem as appli...
static PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
virtual void getAnalysisUsage(AnalysisUsage &) const
getAnalysisUsage - This function should be overriden by passes that need analysis information to do t...
virtual StringRef getPassName() const
getPassName - Return a nice clean name for a pass.
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
void addAssignPtrTypeInstr(Value *Val, CallInst *AssignPtrTyCI)
Type * findDeducedCompositeType(const Value *Val)
void addDeducedElementType(Value *Val, Type *Ty)
void addReturnType(const Function *ArgF, TypedPointerType *DerivedTy)
void updateIfExistDeducedElementType(Value *OldVal, Value *NewVal, bool DeleteOld)
Type * findMutated(const Value *Val)
void addDeducedCompositeType(Value *Val, Type *Ty)
void updateIfExistAssignPtrTypeInstr(Value *OldVal, Value *NewVal, bool DeleteOld)
Type * findDeducedElementType(const Value *Val)
CallInst * findAssignPtrTypeInstr(const Value *Val)
bool canUseExtension(SPIRV::Extension::Extension E) const
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
An instruction for storing to memory.
static unsigned getPointerOperandIndex()
StringMap - This is an unconventional map that is specialized for handling keys that are "strings",...
iterator find(StringRef Key)
StringRef - Represent a constant reference to a string, i.e.
bool starts_with(StringRef Prefix) const
Check if this string starts with the given Prefix.
static StructType * create(LLVMContext &Context, StringRef Name)
This creates an identified struct.
Class to represent target extensions types, which are generally unintrospectable from target-independ...
static TargetExtType * get(LLVMContext &Context, StringRef Name, ArrayRef< Type * > Types={}, ArrayRef< unsigned > Ints={})
Return a target extension type having the specified name and optional type and integer parameters.
The instances of the Type class are immutable: once they are created, they are never changed.
bool isVectorTy() const
True if this is an instance of VectorType.
bool isPointerTy() const
True if this is an instance of PointerType.
StringRef getTargetExtName() const
static Type * getVoidTy(LLVMContext &C)
bool isTargetExtTy() const
Return true if this is a target extension type.
bool isAggregateType() const
Return true if the type is an aggregate type.
static IntegerType * getInt32Ty(LLVMContext &C)
bool isVoidTy() const
Return true if this is 'void'.
static bool isValidElementType(Type *ElemTy)
Return true if the specified type is valid as a element type.
static TypedPointerType * get(Type *ElementType, unsigned AddressSpace)
This constructs a pointer to an object of the specified type in a numbered address space.
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
This function has undefined behavior.
A Use represents the edge between a Value definition and its users.
Value * getOperand(unsigned i) const
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void setName(const Twine &Name)
Change the name of the value.
iterator_range< user_iterator > users()
LLVMContext & getContext() const
All values hold a context through their type.
unsigned getNumUses() const
This method computes the number of uses of this Value.
StringRef getName() const
Return a constant reference to the value's name.
std::pair< iterator, bool > insert(const ValueT &V)
bool contains(const_arg_type_t< ValueT > V) const
Check if the set contains the given element.
const ParentTy * getParent() const
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ SPIR_KERNEL
Used for SPIR kernel functions.
unsigned ID
LLVM IR allows to use arbitrary numbers as calling convention identifiers.
ID ArrayRef< Type * > Tys
std::string lookupBuiltinNameHelper(StringRef DemangledCall, FPDecorationId *DecorationId)
Parses the name part of the demangled builtin call.
Type * parseBuiltinCallArgumentType(StringRef TypeStr, LLVMContext &Ctx)
bool parseBuiltinTypeStr(SmallVector< StringRef, 10 > &BuiltinArgsTypeStrs, const StringRef DemangledCall, LLVMContext &Ctx)
std::tuple< int, unsigned, unsigned > mapBuiltinToOpcode(const StringRef DemangledCall, SPIRV::InstructionSet::InstructionSet Set)
Helper function for finding a builtin function attributes by a demangled function name.
Type * parseBuiltinCallArgumentBaseType(const StringRef DemangledCall, unsigned ArgIdx, LLVMContext &Ctx)
Parses the provided ArgIdx argument base type in the DemangledCall skeleton.
NodeAddr< PhiNode * > Phi
NodeAddr< FuncNode * > Func
This is an optimization pass for GlobalISel generic memory operations.
auto drop_begin(T &&RangeOrContainer, size_t N=1)
Return a range covering RangeOrContainer with the first N elements excluded.
bool getVacantFunctionName(Module &M, std::string &Name)
void initializeSPIRVEmitIntrinsicsPass(PassRegistry &)
bool isTypedPointerWrapper(const TargetExtType *ExtTy)
ModulePass * createSPIRVEmitIntrinsicsPass(SPIRVTargetMachine *TM)
unsigned getPointerAddressSpace(const Type *T)
bool isNestedPointer(const Type *Ty)
std::string getOclOrSpirvBuiltinDemangledName(StringRef Name)
auto reverse(ContainerTy &&C)
Type * getTypedPointerWrapper(Type *ElemTy, unsigned AS)
bool isPointerTy(const Type *T)
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
SPIRV::Scope::Scope getMemScope(LLVMContext &Ctx, SyncScope::ID Id)
@ Ref
The access may reference the value stored in memory.
DWARFExpression::Operation Op
Type * getPointeeTypeByAttr(Argument *Arg)
bool hasPointeeTypeAttr(Argument *Arg)
bool isEquivalentTypes(Type *Ty1, Type *Ty2)
bool hasInitializer(const GlobalVariable *GV)
bool isSpvIntrinsic(const MachineInstr &MI, Intrinsic::ID IntrinsicID)
Type * getPointeeType(const Type *Ty)
void addStringImm(const StringRef &Str, MCInst &Inst)
bool isUntypedPointerTy(const Type *T)
SPIRV::MemorySemantics::MemorySemantics getMemSemantics(AtomicOrdering Ord)