/// Helper to hold state needed for unrolling. It holds the Plan to unroll by
/// UF. It also holds copies of VPValues across UF - 1 parts for recipes that
/// have been unrolled.
class UnrollState {
  /// Plan to unroll.
  VPlan &Plan;
  /// Unroll factor to unroll by.
  const unsigned UF;
  /// Analysis for types.
  VPTypeAnalysis TypeInfo;

  /// Unrolling may create recipes that should not be unrolled themselves.
  /// Those are tracked in ToSkip.
  SmallPtrSet<VPRecipeBase *, 8> ToSkip;

  /// Associate with each VPValue of part 0 its unrolled instances of parts 1,
  /// ..., UF - 1.
  DenseMap<VPValue *, SmallVector<VPValue *>> VPV2Parts;

  /// Unroll replicate region \p VPR by cloning the region UF - 1 times.
  void unrollReplicateRegionByUF(VPRegionBlock *VPR);

  /// Unroll recipe \p R by cloning it UF - 1 times, unless it is uniform
  /// across all parts.
  void unrollRecipeByUF(VPRecipeBase &R);

  /// Unroll header phi recipe \p R; how depends on the concrete phi. Newly
  /// created recipes are inserted at \p InsertPtForPhi.
  void unrollHeaderPHIByUF(VPHeaderPHIRecipe *R,
                           VPBasicBlock::iterator InsertPtForPhi);

  /// Unroll widen induction recipe \p IV, adding recipes that compute the
  /// induction steps for parts 1, ..., UF - 1.
  void unrollWidenInductionByUF(VPWidenInductionRecipe *IV,
                                VPBasicBlock::iterator InsertPtForPhi);

  /// Return a live-in constant of the canonical IV's type holding \p Part.
  VPValue *getConstantVPV(unsigned Part) {
    Type *CanIVIntTy =
        Plan.getVectorLoopRegion()->getCanonicalIV()->getScalarType();
    return Plan.getOrAddLiveIn(ConstantInt::get(CanIVIntTy, Part));
  }

public:
  UnrollState(VPlan &Plan, unsigned UF) : Plan(Plan), UF(UF), TypeInfo(Plan) {}

  void unrollBlock(VPBlockBase *VPB);

  /// Return the value of \p V for \p Part; part 0 and live-ins are the value
  /// itself.
  VPValue *getValueForPart(VPValue *V, unsigned Part) {
    if (Part == 0 || V->isLiveIn())
      return V;
    assert((VPV2Parts.contains(V) && VPV2Parts[V].size() >= Part) &&
           "accessed value does not exist");
    return VPV2Parts[V][Part - 1];
  }

  /// Given an original recipe \p OrigR (of part 0) and its copy \p CopyR for
  /// \p Part, map every VPValue defined by \p OrigR to the corresponding
  /// VPValue defined by \p CopyR.
  void addRecipeForPart(VPRecipeBase *OrigR, VPRecipeBase *CopyR,
                        unsigned Part) {
    for (const auto &[Idx, VPV] : enumerate(OrigR->definedValues())) {
      const auto &[V, _] = VPV2Parts.try_emplace(VPV);
      assert(V->second.size() == Part - 1 && "earlier parts not set");
      V->second.push_back(CopyR->getVPValue(Idx));
    }
  }

  /// Given a uniform recipe \p R, add it for all parts.
  void addUniformForAllParts(VPSingleDefRecipe *R) {
    const auto &[V, Inserted] = VPV2Parts.try_emplace(R);
    assert(Inserted && "uniform value already added");
    for (unsigned Part = 0; Part != UF; ++Part)
      V->second.push_back(R);
  }

  bool contains(VPValue *VPV) const { return VPV2Parts.contains(VPV); }

  /// Update \p R's operand at \p OpIdx with its value for \p Part.
  void remapOperand(VPRecipeBase *R, unsigned OpIdx, unsigned Part) {
    auto *Op = R->getOperand(OpIdx);
    R->setOperand(OpIdx, getValueForPart(Op, Part));
  }

  /// Update all of \p R's operands with their values for \p Part.
  void remapOperands(VPRecipeBase *R, unsigned Part) {
    for (const auto &[OpIdx, Op] : enumerate(R->operands()))
      R->setOperand(OpIdx, getValueForPart(Op, Part));
  }
};
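// Illustration of the UnrollState part mapping (UF = 3, names illustrative):
// a recipe defining %v in part 0 gets clones defining %v.1 and %v.2, recorded
// as VPV2Parts[%v] = {%v.1, %v.2}. getValueForPart(%v, 0) returns %v itself,
// getValueForPart(%v, 2) returns %v.2, and a value registered via
// addUniformForAllParts maps every part back to its single definition.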
void UnrollState::unrollReplicateRegionByUF(VPRegionBlock *VPR) {
  VPBlockBase *InsertPt = VPR->getSingleSuccessor();
  for (unsigned Part = 1; Part != UF; ++Part) {
    auto *Copy = VPR->clone();
    VPBlockUtils::insertBlockBefore(Copy, InsertPt);

    // Pair up blocks and recipes of the clone (part Part) and the original
    // region (part 0), remap operands and record the per-part copies.
    for (const auto &[PartIVPBB, Part0VPBB] :
         zip(VPBlockUtils::blocksOnly<VPBasicBlock>(
                 vp_depth_first_shallow(Copy->getEntry())),
             VPBlockUtils::blocksOnly<VPBasicBlock>(
                 vp_depth_first_shallow(VPR->getEntry())))) {
      for (const auto &[PartIR, Part0R] : zip(*PartIVPBB, *Part0VPBB)) {
        remapOperands(&PartIR, Part);
        if (auto *ScalarIVSteps = dyn_cast<VPScalarIVStepsRecipe>(&PartIR))
          ScalarIVSteps->addOperand(getConstantVPV(Part));
        addRecipeForPart(&Part0R, &PartIR, Part);
      }
    }
  }
}
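// Note on unrollReplicateRegionByUF: the cloned regions are verbatim copies of
// the part-0 region; only VPScalarIVStepsRecipes receive the part index as an
// extra operand so they produce the scalar lanes of their part, while all
// other cloned recipes are adjusted purely by remapping their operands to the
// values of the same part.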
void UnrollState::unrollWidenInductionByUF(
    VPWidenInductionRecipe *IV, VPBasicBlock::iterator InsertPtForPhi) {
  VPBasicBlock *PH = cast<VPBasicBlock>(
      IV->getParent()->getEnclosingLoopRegion()->getSinglePredecessor());
  Type *IVTy = TypeInfo.inferScalarType(IV);
  auto &ID = IV->getInductionDescriptor();
  VPIRFlags Flags;
  if (isa_and_present<FPMathOperator>(ID.getInductionBinOp()))
    Flags = ID.getInductionBinOp()->getFastMathFlags();

  // Compute a step covering a whole part once in the preheader; it must not
  // be unrolled again.
  VPValue *ScalarStep = IV->getStepValue();
  VPBuilder Builder(PH);
  VPInstruction *VectorStep = Builder.createNaryOp(
      VPInstruction::WideIVStep, {&Plan.getVF(), ScalarStep}, Flags,
      IV->getDebugLoc());
  ToSkip.insert(VectorStep);

  // ...

  // Add recipes for parts 1, ..., UF - 1: each part adds VectorStep to the
  // value of the previous part, starting from the phi itself (part 0).
  Builder.setInsertPoint(IV->getParent(), InsertPtForPhi);
  VPValue *Prev = IV;
  unsigned AddOpc;
  if (IVTy->isFloatingPointTy())
    AddOpc = ID.getInductionOpcode();
  else
    AddOpc = Instruction::Add;
  for (unsigned Part = 1; Part != UF; ++Part) {
    std::string Name =
        Part > 1 ? "step.add." + std::to_string(Part) : "step.add";

    VPInstruction *Add = Builder.createNaryOp(AddOpc, {Prev, VectorStep},
                                              Flags, IV->getDebugLoc(), Name);
    ToSkip.insert(Add);
    addRecipeForPart(IV, Add, Part);
    Prev = Add;
  }
  // The phi keeps the step and the last part as extra operands; they are used
  // to compute the value feeding back across the loop's backedge.
  IV->addOperand(VectorStep);
  IV->addOperand(Prev);
}
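// Illustration of unrollWidenInductionByUF (UF = 2, names illustrative): the
// widened induction phi %iv stays the value of part 0 and a single new recipe
//   %step.add = add %iv, %vector.step
// provides part 1. The two operands appended to the phi (%vector.step and the
// last part, here %step.add) are later used to compute the value fed back
// across the loop backedge.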
void UnrollState::unrollHeaderPHIByUF(VPHeaderPHIRecipe *R,
                                      VPBasicBlock::iterator InsertPtForPhi) {
  // ...

  // Widened inductions get dedicated step recipes per part.
  if (auto *IV = dyn_cast<VPWidenInductionRecipe>(R)) {
    unrollWidenInductionByUF(IV, InsertPtForPhi);
    return;
  }

  // Ordered reductions keep a single phi; their per-part chain is created
  // when unrolling the reduction recipes themselves (see unrollRecipeByUF).
  auto *RdxPhi = dyn_cast<VPReductionPHIRecipe>(R);
  if (RdxPhi && RdxPhi->isOrdered())
    return;

  // Clone R for parts 1, ..., UF - 1, inserting the copies directly after R.
  auto InsertPt = std::next(R->getIterator());
  for (unsigned Part = 1; Part != UF; ++Part) {
    VPRecipeBase *Copy = R->clone();
    Copy->insertBefore(*R->getParent(), InsertPt);
    addRecipeForPart(R, Copy, Part);
    if (RdxPhi) {
      // Reduction phis of parts > 0 must start from the reduction's identity
      // value; only part 0 applies the original start value.
      auto *VPI = dyn_cast<VPInstruction>(RdxPhi->getStartValue());
      assert(VPI && "unexpected start VPInstruction");
      VPValue *StartV = VPI->getOperand(1);
      // ...
      auto *C = VPI->clone();
      C->setOperand(0, C->getOperand(1));
      // ...
      for (unsigned Part = 1; Part != UF; ++Part)
        VPV2Parts[VPI][Part - 1] = StartV;
      // ...
      Copy->addOperand(getConstantVPV(Part));
    } else {
      // Remaining header phis do not need an extra part operand; anything
      // else triggers "unexpected header phi recipe not needing unrolled
      // part".
    }
  }
}
void UnrollState::unrollRecipeByUF(VPRecipeBase &R) {
  // ...
  if (auto *VPI = dyn_cast<VPInstruction>(&R)) {
    if (vputils::onlyFirstPartUsed(VPI)) {
      // A VPInstruction of which only the first part is used stays uniform.
      addUniformForAllParts(VPI);
      return;
    }
  }
  if (auto *RepR = dyn_cast<VPReplicateRecipe>(&R)) {
    if (isa<StoreInst>(RepR->getUnderlyingValue()) &&
        RepR->getOperand(1)->isDefinedOutsideLoopRegions()) {
      // Stores to an invariant address only need to store the last part.
      return;
    }
    if (auto *II = dyn_cast<IntrinsicInst>(RepR->getUnderlyingValue())) {
      if (II->getIntrinsicID() == Intrinsic::experimental_noalias_scope_decl) {
        addUniformForAllParts(RepR);
        return;
      }
    }
  }

  // Unroll everything else by cloning R for parts 1, ..., UF - 1.
  auto InsertPt = std::next(R.getIterator());
  VPBasicBlock &VPBB = *R.getParent();
  for (unsigned Part = 1; Part != UF; ++Part) {
    VPRecipeBase *Copy = R.clone();
    Copy->insertBefore(VPBB, InsertPt);
    addRecipeForPart(&R, Copy, Part);

    // A first-order recurrence splice of part I combines the recurrence
    // value of parts I - 1 and I.
    VPValue *Op;
    if (match(&R, m_VPInstruction<VPInstruction::FirstOrderRecurrenceSplice>(
                      m_VPValue(), m_VPValue(Op)))) {
      Copy->setOperand(0, getValueForPart(Op, Part - 1));
      Copy->setOperand(1, getValueForPart(Op, Part));
      continue;
    }

    // Ordered reductions form a chain: each part reduces into the previous
    // part's result, and the phi's backedge value is the last part.
    if (auto *Red = dyn_cast<VPReductionRecipe>(&R)) {
      auto *Phi = dyn_cast<VPReductionPHIRecipe>(R.getOperand(0));
      if (Phi && Phi->isOrdered()) {
        auto &Parts = VPV2Parts[Phi];
        if (Part == 1) {
          Parts.clear();
          Parts.push_back(Red);
        }
        Parts.push_back(Copy->getVPSingleValue());
        Phi->setOperand(1, Copy->getVPSingleValue());
      }
    }
    remapOperands(Copy, Part);

    // Add an operand holding the part to generate code for, to recipes that
    // still need it (including CanonicalIVIncrementForPart instructions).
    if (isa<VPScalarIVStepsRecipe, VPWidenCanonicalIVRecipe,
            VPVectorPointerRecipe, VPVectorEndPointerRecipe>(Copy) /* ... */)
      Copy->addOperand(getConstantVPV(Part));

    // Vector-pointer recipes of all parts reuse the part-0 pointer operand.
    if (isa<VPVectorPointerRecipe, VPVectorEndPointerRecipe>(R))
      Copy->setOperand(0, R.getOperand(0));
  }
}
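// Illustration of the ordered-reduction case in unrollRecipeByUF (UF = 2,
// names illustrative): an in-order FP reduction
//   %rdx = reduce.fadd(%rdx.phi, %vec.0)
// becomes a chain where part 1 reduces into the result of part 0 and the
// reduction phi's backedge uses the final link:
//   %rdx   = reduce.fadd(%rdx.phi, %vec.0)
//   %rdx.1 = reduce.fadd(%rdx,     %vec.1)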
void UnrollState::unrollBlock(VPBlockBase *VPB) {
  auto *VPR = dyn_cast<VPRegionBlock>(VPB);
  if (VPR) {
    if (VPR->isReplicator())
      return unrollReplicateRegionByUF(VPR);

    // Traverse the region's blocks in RPO so defs are unrolled before uses.
    ReversePostOrderTraversal<VPBlockShallowTraversalWrapper<VPBlockBase *>>
        RPOT(VPR->getEntry());
    for (VPBlockBase *VPB : RPOT)
      unrollBlock(VPB);
    return;
  }

  // VPB is a VPBasicBlock: unroll each of its recipes.
  auto *VPBB = cast<VPBasicBlock>(VPB);
  auto InsertPtForPhi = VPBB->getFirstNonPhi();
  for (VPRecipeBase &R : make_early_inc_range(*VPBB)) {
    // ... Recipes combining all parts (e.g. AnyOf, FirstActiveLane) stay
    // uniform and get each part's value appended as an extra operand:
    //       for (unsigned Part = 1; Part != UF; ++Part)
    //         R.addOperand(getValueForPart(Op1, Part));
    // ... With a scalar-only VF, extracting the last (penultimate) element
    // just forwards the value of part UF - 1 (UF - 2):
    //       I->replaceAllUsesWith(getValueForPart(Op0, UF - Offset));
    // ... Values uniform across VF and UF are registered once for all parts:
    //       addUniformForAllParts(SingleDef);
    if (auto *H = dyn_cast<VPHeaderPHIRecipe>(&R)) {
      unrollHeaderPHIByUF(H, InsertPtForPhi);
      continue;
    }
    unrollRecipeByUF(R);
  }
}
void VPlanTransforms::unrollByUF(VPlan &Plan, unsigned UF) {
  assert(UF > 0 && "Unroll factor must be positive");
  // On exit, CanonicalIVIncrementForPart instructions left with one operand
  // are no-ops for part 0 and get cleaned up:
  //     VPI->replaceAllUsesWith(VPI->getOperand(0));
  //     VPI->eraseFromParent();
  UnrollState Unroller(Plan, UF);
  // ...
  for (VPBlockBase *VPB : RPOT)
    Unroller.unrollBlock(VPB);

  // Remap backedge operands of cloned header phis; clones for parts 1..UF-1
  // directly follow their part-0 phi.
  unsigned Part = 1;
  for (VPRecipeBase &H :
       Plan.getVectorLoopRegion()->getEntryBasicBlock()->phis()) {
    // First-order recurrence phis use the last part of the spliced value.
    if (isa<VPFirstOrderRecurrencePHIRecipe>(&H)) {
      Unroller.remapOperand(&H, 1, UF - 1);
      continue;
    }
    if (Unroller.contains(H.getVPSingleValue())) {
      Part = 1;
      continue;
    }
    Unroller.remapOperands(&H, Part);
    Part++;
  }
}
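// Note on the phi-remapping loop in unrollByUF: after unrolling, the header
// phis for parts 1..UF-1 directly follow their part-0 phi, which is the only
// one recorded in VPV2Parts. The loop therefore resets Part to 1 whenever it
// reaches a part-0 phi and bumps Part for each following clone, so every
// clone's backedge operand is rewritten to the value of its own part.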
/// Create a single-scalar clone of \p DefR (a VPReplicateRecipe or
/// VPInstruction) for lane \p Lane, reusing per-lane defs from \p Def2LaneDefs.
static VPValue *
cloneForLane(VPlan &Plan, VPBuilder &Builder, Type *IdxTy,
             VPRecipeWithIRFlags *DefR, VPLane Lane,
             const DenseMap<VPValue *, SmallVector<VPValue *>> &Def2LaneDefs) {
  // ... (operands resolved up front: reuse an existing per-lane definition or
  //      extract the requested lane)
  //     auto LaneDefs = Def2LaneDefs.find(Op);
  //     if (LaneDefs != Def2LaneDefs.end())
  //       return LaneDefs->second[Lane.getKnownLane()];
  //     return Builder.createNaryOp(Instruction::ExtractElement, {Op, Idx});

  // Collect the clone's operands, lane by lane.
  SmallVector<VPValue *> NewOps;
  for (VPValue *Op : DefR->operands()) {
    auto LaneDefs = Def2LaneDefs.find(Op);
    if (LaneDefs != Def2LaneDefs.end()) {
      NewOps.push_back(LaneDefs->second[Lane.getKnownLane()]);
      continue;
    }
    if (Lane.getKind() == VPLane::Kind::ScalableLast) {
      // Look through the Unpack feeding a scalable last-lane operand.
      [[maybe_unused]] bool Matched =
          match(Op, m_VPInstruction<VPInstruction::Unpack>(m_VPValue(Op)));
      assert(Matched && "original op must have been Unpack");
      // ...
      continue;
    }
    // ...
    VPValue *Ext = Builder.createNaryOp(Instruction::ExtractElement, {Op, Idx});
    NewOps.push_back(Ext);
  }

  VPRecipeWithIRFlags *New;
  if (auto *RepR = dyn_cast<VPReplicateRecipe>(DefR)) {
    New = new VPReplicateRecipe(RepR->getUnderlyingInstr(), NewOps,
                                /*IsSingleScalar=*/true, nullptr, *RepR);
  } else {
    assert(isa<VPInstruction>(DefR) &&
           "DefR must be a VPReplicateRecipe or VPInstruction");
    New = DefR->clone();
    for (const auto &[Idx, Op] : enumerate(NewOps))
      New->setOperand(Idx, Op);
  }
  New->transferFlags(*DefR);
  New->insertBefore(DefR);
  return New;
}
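// Illustration of cloneForLane (lane 2, names illustrative): each operand %op
// that is still a vector value is replaced in the single-scalar clone by
//   %op.lane.2 = extractelement %op, IdxTy 2
// while operands that already have per-lane definitions in Def2LaneDefs are
// reused without creating redundant extracts.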
void VPlanTransforms::replicateByVF(VPlan &Plan, ElementCount VF) {
  // ...
  // For every replicating recipe DefR, build one single-scalar clone per lane
  // (see cloneForLane) into LaneDefs, then rewrite DefR's users.
    if (DefR->getNumUsers() == 0) {
      // ...
      DefR->eraseFromParent();
      continue;
    }
    Def2LaneDefs[DefR] = LaneDefs;
    // Users that only demand lane 0 can use the first clone directly.
    DefR->replaceUsesWithIf(LaneDefs[0], [DefR](VPUser &U, unsigned) {
      return U.onlyFirstLaneUsed(DefR);
    });
    // Remaining users are Build(Struct)Vector instructions: give them one
    // operand per lane.
    // ...
      assert(VPI->getNumOperands() == 1 &&
             "Build(Struct)Vector must have a single operand before "
             "replicating by VF");
      VPI->setOperand(0, LaneDefs[0]);
      // ...
        VPI->addOperand(LaneDef);
    // ...
    R->eraseFromParent();
  // ...
}
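// Illustration of replicateByVF (VF = 4, names illustrative): a replicate
// recipe %r feeding a BuildVector user
//   %vec = BuildVector %r
// is split into four single-scalar clones %r.0 ... %r.3 and the user becomes
//   %vec = BuildVector %r.0, %r.1, %r.2, %r.3
// Users that only need lane 0 are redirected to %r.0 directly.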