Commit 5e32f72

[VPlan] Move logic to compute cost for intrinsic to helper (NFC).
Refactor to prepare for #154617.
Parent: 888ceac

1 file changed: 25 additions, 11 deletions

llvm/lib/Transforms/Vectorize/VPlanRecipes.cpp

@@ -1715,18 +1715,22 @@ void VPWidenIntrinsicRecipe::execute(VPTransformState &State) {
   State.set(this, V);
 }
 
-InstructionCost VPWidenIntrinsicRecipe::computeCost(ElementCount VF,
-                                                    VPCostContext &Ctx) const {
+/// Compute the cost for the intrinsic \p ID with \p Operands, produced by \p R.
+static InstructionCost getCostForIntrinsics(Intrinsic::ID ID,
+                                            ArrayRef<const VPValue *> Operands,
+                                            const VPRecipeWithIRFlags &R,
+                                            ElementCount VF,
+                                            VPCostContext &Ctx) {
   // Some backends analyze intrinsic arguments to determine cost. Use the
   // underlying value for the operand if it has one. Otherwise try to use the
   // operand of the underlying call instruction, if there is one. Otherwise
   // clear Arguments.
   // TODO: Rework TTI interface to be independent of concrete IR values.
   SmallVector<const Value *> Arguments;
-  for (const auto &[Idx, Op] : enumerate(operands())) {
+  for (const auto &[Idx, Op] : enumerate(Operands)) {
     auto *V = Op->getUnderlyingValue();
     if (!V) {
-      if (auto *UI = dyn_cast_or_null<CallBase>(getUnderlyingValue())) {
+      if (auto *UI = dyn_cast_or_null<CallBase>(R.getUnderlyingValue())) {
         Arguments.push_back(UI->getArgOperand(Idx));
         continue;
       }
@@ -1736,21 +1740,31 @@ InstructionCost VPWidenIntrinsicRecipe::computeCost(ElementCount VF,
     Arguments.push_back(V);
   }
 
-  Type *RetTy = toVectorizedTy(Ctx.Types.inferScalarType(this), VF);
+  Type *ScalarRetTy = Ctx.Types.inferScalarType(&R);
+  Type *RetTy = VF.isVector() ? toVectorizedTy(ScalarRetTy, VF) : ScalarRetTy;
   SmallVector<Type *> ParamTys;
-  for (unsigned I = 0; I != getNumOperands(); ++I)
-    ParamTys.push_back(
-        toVectorTy(Ctx.Types.inferScalarType(getOperand(I)), VF));
+  for (const VPValue *Op : Operands) {
+    ParamTys.push_back(VF.isVector()
+                           ? toVectorTy(Ctx.Types.inferScalarType(Op), VF)
+                           : Ctx.Types.inferScalarType(Op));
+  }
 
   // TODO: Rework TTI interface to avoid reliance on underlying IntrinsicInst.
-  FastMathFlags FMF = hasFastMathFlags() ? getFastMathFlags() : FastMathFlags();
+  FastMathFlags FMF =
+      R.hasFastMathFlags() ? R.getFastMathFlags() : FastMathFlags();
   IntrinsicCostAttributes CostAttrs(
-      VectorIntrinsicID, RetTy, Arguments, ParamTys, FMF,
-      dyn_cast_or_null<IntrinsicInst>(getUnderlyingValue()),
+      ID, RetTy, Arguments, ParamTys, FMF,
+      dyn_cast_or_null<IntrinsicInst>(R.getUnderlyingValue()),
       InstructionCost::getInvalid(), &Ctx.TLI);
   return Ctx.TTI.getIntrinsicInstrCost(CostAttrs, Ctx.CostKind);
 }
 
+InstructionCost VPWidenIntrinsicRecipe::computeCost(ElementCount VF,
+                                                    VPCostContext &Ctx) const {
+  SmallVector<const VPValue *> ArgOps(operands());
+  return getCostForIntrinsics(VectorIntrinsicID, ArgOps, *this, VF, Ctx);
+}
+
 StringRef VPWidenIntrinsicRecipe::getIntrinsicName() const {
   return Intrinsic::getBaseName(VectorIntrinsicID);
 }
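
With the cost query moved into a standalone helper, other recipes that model intrinsic calls could reuse it instead of duplicating the TTI plumbing. A minimal sketch of such a caller follows; `SomeIntrinsicRecipe` and its `getIntrinsicID()` accessor are illustrative assumptions and not part of this commit, only `getCostForIntrinsics` comes from the change above.

// Hypothetical caller (sketch): a recipe deriving from VPRecipeWithIRFlags
// that models an intrinsic call forwards its ID and operands to the helper.
InstructionCost SomeIntrinsicRecipe::computeCost(ElementCount VF,
                                                 VPCostContext &Ctx) const {
  // Gather the VPValue operands corresponding to the intrinsic arguments.
  SmallVector<const VPValue *> ArgOps(operands());
  // Delegate the TTI cost query to the shared helper added in this commit.
  return getCostForIntrinsics(getIntrinsicID(), ArgOps, *this, VF, Ctx);
}

Making the helper a free static function that takes a VPRecipeWithIRFlags lets it consult fast-math flags and the underlying IR call without being tied to VPWidenIntrinsicRecipe specifically.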
