
Commit deeceea

!fixup use new getGEPExpr.
1 parent 0f1f329 commit deeceea

2 files changed: +6 -45 lines changed

llvm/lib/Transforms/Vectorize/VPlanRecipes.cpp
Lines changed: 0 additions & 1 deletion

@@ -3336,7 +3336,6 @@ InstructionCost VPReplicateRecipe::computeCost(ElementCount VF,
         UI->getOpcode(), ValTy, Alignment, AS, Ctx.CostKind, OpInfo);
 
     Type *PtrTy = isSingleScalar() ? ScalarPtrTy : toVectorTy(ScalarPtrTy, VF);
-
     bool PreferVectorizedAddressing = Ctx.TTI.prefersVectorizedAddressing();
     bool UsedByLoadStoreAddress =
         !PreferVectorizedAddressing && isUsedByLoadStoreAddress(this);

llvm/lib/Transforms/Vectorize/VPlanUtils.cpp
Lines changed: 6 additions & 44 deletions

@@ -119,55 +119,17 @@ const SCEV *vputils::getSCEVExprForVPValue(const VPValue *V,
         if (isa<SCEVCouldNotCompute>(Base))
           return SE.getCouldNotCompute();
 
-        Type *IntIdxTy = SE.getEffectiveSCEVType(Base->getType());
-        Type *CurTy = IntIdxTy;
-        bool FirstIter = true;
-        SmallVector<const SCEV *, 4> Offsets;
+        SmallVector<const SCEV *> IndexExprs;
         for (VPValue *Index : drop_begin(R->operands())) {
           const SCEV *IndexExpr = getSCEVExprForVPValue(Index, SE, L);
           if (isa<SCEVCouldNotCompute>(IndexExpr))
             return SE.getCouldNotCompute();
-          // Compute the (potentially symbolic) offset in bytes for this index.
-          if (StructType *STy = dyn_cast<StructType>(CurTy)) {
-            // For a struct, add the member offset.
-            ConstantInt *Index = cast<SCEVConstant>(IndexExpr)->getValue();
-            unsigned FieldNo = Index->getZExtValue();
-            const SCEV *FieldOffset =
-                SE.getOffsetOfExpr(IntIdxTy, STy, FieldNo);
-            Offsets.push_back(FieldOffset);
-
-            // Update CurTy to the type of the field at Index.
-            CurTy = STy->getTypeAtIndex(Index);
-          } else {
-            // Update CurTy to its element type.
-            if (FirstIter) {
-              CurTy = cast<GetElementPtrInst>(R->getUnderlyingInstr())
-                          ->getSourceElementType();
-              FirstIter = false;
-            } else {
-              CurTy = GetElementPtrInst::getTypeAtIndex(CurTy, (uint64_t)0);
-            }
-            // For an array, add the element offset, explicitly scaled.
-            const SCEV *ElementSize = SE.getSizeOfExpr(IntIdxTy, CurTy);
-            // Getelementptr indices are signed.
-            IndexExpr = SE.getTruncateOrSignExtend(IndexExpr, IntIdxTy);
-
-            // Multiply the index by the element size to compute the element
-            // offset.
-            const SCEV *LocalOffset = SE.getMulExpr(IndexExpr, ElementSize);
-            Offsets.push_back(LocalOffset);
-          }
+          IndexExprs.push_back(IndexExpr);
         }
-        // Handle degenerate case of GEP without offsets.
-        if (Offsets.empty())
-          return Base;
-
-        // Add the offsets together, assuming nsw if inbounds.
-        const SCEV *Offset = SE.getAddExpr(Offsets);
-        // Add the base address and the offset. We cannot use the nsw flag, as
-        // the base address is unsigned. However, if we know that the offset is
-        // non-negative, we can use nuw.
-        return SE.getAddExpr(Base, Offset);
+
+        Type *SrcElementTy = cast<GetElementPtrInst>(R->getUnderlyingInstr())
+                                 ->getSourceElementType();
+        return SE.getGEPExpr(Base, IndexExprs, SrcElementTy, SCEV::FlagAnyWrap);
       })
       .Default([&SE](const VPRecipeBase *) { return SE.getCouldNotCompute(); });
 }
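For context, the removed loop implemented the usual GEP address computation by hand: struct indices become constant field offsets, array indices are sign-extended and scaled by the element size, and all offsets are summed onto the base pointer SCEV. The sketch below restates that deleted logic as a standalone helper to illustrate what the single SE.getGEPExpr(Base, IndexExprs, SrcElementTy, SCEV::FlagAnyWrap) call above is now relied on to provide (along with its own wrap-flag handling). The helper name computeGEPOffsetSCEV is hypothetical; this is an illustration adapted from the deleted lines, not code from the patch, and the exact getGEPExpr overload used here is simply the one visible in the diff.

// Sketch only: mirrors the removed inline offset computation; not part of the
// patch. Assumes LLVM headers are available; the helper name is hypothetical.
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/ScalarEvolution.h"
#include "llvm/Analysis/ScalarEvolutionExpressions.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Instructions.h"

using namespace llvm;

static const SCEV *computeGEPOffsetSCEV(ScalarEvolution &SE, const SCEV *Base,
                                        Type *SrcElementTy,
                                        ArrayRef<const SCEV *> IndexExprs) {
  Type *IntIdxTy = SE.getEffectiveSCEVType(Base->getType());
  Type *CurTy = IntIdxTy; // Placeholder; replaced on the first array index.
  bool FirstIter = true;
  SmallVector<const SCEV *, 4> Offsets;
  for (const SCEV *IndexExpr : IndexExprs) {
    if (StructType *STy = dyn_cast<StructType>(CurTy)) {
      // A struct index is a constant field number; add that field's offset.
      unsigned FieldNo =
          cast<SCEVConstant>(IndexExpr)->getValue()->getZExtValue();
      Offsets.push_back(SE.getOffsetOfExpr(IntIdxTy, STy, FieldNo));
      CurTy = STy->getElementType(FieldNo);
    } else {
      // The first index strides over the GEP source element type; later
      // non-struct indices stride over the element type of CurTy.
      CurTy = FirstIter ? SrcElementTy
                        : GetElementPtrInst::getTypeAtIndex(CurTy, (uint64_t)0);
      FirstIter = false;
      // GEP indices are signed; scale the index by the element size.
      const SCEV *Idx = SE.getTruncateOrSignExtend(IndexExpr, IntIdxTy);
      Offsets.push_back(SE.getMulExpr(Idx, SE.getSizeOfExpr(IntIdxTy, CurTy)));
    }
  }
  // Degenerate GEP with no indices is just its base pointer.
  if (Offsets.empty())
    return Base;
  // Base plus the sum of all per-index offsets.
  const SCEV *Offset = SE.getAddExpr(Offsets);
  return SE.getAddExpr(Base, Offset);
}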
