@@ -2241,10 +2241,9 @@ class BoUpSLP {
2241
2241
/// TODO: If load combining is allowed in the IR optimizer, this analysis
2242
2242
/// may not be necessary.
2243
2243
bool isLoadCombineCandidate(ArrayRef<Value *> Stores) const;
2244
- bool isStridedLoad(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
2245
- ArrayRef<unsigned> Order, const TargetTransformInfo &TTI,
2246
- const DataLayout &DL, ScalarEvolution &SE,
2247
- const int64_t Diff, StridedPtrInfo &SPtrInfo) const;
2244
+ bool isStridedLoad(ArrayRef<Value *> PointerOps, Type *ScalarTy,
2245
+ Align Alignment, const int64_t Diff, Value *Ptr0,
2246
+ Value *PtrN, StridedPtrInfo &SPtrInfo) const;
2248
2247
2249
2248
/// Checks if the given array of loads can be represented as a vectorized,
2250
2249
/// scatter or just simple gather.
@@ -6824,13 +6823,10 @@ isMaskedLoadCompress(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
6824
6823
/// 4. Any pointer operand is an instruction with the users outside of the
6825
6824
/// current graph (for masked gathers extra extractelement instructions
6826
6825
/// might be required).
6827
- bool BoUpSLP::isStridedLoad(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
6828
- ArrayRef<unsigned> Order,
6829
- const TargetTransformInfo &TTI,
6830
- const DataLayout &DL, ScalarEvolution &SE,
6831
- const int64_t Diff,
6832
- StridedPtrInfo &SPtrInfo) const {
6833
- const size_t Sz = VL.size();
6826
+ bool BoUpSLP::isStridedLoad(ArrayRef<Value *> PointerOps, Type *ScalarTy,
6827
+ Align Alignment, const int64_t Diff, Value *Ptr0,
6828
+ Value *PtrN, StridedPtrInfo &SPtrInfo) const {
6829
+ const size_t Sz = PointerOps.size();
6834
6830
if (Diff % (Sz - 1) != 0)
6835
6831
return false;
6836
6832
@@ -6842,7 +6838,6 @@ bool BoUpSLP::isStridedLoad(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
6842
6838
});
6843
6839
6844
6840
const uint64_t AbsoluteDiff = std::abs(Diff);
6845
- Type *ScalarTy = VL.front()->getType();
6846
6841
auto *VecTy = getWidenedType(ScalarTy, Sz);
6847
6842
if (IsAnyPointerUsedOutGraph ||
6848
6843
(AbsoluteDiff > Sz &&
@@ -6853,20 +6848,9 @@ bool BoUpSLP::isStridedLoad(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
6853
6848
int64_t Stride = Diff / static_cast<int64_t>(Sz - 1);
6854
6849
if (Diff != Stride * static_cast<int64_t>(Sz - 1))
6855
6850
return false;
6856
- Align Alignment =
6857
- cast<LoadInst>(Order.empty() ? VL.front() : VL[Order.front()])
6858
- ->getAlign();
6859
- if (!TTI.isLegalStridedLoadStore(VecTy, Alignment))
6851
+ if (!TTI->isLegalStridedLoadStore(VecTy, Alignment))
6860
6852
return false;
6861
- Value *Ptr0;
6862
- Value *PtrN;
6863
- if (Order.empty()) {
6864
- Ptr0 = PointerOps.front();
6865
- PtrN = PointerOps.back();
6866
- } else {
6867
- Ptr0 = PointerOps[Order.front()];
6868
- PtrN = PointerOps[Order.back()];
6869
- }
6853
+
6870
6854
// Iterate through all pointers and check if all distances are
6871
6855
// unique multiple of Dist.
6872
6856
SmallSet<int64_t, 4> Dists;
@@ -6875,14 +6859,14 @@ bool BoUpSLP::isStridedLoad(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
6875
6859
if (Ptr == PtrN)
6876
6860
Dist = Diff;
6877
6861
else if (Ptr != Ptr0)
6878
- Dist = *getPointersDiff(ScalarTy, Ptr0, ScalarTy, Ptr, DL, SE);
6862
+ Dist = *getPointersDiff(ScalarTy, Ptr0, ScalarTy, Ptr, *DL, *SE);
6879
6863
// If the strides are not the same or repeated, we can't
6880
6864
// vectorize.
6881
6865
if (((Dist / Stride) * Stride) != Dist || !Dists.insert(Dist).second)
6882
6866
break;
6883
6867
}
6884
6868
if (Dists.size() == Sz) {
6885
- Type *StrideTy = DL.getIndexType(Ptr0->getType());
6869
+ Type *StrideTy = DL->getIndexType(Ptr0->getType());
6886
6870
SPtrInfo.StrideVal = ConstantInt::get(StrideTy, Stride);
6887
6871
SPtrInfo.Ty = getWidenedType(ScalarTy, Sz);
6888
6872
return true;
@@ -6971,7 +6955,11 @@ BoUpSLP::LoadsState BoUpSLP::canVectorizeLoads(
6971
6955
cast<Instruction>(V), UserIgnoreList);
6972
6956
}))
6973
6957
return LoadsState::CompressVectorize;
6974
- if (isStridedLoad(VL, PointerOps, Order, *TTI, *DL, *SE, *Diff, SPtrInfo))
6958
+ Align Alignment =
6959
+ cast<LoadInst>(Order.empty() ? VL.front() : VL[Order.front()])
6960
+ ->getAlign();
6961
+ if (isStridedLoad(PointerOps, ScalarTy, Alignment, *Diff, Ptr0, PtrN,
6962
+ SPtrInfo))
6975
6963
return LoadsState::StridedVectorize;
6976
6964
}
6977
6965
if (!TTI->isLegalMaskedGather(VecTy, CommonAlignment) ||
0 commit comments