
Commit e5fdddc

[SLP/LAA] Address review
1 parent 103d95b commit e5fdddc

3 files changed: +45, -47 lines


llvm/include/llvm/Analysis/LoopAccessAnalysis.h

Lines changed: 1 addition & 1 deletion
@@ -870,7 +870,7 @@ getPointersDiff(Type *ElemTyA, Value *PtrA, Type *ElemTyB, Value *PtrB,
 /// \p SortedIndices as <1,2,0,3>
 bool sortPtrAccesses(ArrayRef<Value *> VL, Type *ElemTy, const DataLayout &DL,
                      ScalarEvolution &SE,
-                     SmallVectorImpl<uint64_t> &SortedIndices);
+                     SmallVectorImpl<unsigned> &SortedIndices);

 /// Returns true if the memory operations \p A and \p B are consecutive.
 /// This is a simple API that does not depend on the analysis pass.

llvm/lib/Analysis/LoopAccessAnalysis.cpp

Lines changed: 2 additions & 2 deletions
@@ -1612,15 +1612,15 @@ std::optional<int64_t> llvm::getPointersDiff(Type *ElemTyA, Value *PtrA,

 bool llvm::sortPtrAccesses(ArrayRef<Value *> VL, Type *ElemTy,
                            const DataLayout &DL, ScalarEvolution &SE,
-                           SmallVectorImpl<uint64_t> &SortedIndices) {
+                           SmallVectorImpl<unsigned> &SortedIndices) {
   assert(llvm::all_of(
              VL, [](const Value *V) { return V->getType()->isPointerTy(); }) &&
          "Expected list of pointer operands.");
   // Walk over the pointers, and map each of them to an offset relative to
   // first pointer in the array.
   Value *Ptr0 = VL[0];

-  using DistOrdPair = std::pair<int64_t, uint64_t>;
+  using DistOrdPair = std::pair<int64_t, unsigned>;
   auto Compare = llvm::less_first();
   std::set<DistOrdPair, decltype(Compare)> Offsets(Compare);
   Offsets.emplace(0, 0);
llvm/lib/Transforms/Vectorize/SLPVectorizer.cpp

Lines changed: 42 additions & 44 deletions
@@ -1583,7 +1583,7 @@ static void addMask(SmallVectorImpl<int> &Mask, ArrayRef<int> SubMask,
 /// values 3 and 7 respectively:
 /// before: 6 9 5 4 9 2 1 0
 /// after:  6 3 5 4 7 2 1 0
-static void fixupOrderingIndices(MutableArrayRef<uint64_t> Order) {
+static void fixupOrderingIndices(MutableArrayRef<unsigned> Order) {
   const uint64_t Sz = Order.size();
   SmallBitVector UnusedIndices(Sz, /*t=*/true);
   SmallBitVector MaskedIndices(Sz);
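The doc comment above shows the intended effect. A rough standalone sketch of that behavior (not the exact function from this file; assumes every in-range index appears at most once, as in the example):

static void fixupOrderingIndicesSketch(MutableArrayRef<unsigned> Order) {
  // Replace out-of-range entries with the indices that do not occur yet,
  // e.g. {6, 9, 5, 4, 9, 2, 1, 0} -> {6, 3, 5, 4, 7, 2, 1, 0} for size 8.
  const unsigned Sz = Order.size();
  SmallBitVector Unused(Sz, /*t=*/true);
  SmallBitVector NeedsFixup(Sz);
  for (unsigned I = 0; I < Sz; ++I) {
    if (Order[I] < Sz)
      Unused.reset(Order[I]);
    else
      NeedsFixup.set(I);
  }
  // Hand the still-unused indices out to the positions that need fixing.
  for (int I = NeedsFixup.find_first(), U = Unused.find_first(); I >= 0;
       I = NeedsFixup.find_next(I), U = Unused.find_next(U))
    Order[I] = U;
}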
@@ -1636,7 +1636,7 @@ static SmallVector<Constant *> replicateMask(ArrayRef<Constant *> Val,

 namespace llvm {

-static void inversePermutation(ArrayRef<uint64_t> Indices,
+static void inversePermutation(ArrayRef<unsigned> Indices,
                                SmallVectorImpl<int> &Mask) {
   Mask.clear();
   const unsigned E = Indices.size();
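For reference, what an inverse permutation computes, and why the resulting mask is a vector of int while the order itself can stay unsigned (a sketch with a hypothetical helper name, not the function above):

// Mask[Indices[I]] = I, so applying Mask as a shuffle undoes the reordering
// described by Indices, e.g. Indices = {1, 2, 0, 3} gives Mask = {2, 0, 1, 3}.
static SmallVector<int> invertOrderSketch(ArrayRef<unsigned> Indices) {
  SmallVector<int> Mask(Indices.size(), /*PoisonMaskElem*/ -1);
  for (unsigned I = 0, E = Indices.size(); I < E; ++I)
    Mask[Indices[I]] = static_cast<int>(I);
  return Mask;
}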
@@ -1766,7 +1766,7 @@ class BoUpSLP {
   using ValueSet = SmallPtrSet<Value *, 16>;
   using StoreList = SmallVector<StoreInst *, 8>;
   using ExtraValueToDebugLocsMap = SmallDenseSet<Value *, 4>;
-  using OrdersType = SmallVector<uint64_t, 4>;
+  using OrdersType = SmallVector<unsigned, 4>;

   BoUpSLP(Function *Func, ScalarEvolution *Se, TargetTransformInfo *Tti,
           TargetLibraryInfo *TLi, AAResults *Aa, LoopInfo *Li,
@@ -1923,7 +1923,7 @@ class BoUpSLP {
   /// should be represented as an empty order, so this is used to
   /// decide if we can canonicalize a computed order. Undef elements
   /// (represented as size) are ignored.
-  static bool isIdentityOrder(ArrayRef<uint64_t> Order) {
+  static bool isIdentityOrder(ArrayRef<unsigned> Order) {
     assert(!Order.empty() && "expected non-empty order");
     const unsigned Sz = Order.size();
     return all_of(enumerate(Order), [&](const auto &P) {
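Per the comment above, an order counts as identity when each element either equals its position or is the undef marker (equal to the size). A tiny restatement of that rule (hypothetical helper, for illustration only):

// E.g. {0, 1, 4, 3} with size 4 is treated as identity (4 marks an undef slot).
static bool isIdentityOrderSketch(ArrayRef<unsigned> Order) {
  const unsigned Sz = Order.size();
  for (unsigned I = 0; I < Sz; ++I)
    if (Order[I] != I && Order[I] != Sz)
      return false;
  return true;
}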
@@ -2056,7 +2056,7 @@ class BoUpSLP {
   /// \param TryRecursiveCheck used to check if long masked gather can be
   /// represented as a serie of loads/insert subvector, if profitable.
   LoadsState canVectorizeLoads(ArrayRef<Value *> VL, const Value *VL0,
-                               SmallVectorImpl<uint64_t> &Order,
+                               SmallVectorImpl<unsigned> &Order,
                                SmallVectorImpl<Value *> &PointerOps,
                                unsigned *BestVF = nullptr,
                                bool TryRecursiveCheck = true) const;
@@ -3503,7 +3503,7 @@ class BoUpSLP {
   /// \param ResizeAllowed indicates whether it is allowed to handle subvector
   /// extract order.
   bool canReuseExtract(ArrayRef<Value *> VL,
-                       SmallVectorImpl<uint64_t> &CurrentOrder,
+                       SmallVectorImpl<unsigned> &CurrentOrder,
                        bool ResizeAllowed = false) const;

   /// Vectorize a single entry in the tree.
@@ -3789,7 +3789,7 @@ class BoUpSLP {
     SmallVector<int, 4> ReuseShuffleIndices;

     /// Does this entry require reordering?
-    SmallVector<uint64_t, 4> ReorderIndices;
+    SmallVector<unsigned, 4> ReorderIndices;

     /// Points back to the VectorizableTree.
     ///
@@ -4026,7 +4026,7 @@ class BoUpSLP {
           dbgs() << ReuseIdx << ", ";
       dbgs() << "\n";
       dbgs() << "ReorderIndices: ";
-      for (uint64_t ReorderIdx : ReorderIndices)
+      for (unsigned ReorderIdx : ReorderIndices)
        dbgs() << ReorderIdx << ", ";
       dbgs() << "\n";
       dbgs() << "UserTreeIndex: ";
@@ -4075,7 +4075,7 @@ class BoUpSLP {
                           const InstructionsState &S,
                           const EdgeInfo &UserTreeIdx,
                           ArrayRef<int> ReuseShuffleIndices = {},
-                          ArrayRef<uint64_t> ReorderIndices = {},
+                          ArrayRef<unsigned> ReorderIndices = {},
                           unsigned InterleaveFactor = 0) {
     TreeEntry::EntryState EntryState =
         Bundle ? TreeEntry::Vectorize : TreeEntry::NeedToGather;
@@ -4091,7 +4091,7 @@ class BoUpSLP {
                           ScheduleBundle &Bundle, const InstructionsState &S,
                           const EdgeInfo &UserTreeIdx,
                           ArrayRef<int> ReuseShuffleIndices = {},
-                          ArrayRef<uint64_t> ReorderIndices = {}) {
+                          ArrayRef<unsigned> ReorderIndices = {}) {
     assert(((!Bundle && (EntryState == TreeEntry::NeedToGather ||
                          EntryState == TreeEntry::SplitVectorize)) ||
             (Bundle && EntryState != TreeEntry::NeedToGather &&
@@ -4123,7 +4123,7 @@ class BoUpSLP {
       // Reorder scalars and build final mask.
       Last->Scalars.assign(VL.size(), nullptr);
       transform(ReorderIndices, Last->Scalars.begin(),
-                [VL](uint64_t Idx) -> Value * {
+                [VL](unsigned Idx) -> Value * {
                   if (Idx >= VL.size())
                     return UndefValue::get(VL.front()->getType());
                   return VL[Idx];
@@ -5317,12 +5317,12 @@ static void reorderReuses(SmallVectorImpl<int> &Reuses, ArrayRef<int> Mask) {
 /// the original order of the scalars. Procedure transforms the provided order
 /// in accordance with the given \p Mask. If the resulting \p Order is just an
 /// identity order, \p Order is cleared.
-static void reorderOrder(SmallVectorImpl<uint64_t> &Order, ArrayRef<int> Mask,
+static void reorderOrder(SmallVectorImpl<unsigned> &Order, ArrayRef<int> Mask,
                          bool BottomOrder = false) {
   assert(!Mask.empty() && "Expected non-empty mask.");
   unsigned Sz = Mask.size();
   if (BottomOrder) {
-    SmallVector<uint64_t> PrevOrder;
+    SmallVector<unsigned> PrevOrder;
     if (Order.empty()) {
       PrevOrder.resize(Sz);
       std::iota(PrevOrder.begin(), PrevOrder.end(), 0);
@@ -5431,7 +5431,7 @@ BoUpSLP::findReusedOrderedScalars(const BoUpSLP::TreeEntry &TE,
       (GatherShuffles.empty() && IsSplatMask(ExtractMask)))
     return std::nullopt;
   SmallBitVector ShuffledSubMasks(NumParts);
-  auto TransformMaskToOrder = [&](MutableArrayRef<uint64_t> CurrentOrder,
+  auto TransformMaskToOrder = [&](MutableArrayRef<unsigned> CurrentOrder,
                                   ArrayRef<int> Mask, int PartSz, int NumParts,
                                   function_ref<unsigned(unsigned)> GetVF) {
     for (int I : seq<int>(0, NumParts)) {
@@ -5441,9 +5441,9 @@ BoUpSLP::findReusedOrderedScalars(const BoUpSLP::TreeEntry &TE,
       if (VF == 0)
         continue;
       unsigned Limit = getNumElems(CurrentOrder.size(), PartSz, I);
-      MutableArrayRef<uint64_t> Slice = CurrentOrder.slice(I * PartSz, Limit);
+      MutableArrayRef<unsigned> Slice = CurrentOrder.slice(I * PartSz, Limit);
       // Shuffle of at least 2 vectors - ignore.
-      if (any_of(Slice, [&](uint64_t I) { return I != NumScalars; })) {
+      if (any_of(Slice, [&](unsigned I) { return I != NumScalars; })) {
         std::fill(Slice.begin(), Slice.end(), NumScalars);
         ShuffledSubMasks.set(I);
         continue;
@@ -5542,7 +5542,7 @@ BoUpSLP::findReusedOrderedScalars(const BoUpSLP::TreeEntry &TE,
                              Entries[I].back()->getVectorFactor());
                });
   unsigned NumUndefs =
-      count_if(CurrentOrder, [&](uint64_t Idx) { return Idx == NumScalars; });
+      count_if(CurrentOrder, [&](unsigned Idx) { return Idx == NumScalars; });
   if (ShuffledSubMasks.all() || (NumScalars > 2 && NumUndefs >= NumScalars / 2))
     return std::nullopt;
   return std::move(CurrentOrder);
@@ -5575,7 +5575,7 @@ static Align computeCommonAlignment(ArrayRef<Value *> VL) {
 }

 /// Check if \p Order represents reverse order.
-static bool isReverseOrder(ArrayRef<uint64_t> Order) {
+static bool isReverseOrder(ArrayRef<unsigned> Order) {
   assert(!Order.empty() &&
          "Order is empty. Please check it before using isReverseOrder.");
   unsigned Sz = Order.size();
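As a companion to the identity check, a minimal sketch of what a reverse order looks like (hypothetical helper; the real predicate above may additionally tolerate undef slots):

// Element I maps to position Sz - 1 - I, e.g. {3, 2, 1, 0} for four scalars.
static bool isReverseOrderSketch(ArrayRef<unsigned> Order) {
  const unsigned Sz = Order.size();
  for (unsigned I = 0; I < Sz; ++I)
    if (Order[I] != Sz - 1 - I)
      return false;
  return !Order.empty();
}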
@@ -5594,7 +5594,7 @@ static bool isReverseOrder(ArrayRef<uint64_t> Order) {
 static std::optional<Value *>
 calculateRtStride(ArrayRef<Value *> PointerOps, Type *ElemTy,
                   const DataLayout &DL, ScalarEvolution &SE,
-                  SmallVectorImpl<uint64_t> &SortedIndices,
+                  SmallVectorImpl<unsigned> &SortedIndices,
                   Instruction *Inst = nullptr) {
   SmallVector<const SCEV *> SCEVs;
   const SCEV *PtrSCEVLowest = nullptr;
@@ -5857,7 +5857,7 @@ static Value *createExtractVector(IRBuilderBase &Builder, Value *Vec,
 /// with \p Order.
 /// \return true if the mask represents strided access, false - otherwise.
 static bool buildCompressMask(ArrayRef<Value *> PointerOps,
-                              ArrayRef<uint64_t> Order, Type *ScalarTy,
+                              ArrayRef<unsigned> Order, Type *ScalarTy,
                               const DataLayout &DL, ScalarEvolution &SE,
                               SmallVectorImpl<int> &CompressMask) {
   const unsigned Sz = PointerOps.size();
@@ -5891,7 +5891,7 @@ static bool buildCompressMask(ArrayRef<Value *> PointerOps,
 /// (masked) interleaved load.
 static bool isMaskedLoadCompress(
     ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
-    ArrayRef<uint64_t> Order, const TargetTransformInfo &TTI,
+    ArrayRef<unsigned> Order, const TargetTransformInfo &TTI,
     const DataLayout &DL, ScalarEvolution &SE, AssumptionCache &AC,
     const DominatorTree &DT, const TargetLibraryInfo &TLI,
     const function_ref<bool(Value *)> AreAllUsersVectorized, bool &IsMasked,
@@ -6025,7 +6025,7 @@ static bool isMaskedLoadCompress(
 /// (masked) interleaved load.
 static bool
 isMaskedLoadCompress(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
-                     ArrayRef<uint64_t> Order, const TargetTransformInfo &TTI,
+                     ArrayRef<unsigned> Order, const TargetTransformInfo &TTI,
                      const DataLayout &DL, ScalarEvolution &SE,
                      AssumptionCache &AC, const DominatorTree &DT,
                      const TargetLibraryInfo &TLI,
@@ -6053,7 +6053,7 @@ isMaskedLoadCompress(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
 /// current graph (for masked gathers extra extractelement instructions
 /// might be required).
 static bool isStridedLoad(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,
-                          ArrayRef<uint64_t> Order,
+                          ArrayRef<unsigned> Order,
                           const TargetTransformInfo &TTI, const DataLayout &DL,
                           ScalarEvolution &SE,
                           const bool IsAnyPointerUsedOutGraph,
@@ -6107,7 +6107,7 @@ static bool isStridedLoad(ArrayRef<Value *> VL, ArrayRef<Value *> PointerOps,

 BoUpSLP::LoadsState
 BoUpSLP::canVectorizeLoads(ArrayRef<Value *> VL, const Value *VL0,
-                           SmallVectorImpl<uint64_t> &Order,
+                           SmallVectorImpl<unsigned> &Order,
                            SmallVectorImpl<Value *> &PointerOps,
                            unsigned *BestVF, bool TryRecursiveCheck) const {
   // Check that a vectorized load would load the same memory as a scalar
@@ -6275,7 +6275,7 @@ BoUpSLP::canVectorizeLoads(ArrayRef<Value *> VL, const Value *VL0,
       SmallVector<LoadsState> States;
       for (unsigned Cnt = 0, End = VL.size(); Cnt + VF <= End; Cnt += VF) {
         ArrayRef<Value *> Slice = VL.slice(Cnt, VF);
-        SmallVector<uint64_t> Order;
+        SmallVector<unsigned> Order;
         SmallVector<Value *> PointerOps;
         LoadsState LS =
             canVectorizeLoads(Slice, Slice.front(), Order, PointerOps, BestVF,
@@ -6426,7 +6426,7 @@ BoUpSLP::canVectorizeLoads(ArrayRef<Value *> VL, const Value *VL0,
 static bool clusterSortPtrAccesses(ArrayRef<Value *> VL,
                                    ArrayRef<BasicBlock *> BBs, Type *ElemTy,
                                    const DataLayout &DL, ScalarEvolution &SE,
-                                   SmallVectorImpl<uint64_t> &SortedIndices) {
+                                   SmallVectorImpl<unsigned> &SortedIndices) {
   assert(
       all_of(VL, [](const Value *V) { return V->getType()->isPointerTy(); }) &&
       "Expected list of pointer operands.");
@@ -6727,7 +6727,7 @@ BoUpSLP::getReorderingData(const TreeEntry &TE, bool TopToBottom,
       if (SubMask.front() == PoisonMaskElem)
         std::iota(SubMask.begin(), SubMask.end(), 0);
       reorderOrder(CurrentOrder, SubMask);
-      transform(CurrentOrder, It, [K](uint64_t Pos) { return Pos + K; });
+      transform(CurrentOrder, It, [K](unsigned Pos) { return Pos + K; });
       std::advance(It, Sz);
     }
     if (TE.isGather() && all_of(enumerate(ResOrder), [](const auto &Data) {
@@ -7001,7 +7001,7 @@ void BoUpSLP::reorderNodeWithReuses(TreeEntry &TE, ArrayRef<int> Mask) const {
   TE.ReorderIndices.clear();
   // Try to improve gathered nodes with clustered reuses, if possible.
   ArrayRef<int> Slice = ArrayRef(NewMask).slice(0, Sz);
-  SmallVector<uint64_t> NewOrder(Slice);
+  SmallVector<unsigned> NewOrder(Slice);
   inversePermutation(NewOrder, NewMask);
   reorderScalars(TE.Scalars, NewMask);
   // Fill the reuses mask with the identity submasks.
@@ -7011,8 +7011,8 @@ void BoUpSLP::reorderNodeWithReuses(TreeEntry &TE, ArrayRef<int> Mask) const {
     std::iota(It, std::next(It, Sz), 0);
 }

-static void combineOrders(MutableArrayRef<uint64_t> Order,
-                          ArrayRef<uint64_t> SecondaryOrder) {
+static void combineOrders(MutableArrayRef<unsigned> Order,
+                          ArrayRef<unsigned> SecondaryOrder) {
   assert((SecondaryOrder.empty() || Order.size() == SecondaryOrder.size()) &&
          "Expected same size of orders");
   uint64_t Sz = Order.size();
@@ -7303,7 +7303,7 @@ void BoUpSLP::reorderTopToBottom() {
        combineOrders(IdentityOrder, Pair.first);
      }
    }
-    MutableArrayRef<uint64_t> BestOrder = IdentityOrder;
+    MutableArrayRef<unsigned> BestOrder = IdentityOrder;
    unsigned Cnt = IdentityCnt;
    for (auto &Pair : OrdersUses) {
      // Prefer identity order. But, if filled identity found (non-empty order)
@@ -7328,7 +7328,7 @@ void BoUpSLP::reorderTopToBottom() {
    inversePermutation(BestOrder, Mask);
    SmallVector<int> MaskOrder(BestOrder.size(), PoisonMaskElem);
    unsigned E = BestOrder.size();
-    transform(BestOrder, MaskOrder.begin(), [E](uint64_t I) {
+    transform(BestOrder, MaskOrder.begin(), [E](unsigned I) {
      return I < E ? static_cast<int>(I) : PoisonMaskElem;
    });
    // Do an actual reordering, if profitable.
@@ -7560,7 +7560,7 @@ void BoUpSLP::reorderBottomToTop(bool IgnoreReorder) {
      inversePermutation(Order, Mask);
      const unsigned E = Order.size();
      SmallVector<int> MaskOrder(E, PoisonMaskElem);
-      transform(Order, MaskOrder.begin(), [E](uint64_t I) {
+      transform(Order, MaskOrder.begin(), [E](unsigned I) {
        return I < E ? static_cast<int>(I) : PoisonMaskElem;
      });
      Data.first->reorderSplitNode(P.second ? 1 : 0, Mask, MaskOrder);
@@ -7777,7 +7777,7 @@ void BoUpSLP::reorderBottomToTop(bool IgnoreReorder) {
        combineOrders(IdentityOrder, Pair.first);
      }
    }
-    MutableArrayRef<uint64_t> BestOrder = IdentityOrder;
+    MutableArrayRef<unsigned> BestOrder = IdentityOrder;
    unsigned Cnt = IdentityCnt;
    for (auto &Pair : OrdersUses) {
      // Prefer identity order. But, if filled identity found (non-empty
@@ -7803,7 +7803,7 @@ void BoUpSLP::reorderBottomToTop(bool IgnoreReorder) {
    inversePermutation(BestOrder, Mask);
    SmallVector<int> MaskOrder(BestOrder.size(), PoisonMaskElem);
    unsigned E = BestOrder.size();
-    transform(BestOrder, MaskOrder.begin(), [E](uint64_t I) {
+    transform(BestOrder, MaskOrder.begin(), [E](unsigned I) {
      return I < E ? static_cast<int>(I) : PoisonMaskElem;
    });
    for (const std::pair<unsigned, TreeEntry *> &Op : Data.second) {
@@ -10077,7 +10077,7 @@ BoUpSLP::getScalarsVectorizationLegality(ArrayRef<Value *> VL, unsigned Depth,
    }
    return true;
  };
-  SmallVector<uint64_t> SortedIndices;
+  SmallVector<unsigned> SortedIndices;
  BasicBlock *BB = nullptr;
  bool IsScatterVectorizeUserTE =
      UserTreeIdx.UserTE &&
@@ -10369,7 +10369,7 @@ void BoUpSLP::buildTreeRec(ArrayRef<Value *> VLRef, unsigned Depth,
    LLVM_DEBUG({
      dbgs() << "SLP: Reusing or shuffling of reordered extract sequence "
                "with order";
-      for (uint64_t Idx : CurrentOrder)
+      for (unsigned Idx : CurrentOrder)
        dbgs() << " " << Idx;
      dbgs() << "\n";
    });
@@ -10770,7 +10770,7 @@ unsigned BoUpSLP::canMapToVector(Type *T) const {
 }

 bool BoUpSLP::canReuseExtract(ArrayRef<Value *> VL,
-                              SmallVectorImpl<uint64_t> &CurrentOrder,
+                              SmallVectorImpl<unsigned> &CurrentOrder,
                               bool ResizeAllowed) const {
   const auto *It = find_if(VL, IsaPred<ExtractElementInst, ExtractValueInst>);
   assert(It != VL.end() && "Expected at least one extract instruction.");
@@ -16789,7 +16789,7 @@ class BoUpSLP::ShuffleInstructionBuilder final : public BaseShuffleAnalysis {
      CommonMask[Idx] = Mask[Idx] + (It == InVectors.begin() ? 0 : VF);
  }
  /// Adds another one input vector and the mask for the shuffling.
-  void addOrdered(Value *V1, ArrayRef<uint64_t> Order) {
+  void addOrdered(Value *V1, ArrayRef<unsigned> Order) {
    SmallVector<int> NewMask;
    inversePermutation(Order, NewMask);
    add(V1, NewMask);
@@ -17712,11 +17712,9 @@ Value *BoUpSLP::vectorizeTree(TreeEntry *E) {
   ShuffleInstructionBuilder ShuffleBuilder(ScalarTy, Builder, *this);
   if (E->getOpcode() == Instruction::Store &&
       E->State == TreeEntry::Vectorize) {
-    SmallVector<int> Mask(E->ReorderIndices.size());
-    // This cast should be safe, as ReorderIndices is only ever assigned a
-    // 32-bit value.
-    transform(E->ReorderIndices, Mask.begin(),
-              [](const uint64_t &I) { return static_cast<int>(I); });
+    ArrayRef<int> Mask =
+        ArrayRef(reinterpret_cast<const int *>(E->ReorderIndices.begin()),
+                 E->ReorderIndices.size());
     ShuffleBuilder.add(V, Mask);
   } else if ((E->State == TreeEntry::StridedVectorize && IsReverseOrder) ||
              E->State == TreeEntry::CompressVectorize) {
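The switch from uint64_t to unsigned is what makes the new, copy-free form above legal: unsigned and int have the same width, so the stored indices can be viewed directly as a shuffle mask. A minimal standalone sketch of the same pattern (hypothetical helper name):

#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/SmallVector.h"
using namespace llvm;

// View a buffer of unsigned reorder indices as an ArrayRef<int> mask without
// copying; int and unsigned are corresponding signed/unsigned types, so this
// aliasing is permitted, and their widths match by definition.
static ArrayRef<int> asMaskSketch(ArrayRef<unsigned> ReorderIndices) {
  static_assert(sizeof(unsigned) == sizeof(int),
                "mask view relies on equal widths");
  return ArrayRef(reinterpret_cast<const int *>(ReorderIndices.data()),
                  ReorderIndices.size());
}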
